{
  "best_metric": 1.1818922758102417,
  "best_model_checkpoint": "output/lady-gaga/checkpoint-945",
  "epoch": 7.0,
  "global_step": 945,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 0.0001367495807816491,
      "loss": 2.9044,
      "step": 5
    },
    {
      "epoch": 0.07,
      "learning_rate": 0.000135404237921706,
      "loss": 2.7308,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 0.0001331816381338556,
      "loss": 2.5335,
      "step": 15
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.00013011096805539083,
      "loss": 2.6599,
      "step": 20
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.00012623255097542413,
      "loss": 2.6824,
      "step": 25
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00012159731731930818,
      "loss": 2.5916,
      "step": 30
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00011626613584273574,
      "loss": 2.4845,
      "step": 35
    },
    {
      "epoch": 0.29,
      "learning_rate": 0.00011030901431811399,
      "loss": 2.4155,
      "step": 40
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.00010380418020960441,
      "loss": 2.386,
      "step": 45
    },
    {
      "epoch": 0.36,
      "learning_rate": 9.683705340917955e-05,
      "loss": 2.4477,
      "step": 50
    },
    {
      "epoch": 0.4,
      "learning_rate": 8.949912452347496e-05,
      "loss": 2.2686,
      "step": 55
    },
    {
      "epoch": 0.44,
      "learning_rate": 8.188675344149976e-05,
      "loss": 2.4056,
      "step": 60
    },
    {
      "epoch": 0.47,
      "learning_rate": 7.409990396012103e-05,
      "loss": 2.4163,
      "step": 65
    },
    {
      "epoch": 0.51,
      "learning_rate": 6.62408310839107e-05,
      "loss": 2.239,
      "step": 70
    },
    {
      "epoch": 0.55,
      "learning_rate": 5.841273823741302e-05,
      "loss": 2.3147,
      "step": 75
    },
    {
      "epoch": 0.58,
      "learning_rate": 5.071842202299234e-05,
      "loss": 2.0396,
      "step": 80
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.3258922320970566e-05,
      "loss": 2.2021,
      "step": 85
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.613219545860942e-05,
      "loss": 2.3175,
      "step": 90
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.9431827871557342e-05,
      "loss": 2.2247,
      "step": 95
    },
    {
      "epoch": 0.73,
      "learning_rate": 2.3245807149646032e-05,
      "loss": 2.3038,
      "step": 100
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7655366605361312e-05,
      "loss": 2.2539,
      "step": 105
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.2733918537834174e-05,
      "loss": 2.4522,
      "step": 110
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.546090200467903e-06,
      "loss": 2.1125,
      "step": 115
    },
    {
      "epoch": 0.88,
      "learning_rate": 5.146875131638653e-06,
      "loss": 2.1399,
      "step": 120
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.5809109929881027e-06,
      "loss": 2.159,
      "step": 125
    },
    {
      "epoch": 0.95,
      "learning_rate": 8.818933985584571e-07,
      "loss": 2.4269,
      "step": 130
    },
    {
      "epoch": 0.99,
      "learning_rate": 7.213343222279864e-08,
      "loss": 2.4774,
      "step": 135
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.1954123973846436,
      "eval_runtime": 8.0612,
      "eval_samples_per_second": 22.453,
      "eval_steps_per_second": 2.853,
      "step": 137
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.6226466398373308e-07,
      "loss": 1.9999,
      "step": 140
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.1511035125304852e-06,
      "loss": 1.9351,
      "step": 145
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.0256647877395575e-06,
      "loss": 2.077,
      "step": 150
    },
    {
      "epoch": 1.13,
      "learning_rate": 5.7613322090181854e-06,
      "loss": 2.0647,
      "step": 155
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.322181660285877e-06,
      "loss": 2.0654,
      "step": 160
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.3661452936688322e-05,
      "loss": 2.1723,
      "step": 165
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.8722163788180865e-05,
      "loss": 2.0249,
      "step": 170
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.4437858196511952e-05,
      "loss": 2.3832,
      "step": 175
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.0733479059415244e-05,
      "loss": 2.0252,
      "step": 180
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.752635382213567e-05,
      "loss": 2.119,
      "step": 185
    },
    {
      "epoch": 1.39,
      "learning_rate": 4.472728011321611e-05,
      "loss": 2.1383,
      "step": 190
    },
    {
      "epoch": 1.42,
      "learning_rate": 5.224169712824147e-05,
      "loss": 2.3061,
      "step": 195
    },
    {
      "epoch": 1.46,
      "learning_rate": 5.997092737921572e-05,
      "loss": 1.9195,
      "step": 200
    },
    {
      "epoch": 1.5,
      "learning_rate": 6.781347250322049e-05,
      "loss": 2.3574,
      "step": 205
    },
    {
      "epoch": 1.53,
      "learning_rate": 7.566634611411474e-05,
      "loss": 1.9516,
      "step": 210
    },
    {
      "epoch": 1.57,
      "learning_rate": 8.34264261945917e-05,
      "loss": 2.1115,
      "step": 215
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.099180926931044e-05,
      "loss": 2.2426,
      "step": 220
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.82631485764331e-05,
      "loss": 2.1105,
      "step": 225
    },
    {
      "epoch": 1.68,
      "learning_rate": 0.00010514495866502442,
      "loss": 1.8663,
      "step": 230
    },
    {
      "epoch": 1.72,
      "learning_rate": 0.0001115468692866605,
      "loss": 1.8012,
      "step": 235
    },
    {
      "epoch": 1.75,
      "learning_rate": 0.00011738481211544774,
      "loss": 1.9638,
      "step": 240
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.00012258212471273538,
      "loss": 1.8965,
      "step": 245
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.0001270705572395265,
      "loss": 2.1566,
      "step": 250
    },
    {
      "epoch": 1.86,
      "learning_rate": 0.00013079116869668585,
      "loss": 1.7786,
      "step": 255
    },
    {
      "epoch": 1.9,
      "learning_rate": 0.00013369510092373763,
      "loss": 1.9581,
      "step": 260
    },
    {
      "epoch": 1.93,
      "learning_rate": 0.00013574422019228965,
      "loss": 1.8796,
      "step": 265
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.00013691161796883605,
      "loss": 2.1161,
      "step": 270
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.0334055423736572,
      "eval_runtime": 8.0619,
      "eval_samples_per_second": 22.451,
      "eval_steps_per_second": 2.853,
      "step": 274
    },
    {
      "epoch": 2.01,
      "learning_rate": 0.00013718196427104403,
      "loss": 1.8034,
      "step": 275
    },
    {
      "epoch": 2.04,
      "learning_rate": 0.00013655170897733555,
      "loss": 1.7696,
      "step": 280
    },
    {
      "epoch": 2.08,
      "learning_rate": 0.00013502912844621666,
      "loss": 1.9071,
      "step": 285
    },
    {
      "epoch": 2.12,
      "learning_rate": 0.00013263421683315968,
      "loss": 1.6146,
      "step": 290
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.00012939842353223955,
      "loss": 1.6415,
      "step": 295
    },
    {
      "epoch": 2.19,
      "learning_rate": 0.0001253642401903761,
      "loss": 1.6821,
      "step": 300
    },
    {
      "epoch": 2.23,
      "learning_rate": 0.0001205846427174097,
      "loss": 1.7192,
      "step": 305
    },
    {
      "epoch": 2.26,
      "learning_rate": 0.0001151223956193987,
      "loss": 1.8815,
      "step": 310
    },
    {
      "epoch": 2.3,
      "learning_rate": 0.00010904922779046358,
      "loss": 1.7244,
      "step": 315
    },
    {
      "epoch": 2.34,
      "learning_rate": 0.00010244489058647663,
      "loss": 1.9117,
      "step": 320
    },
    {
      "epoch": 2.37,
      "learning_rate": 9.539611054974517e-05,
      "loss": 1.7657,
      "step": 325
    },
    {
      "epoch": 2.41,
      "learning_rate": 8.799545053725109e-05,
      "loss": 1.8888,
      "step": 330
    },
    {
      "epoch": 2.45,
      "learning_rate": 8.034009420783132e-05,
      "loss": 1.617,
      "step": 335
    },
    {
      "epoch": 2.48,
      "learning_rate": 7.253056983011824e-05,
      "loss": 1.6686,
      "step": 340
    },
    {
      "epoch": 2.52,
      "learning_rate": 6.46694301698818e-05,
      "loss": 1.7576,
      "step": 345
    },
    {
      "epoch": 2.55,
      "learning_rate": 5.6859905792168714e-05,
      "loss": 1.5118,
      "step": 350
    },
    {
      "epoch": 2.59,
      "learning_rate": 4.920454946274896e-05,
      "loss": 1.4238,
      "step": 355
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.180388945025492e-05,
      "loss": 1.7583,
      "step": 360
    },
    {
      "epoch": 2.66,
      "learning_rate": 3.4755109413523463e-05,
      "loss": 1.8061,
      "step": 365
    },
    {
      "epoch": 2.7,
      "learning_rate": 2.8150772209536505e-05,
      "loss": 1.8494,
      "step": 370
    },
    {
      "epoch": 2.74,
      "learning_rate": 2.2077604380601286e-05,
      "loss": 1.687,
      "step": 375
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.6615357282590296e-05,
      "loss": 1.495,
      "step": 380
    },
    {
      "epoch": 2.81,
      "learning_rate": 1.1835759809623895e-05,
      "loss": 1.2369,
      "step": 385
    },
    {
      "epoch": 2.85,
      "learning_rate": 7.801576467760446e-06,
      "loss": 1.8264,
      "step": 390
    },
    {
      "epoch": 2.88,
      "learning_rate": 4.565783166840331e-06,
      "loss": 1.4865,
      "step": 395
    },
    {
      "epoch": 2.92,
      "learning_rate": 2.1708715537833474e-06,
      "loss": 1.805,
      "step": 400
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.482910226644399e-07,
      "loss": 1.5174,
      "step": 405
    },
    {
      "epoch": 2.99,
      "learning_rate": 1.8035728955987417e-08,
      "loss": 1.5703,
      "step": 410
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.8793048858642578,
      "eval_runtime": 8.0763,
      "eval_samples_per_second": 22.411,
      "eval_steps_per_second": 2.848,
      "step": 411
    },
    {
      "epoch": 3.03,
      "learning_rate": 2.883820311639419e-07,
      "loss": 1.4287,
      "step": 415
    },
    {
      "epoch": 3.07,
      "learning_rate": 1.4557798077103288e-06,
      "loss": 1.4968,
      "step": 420
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.5048990762623317e-06,
      "loss": 1.3425,
      "step": 425
    },
    {
      "epoch": 3.14,
      "learning_rate": 6.408831303314137e-06,
      "loss": 1.3711,
      "step": 430
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.012944276047348e-05,
      "loss": 1.3321,
      "step": 435
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.461787528726463e-05,
      "loss": 1.5272,
      "step": 440
    },
    {
      "epoch": 3.25,
      "learning_rate": 1.981518788455224e-05,
      "loss": 1.5268,
      "step": 445
    },
    {
      "epoch": 3.28,
      "learning_rate": 2.5653130713339512e-05,
      "loss": 1.3194,
      "step": 450
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.2055041334975546e-05,
      "loss": 1.7163,
      "step": 455
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.8936851423566864e-05,
      "loss": 1.6848,
      "step": 460
    },
    {
      "epoch": 3.39,
      "learning_rate": 4.620819073068958e-05,
      "loss": 1.3757,
      "step": 465
    },
    {
      "epoch": 3.43,
      "learning_rate": 5.377357380540826e-05,
      "loss": 1.4964,
      "step": 470
    },
    {
      "epoch": 3.47,
      "learning_rate": 6.15336538858851e-05,
      "loss": 1.3765,
      "step": 475
    },
    {
      "epoch": 3.5,
      "learning_rate": 6.938652749677947e-05,
      "loss": 1.4432,
      "step": 480
    },
    {
      "epoch": 3.54,
      "learning_rate": 7.722907262078417e-05,
      "loss": 1.4727,
      "step": 485
    },
    {
      "epoch": 3.58,
      "learning_rate": 8.495830287175848e-05,
      "loss": 1.4478,
      "step": 490
    },
    {
      "epoch": 3.61,
      "learning_rate": 9.247271988678396e-05,
      "loss": 1.5617,
      "step": 495
    },
    {
      "epoch": 3.65,
      "learning_rate": 9.967364617786429e-05,
      "loss": 1.6394,
      "step": 500
    },
    {
      "epoch": 3.69,
      "learning_rate": 0.00010646652094058468,
      "loss": 1.3227,
      "step": 505
    },
    {
      "epoch": 3.72,
      "learning_rate": 0.00011276214180348806,
      "loss": 1.6685,
      "step": 510
    },
    {
      "epoch": 3.76,
      "learning_rate": 0.00011847783621181909,
      "loss": 1.4602,
      "step": 515
    },
    {
      "epoch": 3.8,
      "learning_rate": 0.0001235385470633116,
      "loss": 1.5915,
      "step": 520
    },
    {
      "epoch": 3.83,
      "learning_rate": 0.0001278778183397141,
      "loss": 1.6258,
      "step": 525
    },
    {
      "epoch": 3.87,
      "learning_rate": 0.0001314386677909818,
      "loss": 1.4522,
      "step": 530
    },
    {
      "epoch": 3.91,
      "learning_rate": 0.0001341743352122604,
      "loss": 1.7715,
      "step": 535
    },
    {
      "epoch": 3.94,
      "learning_rate": 0.0001360488964874695,
      "loss": 1.5622,
      "step": 540
    },
    {
      "epoch": 3.98,
      "learning_rate": 0.0001370377353360163,
      "loss": 1.5959,
      "step": 545
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.901490569114685,
      "eval_runtime": 8.0503,
      "eval_samples_per_second": 22.484,
      "eval_steps_per_second": 2.857,
      "step": 548
    },
    {
      "epoch": 4.01,
      "learning_rate": 0.00013712786656777722,
      "loss": 1.2578,
      "step": 550
    },
    {
      "epoch": 4.05,
      "learning_rate": 0.0001363181066014416,
      "loss": 1.2876,
      "step": 555
    },
    {
      "epoch": 4.09,
      "learning_rate": 0.00013461908900701188,
      "loss": 1.3214,
      "step": 560
    },
    {
      "epoch": 4.12,
      "learning_rate": 0.00013205312486836134,
      "loss": 1.3761,
      "step": 565
    },
    {
      "epoch": 4.16,
      "learning_rate": 0.0001286539097995321,
      "loss": 1.4559,
      "step": 570
    },
    {
      "epoch": 4.2,
      "learning_rate": 0.00012446608146216587,
      "loss": 1.1372,
      "step": 575
    },
    {
      "epoch": 4.23,
      "learning_rate": 0.0001195446333946388,
      "loss": 1.1916,
      "step": 580
    },
    {
      "epoch": 4.27,
      "learning_rate": 0.00011395419285035411,
      "loss": 1.3227,
      "step": 585
    },
    {
      "epoch": 4.31,
      "learning_rate": 0.00010776817212844265,
      "loss": 1.3371,
      "step": 590
    },
    {
      "epoch": 4.34,
      "learning_rate": 0.00010106780454139066,
      "loss": 1.2405,
      "step": 595
    },
    {
      "epoch": 4.38,
      "learning_rate": 9.394107767902941e-05,
      "loss": 1.3486,
      "step": 600
    },
    {
      "epoch": 4.42,
      "learning_rate": 8.648157797700773e-05,
      "loss": 1.4013,
      "step": 605
    },
    {
      "epoch": 4.45,
      "learning_rate": 7.878726176258706e-05,
      "loss": 0.9863,
      "step": 610
    },
    {
      "epoch": 4.49,
      "learning_rate": 7.095916891608919e-05,
      "loss": 1.4232,
      "step": 615
    },
    {
      "epoch": 4.53,
      "learning_rate": 6.310009603987893e-05,
      "loss": 1.3065,
      "step": 620
    },
    {
      "epoch": 4.56,
      "learning_rate": 5.531324655850025e-05,
      "loss": 1.4746,
      "step": 625
    },
    {
      "epoch": 4.6,
      "learning_rate": 4.77008754765251e-05,
      "loss": 1.286,
      "step": 630
    },
    {
      "epoch": 4.64,
      "learning_rate": 4.036294659082055e-05,
      "loss": 1.4165,
      "step": 635
    },
    {
      "epoch": 4.67,
      "learning_rate": 3.3395819790395745e-05,
      "loss": 1.3111,
      "step": 640
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.689098568188601e-05,
      "loss": 1.1802,
      "step": 645
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.093386415726427e-05,
      "loss": 1.2933,
      "step": 650
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.5602682680691885e-05,
      "loss": 1.2776,
      "step": 655
    },
    {
      "epoch": 4.82,
      "learning_rate": 1.09674490245759e-05,
      "loss": 1.5263,
      "step": 660
    },
    {
      "epoch": 4.85,
      "learning_rate": 7.089031944609217e-06,
      "loss": 1.3473,
      "step": 665
    },
    {
      "epoch": 4.89,
      "learning_rate": 4.018361866144457e-06,
      "loss": 1.2805,
      "step": 670
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.7957620782939648e-06,
      "loss": 1.2707,
      "step": 675
    },
    {
      "epoch": 4.96,
      "learning_rate": 4.504192183509004e-07,
      "loss": 1.1234,
      "step": 680
    },
    {
      "epoch": 5.0,
      "learning_rate": 0.0,
      "loss": 1.7955,
      "step": 685
    },
    {
      "epoch": 5.0,
      "eval_loss": 1.7918421030044556,
      "eval_runtime": 8.0478,
      "eval_samples_per_second": 22.491,
      "eval_steps_per_second": 2.858,
      "step": 685
    },
    {
      "epoch": 5.04,
      "learning_rate": 4.5041921835088515e-07,
      "loss": 1.0568,
      "step": 690
    },
    {
      "epoch": 5.07,
      "learning_rate": 1.795762078293942e-06,
      "loss": 1.1207,
      "step": 695
    },
    {
      "epoch": 5.11,
      "learning_rate": 4.018361866144427e-06,
      "loss": 1.2954,
      "step": 700
    },
    {
      "epoch": 5.15,
      "learning_rate": 7.0890319446091785e-06,
      "loss": 1.2248,
      "step": 705
    },
    {
      "epoch": 5.18,
      "learning_rate": 1.0967449024575855e-05,
      "loss": 1.2091,
      "step": 710
    },
    {
      "epoch": 5.22,
      "learning_rate": 1.5602682680691756e-05,
      "loss": 1.2467,
      "step": 715
    },
    {
      "epoch": 5.26,
      "learning_rate": 2.093386415726413e-05,
      "loss": 1.1092,
      "step": 720
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.6890985681885842e-05,
      "loss": 1.2179,
      "step": 725
    },
    {
      "epoch": 5.33,
      "learning_rate": 3.339581979039557e-05,
      "loss": 1.0797,
      "step": 730
    },
    {
      "epoch": 5.36,
      "learning_rate": 4.036294659082037e-05,
      "loss": 1.2595,
      "step": 735
    },
    {
      "epoch": 5.4,
      "learning_rate": 4.77008754765249e-05,
      "loss": 1.2369,
      "step": 740
    },
    {
      "epoch": 5.44,
      "learning_rate": 5.5313246558500285e-05,
      "loss": 1.2623,
      "step": 745
    },
    {
      "epoch": 5.47,
      "learning_rate": 6.310009603987897e-05,
      "loss": 1.2695,
      "step": 750
    },
    {
      "epoch": 5.51,
      "learning_rate": 7.095916891608921e-05,
      "loss": 1.1535,
      "step": 755
    },
    {
      "epoch": 5.55,
      "learning_rate": 7.87872617625871e-05,
      "loss": 1.0823,
      "step": 760
    },
    {
      "epoch": 5.58,
      "learning_rate": 8.648157797700775e-05,
      "loss": 1.0971,
      "step": 765
    },
    {
      "epoch": 5.62,
      "learning_rate": 9.394107767902946e-05,
      "loss": 0.978,
      "step": 770
    },
    {
      "epoch": 5.66,
      "learning_rate": 0.00010106780454139057,
      "loss": 1.2448,
      "step": 775
    },
    {
      "epoch": 5.69,
      "learning_rate": 0.00010776817212844258,
      "loss": 1.088,
      "step": 780
    },
    {
      "epoch": 5.73,
      "learning_rate": 0.00011395419285035404,
      "loss": 1.1032,
      "step": 785
    },
    {
      "epoch": 5.77,
      "learning_rate": 0.00011954463339463875,
      "loss": 1.1252,
      "step": 790
    },
    {
      "epoch": 5.8,
      "learning_rate": 0.0001244660814621658,
      "loss": 1.2392,
      "step": 795
    },
    {
      "epoch": 5.84,
      "learning_rate": 0.00012865390979953207,
      "loss": 1.1824,
      "step": 800
    },
    {
      "epoch": 5.88,
      "learning_rate": 0.00013205312486836131,
      "loss": 1.1355,
      "step": 805
    },
    {
      "epoch": 5.91,
      "learning_rate": 0.00013461908900701185,
      "loss": 1.1338,
      "step": 810
    },
    {
      "epoch": 5.95,
      "learning_rate": 0.00013631810660144153,
      "loss": 1.3598,
      "step": 815
    },
    {
      "epoch": 5.99,
      "learning_rate": 0.0001371278665677772,
      "loss": 1.3885,
      "step": 820
    },
    {
      "epoch": 6.0,
      "eval_loss": 1.7604948282241821,
      "eval_runtime": 8.058,
      "eval_samples_per_second": 22.462,
      "eval_steps_per_second": 2.854,
      "step": 822
    },
    {
      "epoch": 6.11,
      "learning_rate": 0.00013306291378591335,
      "loss": 1.3683,
      "step": 825
    },
    {
      "epoch": 6.15,
      "learning_rate": 0.00012990319912618617,
      "loss": 1.3883,
      "step": 830
    },
    {
      "epoch": 6.19,
      "learning_rate": 0.00012591446386292745,
      "loss": 1.289,
      "step": 835
    },
    {
      "epoch": 6.22,
      "learning_rate": 0.00012115064879796196,
      "loss": 1.3834,
      "step": 840
    },
    {
      "epoch": 6.26,
      "learning_rate": 0.00011567617635779509,
      "loss": 1.2739,
      "step": 845
    },
    {
      "epoch": 6.3,
      "learning_rate": 0.00010956507939081116,
      "loss": 1.0868,
      "step": 850
    },
    {
      "epoch": 6.33,
      "learning_rate": 0.00010290000000000012,
      "loss": 1.3412,
      "step": 855
    },
    {
      "epoch": 6.37,
      "learning_rate": 9.577107195028614e-05,
      "loss": 1.2163,
      "step": 860
    },
    {
      "epoch": 6.41,
      "learning_rate": 8.827470176398086e-05,
      "loss": 1.1091,
      "step": 865
    },
    {
      "epoch": 6.44,
      "learning_rate": 8.051226498795135e-05,
      "loss": 1.2886,
      "step": 870
    },
    {
      "epoch": 6.48,
      "learning_rate": 7.258873526325866e-05,
      "loss": 1.5192,
      "step": 875
    },
    {
      "epoch": 6.52,
      "learning_rate": 6.461126473674146e-05,
      "loss": 1.3007,
      "step": 880
    },
    {
      "epoch": 6.56,
      "learning_rate": 5.6687735012048764e-05,
      "loss": 1.0916,
      "step": 885
    },
    {
      "epoch": 6.59,
      "learning_rate": 4.892529823601925e-05,
      "loss": 1.1481,
      "step": 890
    },
    {
      "epoch": 6.63,
      "learning_rate": 4.1428928049713965e-05,
      "loss": 1.2328,
      "step": 895
    },
    {
      "epoch": 6.67,
      "learning_rate": 3.429999999999998e-05,
      "loss": 1.0816,
      "step": 900
    },
    {
      "epoch": 6.7,
      "learning_rate": 2.763492060918892e-05,
      "loss": 1.2086,
      "step": 905
    },
    {
      "epoch": 6.74,
      "learning_rate": 2.152382364220499e-05,
      "loss": 0.9535,
      "step": 910
    },
    {
      "epoch": 6.78,
      "learning_rate": 1.604935120203811e-05,
      "loss": 1.3744,
      "step": 915
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.128553613707261e-05,
      "loss": 1.2614,
      "step": 920
    },
    {
      "epoch": 6.85,
      "learning_rate": 7.296800873813895e-06,
      "loss": 1.2024,
      "step": 925
    },
    {
      "epoch": 6.89,
      "learning_rate": 4.137086214086697e-06,
      "loss": 1.2731,
      "step": 930
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.8491218782241098e-06,
      "loss": 1.1451,
      "step": 935
    },
    {
      "epoch": 6.96,
      "learning_rate": 4.638486589027391e-07,
      "loss": 1.2302,
      "step": 940
    },
    {
      "epoch": 7.0,
      "learning_rate": 0.0,
      "loss": 1.2435,
      "step": 945
    },
    {
      "epoch": 7.0,
      "eval_loss": 1.1818922758102417,
      "eval_runtime": 4.3047,
      "eval_samples_per_second": 45.067,
      "eval_steps_per_second": 5.808,
      "step": 945
    }
  ],
  "max_steps": 1350,
  "num_train_epochs": 10,
  "total_flos": 984287084544000.0,
  "trial_name": null,
  "trial_params": null
}