{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 106.63897763578275,
  "global_step": 1280,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.8,
      "learning_rate": 9.99969999399988e-05,
      "loss": 3.2772,
      "step": 10
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.99869997399948e-05,
      "loss": 0.587,
      "step": 20
    },
    {
      "epoch": 2.48,
      "learning_rate": 9.99759995199904e-05,
      "loss": 0.2432,
      "step": 30
    },
    {
      "epoch": 3.32,
      "learning_rate": 9.99659993199864e-05,
      "loss": 0.1511,
      "step": 40
    },
    {
      "epoch": 4.16,
      "learning_rate": 9.9954999099982e-05,
      "loss": 0.1104,
      "step": 50
    },
    {
      "epoch": 4.96,
      "learning_rate": 9.994499889997801e-05,
      "loss": 0.0839,
      "step": 60
    },
    {
      "epoch": 5.32,
      "eval_loss": 0.116455078125,
      "eval_runtime": 85.2162,
      "eval_samples_per_second": 38.044,
      "eval_steps_per_second": 0.481,
      "step": 64
    },
    {
      "epoch": 5.8,
      "learning_rate": 9.9934998699974e-05,
      "loss": 0.0716,
      "step": 70
    },
    {
      "epoch": 6.64,
      "learning_rate": 9.99239984799696e-05,
      "loss": 0.0665,
      "step": 80
    },
    {
      "epoch": 7.48,
      "learning_rate": 9.99139982799656e-05,
      "loss": 0.057,
      "step": 90
    },
    {
      "epoch": 8.32,
      "learning_rate": 9.990299805996121e-05,
      "loss": 0.0521,
      "step": 100
    },
    {
      "epoch": 9.16,
      "learning_rate": 9.98929978599572e-05,
      "loss": 0.0466,
      "step": 110
    },
    {
      "epoch": 9.96,
      "learning_rate": 9.98829976599532e-05,
      "loss": 0.0417,
      "step": 120
    },
    {
      "epoch": 10.64,
      "eval_loss": 0.12420654296875,
      "eval_runtime": 85.7837,
      "eval_samples_per_second": 37.793,
      "eval_steps_per_second": 0.478,
      "step": 128
    },
    {
      "epoch": 10.8,
      "learning_rate": 9.987199743994881e-05,
      "loss": 0.0388,
      "step": 130
    },
    {
      "epoch": 11.64,
      "learning_rate": 9.986199723994481e-05,
      "loss": 0.0371,
      "step": 140
    },
    {
      "epoch": 12.48,
      "learning_rate": 9.98509970199404e-05,
      "loss": 0.0342,
      "step": 150
    },
    {
      "epoch": 13.32,
      "learning_rate": 9.98409968199364e-05,
      "loss": 0.0303,
      "step": 160
    },
    {
      "epoch": 14.16,
      "learning_rate": 9.982999659993201e-05,
      "loss": 0.0287,
      "step": 170
    },
    {
      "epoch": 14.96,
      "learning_rate": 9.981999639992801e-05,
      "loss": 0.0262,
      "step": 180
    },
    {
      "epoch": 15.8,
      "learning_rate": 9.9809996199924e-05,
      "loss": 0.0242,
      "step": 190
    },
    {
      "epoch": 15.96,
      "eval_loss": 0.13671875,
      "eval_runtime": 71.3468,
      "eval_samples_per_second": 45.44,
      "eval_steps_per_second": 0.575,
      "step": 192
    },
    {
      "epoch": 16.64,
      "learning_rate": 9.979899597991961e-05,
      "loss": 0.0239,
      "step": 200
    },
    {
      "epoch": 17.48,
      "learning_rate": 9.978899577991561e-05,
      "loss": 0.0216,
      "step": 210
    },
    {
      "epoch": 18.32,
      "learning_rate": 9.97779955599112e-05,
      "loss": 0.0204,
      "step": 220
    },
    {
      "epoch": 19.16,
      "learning_rate": 9.97679953599072e-05,
      "loss": 0.0194,
      "step": 230
    },
    {
      "epoch": 19.96,
      "learning_rate": 9.97579951599032e-05,
      "loss": 0.0167,
      "step": 240
    },
    {
      "epoch": 20.8,
      "learning_rate": 9.974699493989881e-05,
      "loss": 0.0177,
      "step": 250
    },
    {
      "epoch": 21.32,
      "eval_loss": 0.15576171875,
      "eval_runtime": 73.465,
      "eval_samples_per_second": 44.13,
      "eval_steps_per_second": 0.558,
      "step": 256
    },
    {
      "epoch": 21.64,
      "learning_rate": 9.973699473989481e-05,
      "loss": 0.0155,
      "step": 260
    },
    {
      "epoch": 22.48,
      "learning_rate": 9.97259945198904e-05,
      "loss": 0.015,
      "step": 270
    },
    {
      "epoch": 23.32,
      "learning_rate": 9.971599431988641e-05,
      "loss": 0.0148,
      "step": 280
    },
    {
      "epoch": 24.16,
      "learning_rate": 9.9704994099882e-05,
      "loss": 0.0137,
      "step": 290
    },
    {
      "epoch": 24.96,
      "learning_rate": 9.969499389987799e-05,
      "loss": 0.0117,
      "step": 300
    },
    {
      "epoch": 25.8,
      "learning_rate": 9.96839936798736e-05,
      "loss": 0.0118,
      "step": 310
    },
    {
      "epoch": 26.64,
      "learning_rate": 9.96739934798696e-05,
      "loss": 0.0111,
      "step": 320
    },
    {
      "epoch": 26.64,
      "eval_loss": 0.1729736328125,
      "eval_runtime": 73.6,
      "eval_samples_per_second": 44.049,
      "eval_steps_per_second": 0.557,
      "step": 320
    },
    {
      "epoch": 27.48,
      "learning_rate": 9.96629932598652e-05,
      "loss": 0.0109,
      "step": 330
    },
    {
      "epoch": 28.32,
      "learning_rate": 9.96529930598612e-05,
      "loss": 0.0102,
      "step": 340
    },
    {
      "epoch": 29.16,
      "learning_rate": 9.96419928398568e-05,
      "loss": 0.0094,
      "step": 350
    },
    {
      "epoch": 29.96,
      "learning_rate": 9.96319926398528e-05,
      "loss": 0.0088,
      "step": 360
    },
    {
      "epoch": 30.8,
      "learning_rate": 9.96219924398488e-05,
      "loss": 0.0084,
      "step": 370
    },
    {
      "epoch": 31.64,
      "learning_rate": 9.96109922198444e-05,
      "loss": 0.0082,
      "step": 380
    },
    {
      "epoch": 31.96,
      "eval_loss": 0.1884765625,
      "eval_runtime": 72.563,
      "eval_samples_per_second": 44.678,
      "eval_steps_per_second": 0.565,
      "step": 384
    },
    {
      "epoch": 32.48,
      "learning_rate": 9.96009920198404e-05,
      "loss": 0.0082,
      "step": 390
    },
    {
      "epoch": 33.32,
      "learning_rate": 9.9589991799836e-05,
      "loss": 0.0078,
      "step": 400
    },
    {
      "epoch": 34.16,
      "learning_rate": 9.9579991599832e-05,
      "loss": 0.0068,
      "step": 410
    },
    {
      "epoch": 34.96,
      "learning_rate": 9.9569991399828e-05,
      "loss": 0.0063,
      "step": 420
    },
    {
      "epoch": 35.8,
      "learning_rate": 9.95589911798236e-05,
      "loss": 0.0061,
      "step": 430
    },
    {
      "epoch": 36.64,
      "learning_rate": 9.95489909798196e-05,
      "loss": 0.0064,
      "step": 440
    },
    {
      "epoch": 37.32,
      "eval_loss": 0.191162109375,
      "eval_runtime": 72.3445,
      "eval_samples_per_second": 44.813,
      "eval_steps_per_second": 0.567,
      "step": 448
    },
    {
      "epoch": 37.48,
      "learning_rate": 9.95379907598152e-05,
      "loss": 0.0058,
      "step": 450
    },
    {
      "epoch": 38.32,
      "learning_rate": 9.95279905598112e-05,
      "loss": 0.0051,
      "step": 460
    },
    {
      "epoch": 39.16,
      "learning_rate": 9.95169903398068e-05,
      "loss": 0.0053,
      "step": 470
    },
    {
      "epoch": 39.96,
      "learning_rate": 9.950699013980281e-05,
      "loss": 0.0049,
      "step": 480
    },
    {
      "epoch": 40.8,
      "learning_rate": 9.94969899397988e-05,
      "loss": 0.0048,
      "step": 490
    },
    {
      "epoch": 41.64,
      "learning_rate": 9.94859897197944e-05,
      "loss": 0.0047,
      "step": 500
    },
    {
      "epoch": 42.48,
      "learning_rate": 9.94759895197904e-05,
      "loss": 0.0047,
      "step": 510
    },
    {
      "epoch": 42.64,
      "eval_loss": 0.195556640625,
      "eval_runtime": 71.2606,
      "eval_samples_per_second": 45.495,
      "eval_steps_per_second": 0.575,
      "step": 512
    },
    {
      "epoch": 43.32,
      "learning_rate": 9.946498929978601e-05,
      "loss": 0.0044,
      "step": 520
    },
    {
      "epoch": 44.16,
      "learning_rate": 9.9454989099782e-05,
      "loss": 0.0044,
      "step": 530
    },
    {
      "epoch": 44.96,
      "learning_rate": 9.9444988899778e-05,
      "loss": 0.0039,
      "step": 540
    },
    {
      "epoch": 45.8,
      "learning_rate": 9.943398867977361e-05,
      "loss": 0.0036,
      "step": 550
    },
    {
      "epoch": 46.64,
      "learning_rate": 9.942398847976961e-05,
      "loss": 0.0037,
      "step": 560
    },
    {
      "epoch": 47.48,
      "learning_rate": 9.94129882597652e-05,
      "loss": 0.0034,
      "step": 570
    },
    {
      "epoch": 47.96,
      "eval_loss": 0.2215576171875,
      "eval_runtime": 71.3132,
      "eval_samples_per_second": 45.461,
      "eval_steps_per_second": 0.575,
      "step": 576
    },
    {
      "epoch": 48.32,
      "learning_rate": 9.94029880597612e-05,
      "loss": 0.0032,
      "step": 580
    },
    {
      "epoch": 49.16,
      "learning_rate": 9.93919878397568e-05,
      "loss": 0.0035,
      "step": 590
    },
    {
      "epoch": 49.96,
      "learning_rate": 9.938198763975281e-05,
      "loss": 0.0032,
      "step": 600
    },
    {
      "epoch": 50.8,
      "learning_rate": 9.93709874197484e-05,
      "loss": 0.003,
      "step": 610
    },
    {
      "epoch": 51.64,
      "learning_rate": 9.93609872197444e-05,
      "loss": 0.0031,
      "step": 620
    },
    {
      "epoch": 52.48,
      "learning_rate": 9.934998699974e-05,
      "loss": 0.0029,
      "step": 630
    },
    {
      "epoch": 53.32,
      "learning_rate": 9.933998679973599e-05,
      "loss": 0.0029,
      "step": 640
    },
    {
      "epoch": 53.32,
      "eval_loss": 0.221923828125,
      "eval_runtime": 71.4852,
      "eval_samples_per_second": 45.352,
      "eval_steps_per_second": 0.574,
      "step": 640
    },
    {
      "epoch": 54.16,
      "learning_rate": 9.93289865797316e-05,
      "loss": 0.0028,
      "step": 650
    },
    {
      "epoch": 54.96,
      "learning_rate": 9.93189863797276e-05,
      "loss": 0.0027,
      "step": 660
    },
    {
      "epoch": 55.8,
      "learning_rate": 9.93089861797236e-05,
      "loss": 0.0026,
      "step": 670
    },
    {
      "epoch": 56.64,
      "learning_rate": 9.929798595971919e-05,
      "loss": 0.0022,
      "step": 680
    },
    {
      "epoch": 57.48,
      "learning_rate": 9.92879857597152e-05,
      "loss": 0.0022,
      "step": 690
    },
    {
      "epoch": 58.32,
      "learning_rate": 9.92769855397108e-05,
      "loss": 0.0022,
      "step": 700
    },
    {
      "epoch": 58.64,
      "eval_loss": 0.2276611328125,
      "eval_runtime": 71.4916,
      "eval_samples_per_second": 45.348,
      "eval_steps_per_second": 0.573,
      "step": 704
    },
    {
      "epoch": 59.16,
      "learning_rate": 9.92669853397068e-05,
      "loss": 0.0025,
      "step": 710
    },
    {
      "epoch": 59.96,
      "learning_rate": 9.925698513970279e-05,
      "loss": 0.0021,
      "step": 720
    },
    {
      "epoch": 60.8,
      "learning_rate": 9.92459849196984e-05,
      "loss": 0.0024,
      "step": 730
    },
    {
      "epoch": 61.64,
      "learning_rate": 9.92359847196944e-05,
      "loss": 0.0022,
      "step": 740
    },
    {
      "epoch": 62.48,
      "learning_rate": 9.922498449968999e-05,
      "loss": 0.0018,
      "step": 750
    },
    {
      "epoch": 63.32,
      "learning_rate": 9.9214984299686e-05,
      "loss": 0.0017,
      "step": 760
    },
    {
      "epoch": 63.96,
      "eval_loss": 0.24072265625,
      "eval_runtime": 72.0268,
      "eval_samples_per_second": 45.011,
      "eval_steps_per_second": 0.569,
      "step": 768
    },
    {
      "epoch": 64.16,
      "learning_rate": 9.92039840796816e-05,
      "loss": 0.0021,
      "step": 770
    },
    {
      "epoch": 64.96,
      "learning_rate": 9.91939838796776e-05,
      "loss": 0.0017,
      "step": 780
    },
    {
      "epoch": 65.8,
      "learning_rate": 9.91839836796736e-05,
      "loss": 0.0022,
      "step": 790
    },
    {
      "epoch": 66.64,
      "learning_rate": 9.91729834596692e-05,
      "loss": 0.0019,
      "step": 800
    },
    {
      "epoch": 67.48,
      "learning_rate": 9.91629832596652e-05,
      "loss": 0.0017,
      "step": 810
    },
    {
      "epoch": 68.32,
      "learning_rate": 9.91519830396608e-05,
      "loss": 0.0017,
      "step": 820
    },
    {
      "epoch": 69.16,
      "learning_rate": 9.91419828396568e-05,
      "loss": 0.0018,
      "step": 830
    },
    {
      "epoch": 69.32,
      "eval_loss": 0.24853515625,
      "eval_runtime": 73.1565,
      "eval_samples_per_second": 44.316,
      "eval_steps_per_second": 0.56,
      "step": 832
    },
    {
      "epoch": 69.96,
      "learning_rate": 9.91319826396528e-05,
      "loss": 0.0018,
      "step": 840
    },
    {
      "epoch": 70.8,
      "learning_rate": 9.91209824196484e-05,
      "loss": 0.0016,
      "step": 850
    },
    {
      "epoch": 71.64,
      "learning_rate": 9.91109822196444e-05,
      "loss": 0.0015,
      "step": 860
    },
    {
      "epoch": 72.48,
      "learning_rate": 9.909998199964e-05,
      "loss": 0.0016,
      "step": 870
    },
    {
      "epoch": 73.32,
      "learning_rate": 9.9089981799636e-05,
      "loss": 0.0016,
      "step": 880
    },
    {
      "epoch": 74.16,
      "learning_rate": 9.90789815796316e-05,
      "loss": 0.0015,
      "step": 890
    },
    {
      "epoch": 74.64,
      "eval_loss": 0.23681640625,
      "eval_runtime": 71.4778,
      "eval_samples_per_second": 45.357,
      "eval_steps_per_second": 0.574,
      "step": 896
    },
    {
      "epoch": 74.96,
      "learning_rate": 9.90689813796276e-05,
      "loss": 0.0016,
      "step": 900
    },
    {
      "epoch": 75.8,
      "learning_rate": 9.905798115962319e-05,
      "loss": 0.0016,
      "step": 910
    },
    {
      "epoch": 76.64,
      "learning_rate": 9.90479809596192e-05,
      "loss": 0.0016,
      "step": 920
    },
    {
      "epoch": 77.48,
      "learning_rate": 9.90369807396148e-05,
      "loss": 0.0014,
      "step": 930
    },
    {
      "epoch": 78.32,
      "learning_rate": 9.90269805396108e-05,
      "loss": 0.0011,
      "step": 940
    },
    {
      "epoch": 79.16,
      "learning_rate": 9.901598031960639e-05,
      "loss": 0.0015,
      "step": 950
    },
    {
      "epoch": 79.96,
      "learning_rate": 9.900598011960239e-05,
      "loss": 0.0012,
      "step": 960
    },
    {
      "epoch": 79.96,
      "eval_loss": 0.254150390625,
      "eval_runtime": 76.7721,
      "eval_samples_per_second": 42.229,
      "eval_steps_per_second": 0.534,
      "step": 960
    },
    {
      "epoch": 80.8,
      "learning_rate": 9.89959799195984e-05,
      "loss": 0.0013,
      "step": 970
    },
    {
      "epoch": 81.64,
      "learning_rate": 9.898497969959399e-05,
      "loss": 0.0014,
      "step": 980
    },
    {
      "epoch": 82.48,
      "learning_rate": 9.897497949958999e-05,
      "loss": 0.0015,
      "step": 990
    },
    {
      "epoch": 83.32,
      "learning_rate": 9.89639792795856e-05,
      "loss": 0.0013,
      "step": 1000
    },
    {
      "epoch": 84.16,
      "learning_rate": 9.89539790795816e-05,
      "loss": 0.0014,
      "step": 1010
    },
    {
      "epoch": 84.96,
      "learning_rate": 9.894397887957759e-05,
      "loss": 0.0012,
      "step": 1020
    },
    {
      "epoch": 85.32,
      "eval_loss": 0.253662109375,
      "eval_runtime": 76.0139,
      "eval_samples_per_second": 42.65,
      "eval_steps_per_second": 0.539,
      "step": 1024
    },
    {
      "epoch": 85.8,
      "learning_rate": 9.893297865957319e-05,
      "loss": 0.0014,
      "step": 1030
    },
    {
      "epoch": 86.64,
      "learning_rate": 9.89229784595692e-05,
      "loss": 0.0011,
      "step": 1040
    },
    {
      "epoch": 87.48,
      "learning_rate": 9.89119782395648e-05,
      "loss": 0.0012,
      "step": 1050
    },
    {
      "epoch": 88.32,
      "learning_rate": 9.890197803956079e-05,
      "loss": 0.0012,
      "step": 1060
    },
    {
      "epoch": 89.16,
      "learning_rate": 9.88909778195564e-05,
      "loss": 0.0013,
      "step": 1070
    },
    {
      "epoch": 89.96,
      "learning_rate": 9.88809776195524e-05,
      "loss": 0.0011,
      "step": 1080
    },
    {
      "epoch": 90.64,
      "eval_loss": 0.268798828125,
      "eval_runtime": 75.8685,
      "eval_samples_per_second": 42.732,
      "eval_steps_per_second": 0.54,
      "step": 1088
    },
    {
      "epoch": 90.8,
      "learning_rate": 9.88709774195484e-05,
      "loss": 0.0011,
      "step": 1090
    },
    {
      "epoch": 91.64,
      "learning_rate": 9.885997719954399e-05,
      "loss": 0.0013,
      "step": 1100
    },
    {
      "epoch": 92.48,
      "learning_rate": 9.884997699954e-05,
      "loss": 0.001,
      "step": 1110
    },
    {
      "epoch": 93.32,
      "learning_rate": 9.88389767795356e-05,
      "loss": 0.0011,
      "step": 1120
    },
    {
      "epoch": 94.16,
      "learning_rate": 9.88289765795316e-05,
      "loss": 0.0013,
      "step": 1130
    },
    {
      "epoch": 94.96,
      "learning_rate": 9.881897637952759e-05,
      "loss": 0.001,
      "step": 1140
    },
    {
      "epoch": 95.8,
      "learning_rate": 9.88079761595232e-05,
      "loss": 0.0016,
      "step": 1150
    },
    {
      "epoch": 95.96,
      "eval_loss": 0.24560546875,
      "eval_runtime": 75.5498,
      "eval_samples_per_second": 42.912,
      "eval_steps_per_second": 0.543,
      "step": 1152
    },
    {
      "epoch": 96.64,
      "learning_rate": 9.87979759595192e-05,
      "loss": 0.0011,
      "step": 1160
    },
    {
      "epoch": 97.48,
      "learning_rate": 9.878697573951479e-05,
      "loss": 0.0011,
      "step": 1170
    },
    {
      "epoch": 98.32,
      "learning_rate": 9.87769755395108e-05,
      "loss": 0.0013,
      "step": 1180
    },
    {
      "epoch": 99.16,
      "learning_rate": 9.87659753195064e-05,
      "loss": 0.0011,
      "step": 1190
    },
    {
      "epoch": 99.96,
      "learning_rate": 9.87559751195024e-05,
      "loss": 0.0011,
      "step": 1200
    },
    {
      "epoch": 100.8,
      "learning_rate": 9.8744974899498e-05,
      "loss": 0.0008,
      "step": 1210
    },
    {
      "epoch": 101.32,
      "eval_loss": 0.278076171875,
      "eval_runtime": 74.8396,
      "eval_samples_per_second": 43.319,
      "eval_steps_per_second": 0.548,
      "step": 1216
    },
    {
      "epoch": 101.64,
      "learning_rate": 9.8734974699494e-05,
      "loss": 0.001,
      "step": 1220
    },
    {
      "epoch": 102.48,
      "learning_rate": 9.872397447948958e-05,
      "loss": 0.0008,
      "step": 1230
    },
    {
      "epoch": 103.32,
      "learning_rate": 9.87139742794856e-05,
      "loss": 0.0009,
      "step": 1240
    },
    {
      "epoch": 104.16,
      "learning_rate": 9.870297405948119e-05,
      "loss": 0.0008,
      "step": 1250
    },
    {
      "epoch": 104.96,
      "learning_rate": 9.869297385947719e-05,
      "loss": 0.0008,
      "step": 1260
    },
    {
      "epoch": 105.8,
      "learning_rate": 9.86829736594732e-05,
      "loss": 0.0011,
      "step": 1270
    },
    {
      "epoch": 106.64,
      "learning_rate": 9.86719734394688e-05,
      "loss": 0.0009,
      "step": 1280
    },
    {
      "epoch": 106.64,
      "eval_loss": 0.2666015625,
      "eval_runtime": 75.7897,
      "eval_samples_per_second": 42.776,
      "eval_steps_per_second": 0.541,
      "step": 1280
    }
  ],
  "max_steps": 100000,
  "num_train_epochs": 8334,
  "total_flos": 6810132464640.0,
  "trial_name": null,
  "trial_params": null
}