{
  "best_metric": 4.087806224822998,
  "best_model_checkpoint": "/mmfs1/gscratch/stf/abhinavp/corpus-filtering/outputs/pp-mod-subj/lstm/3/checkpoints/checkpoint-992147",
  "epoch": 1.0250002784396635,
  "eval_steps": 10,
  "global_step": 992147,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 4.999998362119627e-05, "loss": 10.8197, "step": 1},
    {"epoch": 0.0, "learning_rate": 4.999161405248948e-05, "loss": 7.5807, "step": 512},
    {"epoch": 0.0, "learning_rate": 4.998322810497896e-05, "loss": 7.0567, "step": 1024},
    {"epoch": 0.0, "learning_rate": 4.997484215746844e-05, "loss": 6.9828, "step": 1536},
    {"epoch": 0.0, "learning_rate": 4.996645620995792e-05, "loss": 6.953, "step": 2048},
    {"epoch": 0.0, "learning_rate": 4.99580702624474e-05, "loss": 6.858, "step": 2560},
    {"epoch": 0.0, "learning_rate": 4.994968431493688e-05, "loss": 6.7027, "step": 3072},
    {"epoch": 0.0, "learning_rate": 4.994129836742636e-05, "loss": 6.5933, "step": 3584},
    {"epoch": 0.0, "learning_rate": 4.993291241991584e-05, "loss": 6.5066, "step": 4096},
    {"epoch": 0.0, "learning_rate": 4.992452647240532e-05, "loss": 6.416, "step": 4608},
    {"epoch": 0.0, "learning_rate": 4.99161405248948e-05, "loss": 6.3551, "step": 5120},
    {"epoch": 0.0, "learning_rate": 4.990775457738428e-05, "loss": 6.2845, "step": 5632},
    {"epoch": 0.0, "learning_rate": 4.989936862987376e-05, "loss": 6.2164, "step": 6144},
    {"epoch": 0.0, "learning_rate": 4.989098268236324e-05, "loss": 6.1439, "step": 6656},
    {"epoch": 0.0, "learning_rate": 4.988259673485272e-05, "loss": 6.0822, "step": 7168},
    {"epoch": 0.0, "learning_rate": 4.98742107873422e-05, "loss": 6.0393, "step": 7680},
    {"epoch": 0.0, "learning_rate": 4.986582483983168e-05, "loss": 5.9923, "step": 8192},
    {"epoch": 0.0, "learning_rate": 4.985743889232116e-05, "loss": 5.9439, "step": 8704},
    {"epoch": 0.0, "learning_rate": 4.984905294481064e-05, "loss": 5.9108, "step": 9216},
    {"epoch": 0.0, "learning_rate": 4.984066699730012e-05, "loss": 5.869, "step": 9728},
    {"epoch": 0.0, "learning_rate": 4.983229742859333e-05, "loss": 5.8431, "step": 10240},
    {"epoch": 0.0, "learning_rate": 4.9823927859886547e-05, "loss": 5.8055, "step": 10752},
    {"epoch": 0.0, "learning_rate": 4.9815541912376026e-05, "loss": 5.7702, "step": 11264},
    {"epoch": 0.0, "learning_rate": 4.9807155964865506e-05, "loss": 5.7497, "step": 11776},
    {"epoch": 0.0, "learning_rate": 4.9798770017354986e-05, "loss": 5.7182, "step": 12288},
    {"epoch": 0.0, "learning_rate": 4.9790384069844466e-05, "loss": 5.6916, "step": 12800},
    {"epoch": 0.0, "learning_rate": 4.9781998122333946e-05, "loss": 5.6686, "step": 13312},
    {"epoch": 0.0, "learning_rate": 4.9773628553627155e-05, "loss": 5.6477, "step": 13824},
    {"epoch": 0.0, "learning_rate": 4.9765242606116635e-05, "loss": 5.6137, "step": 14336},
    {"epoch": 0.0, "learning_rate": 4.9756856658606115e-05, "loss": 5.6011, "step": 14848},
    {"epoch": 0.01, "learning_rate": 4.9748470711095595e-05, "loss": 5.5745, "step": 15360},
    {"epoch": 0.01, "learning_rate": 4.9740101142388804e-05, "loss": 5.5564, "step": 15872},
    {"epoch": 0.01, "learning_rate": 4.9731715194878284e-05, "loss": 5.5438, "step": 16384},
    {"epoch": 0.01, "learning_rate": 4.9723329247367764e-05, "loss": 5.5274, "step": 16896},
    {"epoch": 0.01, "learning_rate": 4.9714943299857244e-05, "loss": 5.5055, "step": 17408},
    {"epoch": 0.01, "learning_rate": 4.9706557352346724e-05, "loss": 5.4965, "step": 17920},
    {"epoch": 0.01, "learning_rate": 4.969820416244367e-05, "loss": 5.4836, "step": 18432},
    {"epoch": 0.01, "learning_rate": 4.968981821493315e-05, "loss": 5.4643, "step": 18944},
    {"epoch": 0.01, "learning_rate": 4.968143226742263e-05, "loss": 5.4477, "step": 19456},
    {"epoch": 0.01, "learning_rate": 4.967304631991211e-05, "loss": 5.437, "step": 19968},
    {"epoch": 0.01, "learning_rate": 4.966466037240159e-05, "loss": 5.4124, "step": 20480},
    {"epoch": 0.01, "learning_rate": 4.965627442489107e-05, "loss": 5.4114, "step": 20992},
    {"epoch": 0.01, "learning_rate": 4.964788847738054e-05, "loss": 5.383, "step": 21504},
    {"epoch": 0.01, "learning_rate": 4.963950252987002e-05, "loss": 5.3832, "step": 22016},
    {"epoch": 0.01, "learning_rate": 4.96311165823595e-05, "loss": 5.3751, "step": 22528},
    {"epoch": 0.01, "learning_rate": 4.962273063484898e-05, "loss": 5.3664, "step": 23040},
    {"epoch": 0.01, "learning_rate": 4.961434468733847e-05, "loss": 5.3403, "step": 23552},
    {"epoch": 0.01, "learning_rate": 4.960595873982795e-05, "loss": 5.3392, "step": 24064},
    {"epoch": 0.01, "learning_rate": 4.959758917112116e-05, "loss": 5.3343, "step": 24576},
    {"epoch": 0.01, "learning_rate": 4.958920322361064e-05, "loss": 5.3127, "step": 25088},
    {"epoch": 0.01, "learning_rate": 4.958081727610012e-05, "loss": 5.2984, "step": 25600},
    {"epoch": 0.01, "learning_rate": 4.957244770739333e-05, "loss": 5.2951, "step": 26112},
    {"epoch": 0.01, "learning_rate": 4.956406175988281e-05, "loss": 5.3003, "step": 26624},
    {"epoch": 0.01, "learning_rate": 4.955567581237229e-05, "loss": 5.2678, "step": 27136},
    {"epoch": 0.01, "learning_rate": 4.954728986486177e-05, "loss": 5.2887, "step": 27648},
    {"epoch": 0.01, "learning_rate": 4.953890391735125e-05, "loss": 5.2499, "step": 28160},
    {"epoch": 0.01, "learning_rate": 4.9530534348644456e-05, "loss": 5.26, "step": 28672},
    {"epoch": 0.01, "learning_rate": 4.9522148401133936e-05, "loss": 5.2385, "step": 29184},
    {"epoch": 0.01, "learning_rate": 4.951376245362342e-05, "loss": 5.2342, "step": 29696},
    {"epoch": 0.01, "learning_rate": 4.95053765061129e-05, "loss": 5.2247, "step": 30208},
    {"epoch": 0.01, "learning_rate": 4.949699055860238e-05, "loss": 5.2193, "step": 30720},
    {"epoch": 0.01, "learning_rate": 4.948860461109186e-05, "loss": 5.1988, "step": 31232},
    {"epoch": 0.01, "learning_rate": 4.948021866358134e-05, "loss": 5.2072, "step": 31744},
    {"epoch": 0.01, "learning_rate": 4.947183271607082e-05, "loss": 5.1893, "step": 32256},
    {"epoch": 0.01, "learning_rate": 4.946346314736403e-05, "loss": 5.1965, "step": 32768},
    {"epoch": 0.01, "learning_rate": 4.945507719985351e-05, "loss": 5.1779, "step": 33280},
    {"epoch": 0.01, "learning_rate": 4.944669125234299e-05, "loss": 5.1677, "step": 33792},
    {"epoch": 0.01, "learning_rate": 4.943830530483247e-05, "loss": 5.1516, "step": 34304},
    {"epoch": 0.01, "learning_rate": 4.942991935732195e-05, "loss": 5.1594, "step": 34816},
    {"epoch": 0.01, "learning_rate": 4.942154978861516e-05, "loss": 5.153, "step": 35328},
    {"epoch": 0.01, "learning_rate": 4.941316384110464e-05, "loss": 5.1339, "step": 35840},
    {"epoch": 0.01, "learning_rate": 4.940477789359412e-05, "loss": 5.1353, "step": 36352},
    {"epoch": 0.01, "learning_rate": 4.93963919460836e-05, "loss": 5.1265, "step": 36864},
    {"epoch": 0.01, "learning_rate": 4.9388022377376816e-05, "loss": 5.1236, "step": 37376},
    {"epoch": 0.01, "learning_rate": 4.9379636429866296e-05, "loss": 5.1212, "step": 37888},
    {"epoch": 0.01, "learning_rate": 4.9371250482355776e-05, "loss": 5.1151, "step": 38400},
    {"epoch": 0.01, "learning_rate": 4.9362864534845256e-05, "loss": 5.1009, "step": 38912},
    {"epoch": 0.01, "learning_rate": 4.935447858733473e-05, "loss": 5.0855, "step": 39424},
    {"epoch": 0.01, "learning_rate": 4.934609263982421e-05, "loss": 5.0928, "step": 39936},
    {"epoch": 0.01, "learning_rate": 4.933770669231369e-05, "loss": 5.0835, "step": 40448},
    {"epoch": 0.01, "learning_rate": 4.932932074480317e-05, "loss": 5.0844, "step": 40960},
    {"epoch": 0.01, "learning_rate": 4.932093479729265e-05, "loss": 5.076, "step": 41472},
    {"epoch": 0.01, "learning_rate": 4.931254884978213e-05, "loss": 5.0664, "step": 41984},
    {"epoch": 0.01, "learning_rate": 4.930416290227161e-05, "loss": 5.0458, "step": 42496},
    {"epoch": 0.01, "learning_rate": 4.929577695476109e-05, "loss": 5.0512, "step": 43008},
    {"epoch": 0.01, "learning_rate": 4.9287423764858034e-05, "loss": 5.054, "step": 43520},
    {"epoch": 0.01, "learning_rate": 4.9279037817347514e-05, "loss": 5.0494, "step": 44032},
    {"epoch": 0.01, "learning_rate": 4.9270651869836994e-05, "loss": 5.0385, "step": 44544},
    {"epoch": 0.01, "learning_rate": 4.9262265922326474e-05, "loss": 5.0313, "step": 45056},
    {"epoch": 0.01, "learning_rate": 4.925389635361968e-05, "loss": 5.0195, "step": 45568},
    {"epoch": 0.02, "learning_rate": 4.924551040610916e-05, "loss": 5.014, "step": 46080},
    {"epoch": 0.02, "learning_rate": 4.923712445859864e-05, "loss": 5.0113, "step": 46592},
    {"epoch": 0.02, "learning_rate": 4.922873851108812e-05, "loss": 5.0195, "step": 47104},
    {"epoch": 0.02, "learning_rate": 4.922036894238133e-05, "loss": 5.0088, "step": 47616},
    {"epoch": 0.02, "learning_rate": 4.921198299487081e-05, "loss": 4.9836, "step": 48128},
    {"epoch": 0.02, "learning_rate": 4.920359704736029e-05, "loss": 4.9862, "step": 48640},
    {"epoch": 0.02, "learning_rate": 4.919521109984978e-05, "loss": 4.9925, "step": 49152},
    {"epoch": 0.02, "learning_rate": 4.918682515233926e-05, "loss": 4.9892, "step": 49664},
    {"epoch": 0.02, "learning_rate": 4.917843920482874e-05, "loss": 4.9766, "step": 50176},
    {"epoch": 0.02, "learning_rate": 4.917006963612195e-05, "loss": 4.9748, "step": 50688},
    {"epoch": 0.02, "learning_rate": 4.916168368861143e-05, "loss": 4.9639, "step": 51200},
    {"epoch": 0.02, "learning_rate": 4.915329774110091e-05, "loss": 4.9615, "step": 51712},
    {"epoch": 0.02, "learning_rate": 4.914491179359039e-05, "loss": 4.9686, "step": 52224},
    {"epoch": 0.02, "learning_rate": 4.913652584607987e-05, "loss": 4.9625, "step": 52736},
    {"epoch": 0.02, "learning_rate": 4.9128156277373076e-05, "loss": 4.9368, "step": 53248},
    {"epoch": 0.02, "learning_rate": 4.9119770329862556e-05, "loss": 4.9481, "step": 53760},
    {"epoch": 0.02, "learning_rate": 4.9111384382352036e-05, "loss": 4.9273, "step": 54272},
    {"epoch": 0.02, "learning_rate": 4.9102998434841516e-05, "loss": 4.9324, "step": 54784},
    {"epoch": 0.02, "learning_rate": 4.909462886613473e-05, "loss": 4.9299, "step": 55296},
    {"epoch": 0.02, "learning_rate": 4.908624291862421e-05, "loss": 4.9193, "step": 55808},
    {"epoch": 0.02, "learning_rate": 4.907785697111369e-05, "loss": 4.9184, "step": 56320},
    {"epoch": 0.02, "learning_rate": 4.906947102360317e-05, "loss": 4.9084, "step": 56832},
    {"epoch": 0.02, "learning_rate": 4.906108507609265e-05, "loss": 4.9067, "step": 57344},
    {"epoch": 0.02, "learning_rate": 4.905271550738586e-05, "loss": 4.9036, "step": 57856},
    {"epoch": 0.02, "learning_rate": 4.904432955987534e-05, "loss": 4.9155, "step": 58368},
    {"epoch": 0.02, "learning_rate": 4.903594361236482e-05, "loss": 4.8904, "step": 58880},
    {"epoch": 0.02, "learning_rate": 4.90275576648543e-05, "loss": 4.8764, "step": 59392},
    {"epoch": 0.02, "learning_rate": 4.901917171734378e-05, "loss": 4.8849, "step": 59904},
    {"epoch": 0.02, "learning_rate": 4.901078576983326e-05, "loss": 4.8952, "step": 60416},
    {"epoch": 0.02, "learning_rate": 4.900239982232274e-05, "loss": 4.8782, "step": 60928},
    {"epoch": 0.02, "learning_rate": 4.8994013874812214e-05, "loss": 4.8874, "step": 61440},
    {"epoch": 0.02, "learning_rate": 4.898564430610543e-05, "loss": 4.8747, "step": 61952},
    {"epoch": 0.02, "learning_rate": 4.8977291116202375e-05, "loss": 4.8738, "step": 62464},
    {"epoch": 0.02, "learning_rate": 4.8968905168691855e-05, "loss": 4.8612, "step": 62976},
    {"epoch": 0.02, "learning_rate": 4.8960519221181335e-05, "loss": 4.8626, "step": 63488},
    {"epoch": 0.02, "learning_rate": 4.8952133273670815e-05, "loss": 4.8581, "step": 64000},
    {"epoch": 0.02, "learning_rate": 4.8943747326160294e-05, "loss": 4.8626, "step": 64512},
    {"epoch": 0.02, "learning_rate": 4.8935361378649774e-05, "loss": 4.8421, "step": 65024},
    {"epoch": 0.02, "learning_rate": 4.8926991809942984e-05, "loss": 4.8519, "step": 65536},
    {"epoch": 0.02, "learning_rate": 4.8918605862432463e-05, "loss": 4.8475, "step": 66048},
    {"epoch": 0.02, "learning_rate": 4.8910219914921943e-05, "loss": 4.8342, "step": 66560},
    {"epoch": 0.02, "learning_rate": 4.8901833967411423e-05, "loss": 4.8418, "step": 67072},
    {"epoch": 0.02, "learning_rate": 4.88934480199009e-05, "loss": 4.835, "step": 67584},
    {"epoch": 0.02, "learning_rate": 4.888506207239038e-05, "loss": 4.8399, "step": 68096},
    {"epoch": 0.02, "learning_rate": 4.887667612487986e-05, "loss": 4.8368, "step": 68608},
    {"epoch": 0.02, "learning_rate": 4.886829017736934e-05, "loss": 4.8179, "step": 69120},
    {"epoch": 0.02, "learning_rate": 4.885992060866256e-05, "loss": 4.813, "step": 69632},
    {"epoch": 0.02, "learning_rate": 4.885153466115204e-05, "loss": 4.8127, "step": 70144},
    {"epoch": 0.02, "learning_rate": 4.884314871364151e-05, "loss": 4.8174, "step": 70656},
    {"epoch": 0.02, "learning_rate": 4.883476276613099e-05, "loss": 4.8259, "step": 71168},
    {"epoch": 0.02, "learning_rate": 4.882637681862047e-05, "loss": 4.8144, "step": 71680},
    {"epoch": 0.02, "learning_rate": 4.881799087110995e-05, "loss": 4.8043, "step": 72192},
    {"epoch": 0.02, "learning_rate": 4.880960492359943e-05, "loss": 4.8011, "step": 72704},
    {"epoch": 0.02, "learning_rate": 4.880121897608891e-05, "loss": 4.7995, "step": 73216},
    {"epoch": 0.02, "learning_rate": 4.879286578618586e-05, "loss": 4.7955, "step": 73728},
    {"epoch": 0.02, "learning_rate": 4.878447983867534e-05, "loss": 4.7921, "step": 74240},
    {"epoch": 0.02, "learning_rate": 4.877609389116482e-05, "loss": 4.8048, "step": 74752},
    {"epoch": 0.02, "learning_rate": 4.87677079436543e-05, "loss": 4.7901, "step": 75264},
    {"epoch": 0.02, "learning_rate": 4.875932199614378e-05, "loss": 4.7839, "step": 75776},
    {"epoch": 0.02, "learning_rate": 4.8750952427436986e-05, "loss": 4.7782, "step": 76288},
    {"epoch": 0.03, "eval_loss": 4.794678688049316, "eval_runtime": 547.8678, "eval_samples_per_second": 696.502, "eval_steps_per_second": 21.766, "step": 76319},
    {"epoch": 1.0, "learning_rate": 4.8742566479926466e-05, "loss": 4.7917, "step": 76800},
    {"epoch": 1.0, "learning_rate": 4.8734180532415946e-05, "loss": 4.7838, "step": 77312},
    {"epoch": 1.0, "learning_rate": 4.8725794584905426e-05, "loss": 4.769, "step": 77824},
    {"epoch": 1.0, "learning_rate": 4.8717408637394906e-05, "loss": 4.771, "step": 78336},
    {"epoch": 1.0, "learning_rate": 4.8709022689884386e-05, "loss": 4.7599, "step": 78848},
    {"epoch": 1.0, "learning_rate": 4.8700636742373866e-05, "loss": 4.7579, "step": 79360},
    {"epoch": 1.0, "learning_rate": 4.8692267173667075e-05, "loss": 4.7537, "step": 79872},
    {"epoch": 1.0, "learning_rate": 4.868389760496029e-05, "loss": 4.7564, "step": 80384},
    {"epoch": 1.0, "learning_rate": 4.867551165744977e-05, "loss": 4.7538, "step": 80896},
    {"epoch": 1.0, "learning_rate": 4.866712570993925e-05, "loss": 4.7556, "step": 81408},
    {"epoch": 1.0, "learning_rate": 4.865873976242873e-05, "loss": 4.7517, "step": 81920},
    {"epoch": 1.0, "learning_rate": 4.865035381491821e-05, "loss": 4.758, "step": 82432},
    {"epoch": 1.0, "learning_rate": 4.864196786740769e-05, "loss": 4.7423, "step": 82944},
    {"epoch": 1.0, "learning_rate": 4.863358191989717e-05, "loss": 4.7338, "step": 83456},
    {"epoch": 1.0, "learning_rate": 4.862519597238665e-05, "loss": 4.7346, "step": 83968},
    {"epoch": 1.0, "learning_rate": 4.861681002487613e-05, "loss": 4.7165, "step": 84480},
    {"epoch": 1.0, "learning_rate": 4.860844045616934e-05, "loss": 4.7303, "step": 84992},
    {"epoch": 1.0, "learning_rate": 4.860005450865882e-05, "loss": 4.7275, "step": 85504},
    {"epoch": 1.0, "learning_rate": 4.85916685611483e-05, "loss": 4.7239, "step": 86016},
    {"epoch": 1.0, "learning_rate": 4.858328261363778e-05, "loss": 4.7342, "step": 86528},
    {"epoch": 1.0, "learning_rate": 4.857489666612726e-05, "loss": 4.7228, "step": 87040},
    {"epoch": 1.0, "learning_rate": 4.856651071861674e-05, "loss": 4.7219, "step": 87552},
    {"epoch": 1.0, "learning_rate": 4.8558124771106226e-05, "loss": 4.7221, "step": 88064},
    {"epoch": 1.0, "learning_rate": 4.85497388235957e-05, "loss": 4.7179, "step": 88576},
    {"epoch": 1.0, "learning_rate": 4.8541385633692644e-05, "loss": 4.7079, "step": 89088},
    {"epoch": 1.0, "learning_rate": 4.8532999686182124e-05, "loss": 4.7067, "step": 89600},
    {"epoch": 1.0, "learning_rate": 4.8524613738671604e-05, "loss": 4.7134, "step": 90112},
    {"epoch": 1.0, "learning_rate": 4.8516227791161084e-05, "loss": 4.6984, "step": 90624},
    {"epoch": 1.0, "learning_rate": 4.8507841843650564e-05, "loss": 4.701, "step": 91136},
    {"epoch": 1.01, "learning_rate": 4.8499455896140044e-05, "loss": 4.6872, "step": 91648},
    {"epoch": 1.01, "learning_rate": 4.849108632743325e-05, "loss": 4.6893, "step": 92160},
    {"epoch": 1.01, "learning_rate": 4.848270037992273e-05, "loss": 4.7017, "step": 92672},
    {"epoch": 1.01, "learning_rate": 4.847431443241221e-05, "loss": 4.6914, "step": 93184},
    {"epoch": 1.01, "learning_rate": 4.846592848490169e-05, "loss": 4.6925, "step": 93696},
    {"epoch": 1.01, "learning_rate": 4.845754253739117e-05, "loss": 4.6889, "step": 94208},
    {"epoch": 1.01, "learning_rate": 4.844917296868439e-05, "loss": 4.6892, "step": 94720},
    {"epoch": 1.01, "learning_rate": 4.844078702117387e-05, "loss": 4.6864, "step": 95232},
    {"epoch": 1.01, "learning_rate": 4.843240107366335e-05, "loss": 4.6841, "step": 95744},
    {"epoch": 1.01, "learning_rate": 4.842401512615282e-05, "loss": 4.6844, "step": 96256},
    {"epoch": 1.01, "learning_rate": 4.84156291786423e-05, "loss": 4.6724, "step": 96768},
    {"epoch": 1.01, "learning_rate": 4.840724323113178e-05, "loss": 4.6744, "step": 97280},
    {"epoch": 1.01, "learning_rate": 4.839885728362126e-05, "loss": 4.6622, "step": 97792},
    {"epoch": 1.01, "learning_rate": 4.839047133611074e-05, "loss": 4.6674, "step": 98304},
    {"epoch": 1.01, "learning_rate": 4.838208538860022e-05, "loss": 4.6767, "step": 98816},
    {"epoch": 1.01, "learning_rate": 4.837371581989343e-05, "loss": 4.6744, "step": 99328},
    {"epoch": 1.01, "learning_rate": 4.836532987238292e-05, "loss": 4.66, "step": 99840},
    {"epoch": 1.01, "learning_rate": 4.83569439248724e-05, "loss": 4.6614, "step": 100352},
    {"epoch": 1.01, "learning_rate": 4.834855797736188e-05, "loss": 4.6674, "step": 100864},
    {"epoch": 1.01, "learning_rate": 4.834017202985136e-05, "loss": 4.6482, "step": 101376},
    {"epoch": 1.01, "learning_rate": 4.8331802461144566e-05, "loss": 4.6462, "step": 101888},
    {"epoch": 1.01, "learning_rate": 4.8323416513634046e-05, "loss": 4.6489, "step": 102400},
    {"epoch": 1.01, "learning_rate": 4.8315046944927255e-05, "loss": 4.661, "step": 102912},
    {"epoch": 1.01, "learning_rate": 4.8306660997416735e-05, "loss": 4.6366, "step": 103424},
    {"epoch": 1.01, "learning_rate": 4.8298275049906215e-05, "loss": 4.6637, "step": 103936},
    {"epoch": 1.01, "learning_rate": 4.8289889102395695e-05, "loss": 4.6317, "step": 104448},
    {"epoch": 1.01, "learning_rate": 4.8281503154885175e-05, "loss": 4.6449, "step": 104960},
    {"epoch": 1.01, "learning_rate": 4.8273117207374655e-05, "loss": 4.638, "step": 105472},
    {"epoch": 1.01, "learning_rate": 4.8264731259864135e-05, "loss": 4.6402, "step": 105984},
    {"epoch": 1.01, "learning_rate": 4.8256345312353615e-05, "loss": 4.6326, "step": 106496},
    {"epoch": 1.01, "learning_rate": 4.8247959364843095e-05, "loss": 4.6353, "step": 107008},
    {"epoch": 1.01, "learning_rate": 4.823957341733258e-05, "loss": 4.625, "step": 107520},
    {"epoch": 1.01, "learning_rate": 4.823118746982206e-05, "loss": 4.634, "step": 108032},
    {"epoch": 1.01, "learning_rate": 4.822281790111527e-05, "loss": 4.6243, "step": 108544},
    {"epoch": 1.01, "learning_rate": 4.821444833240848e-05, "loss": 4.631, "step": 109056},
    {"epoch": 1.01, "learning_rate": 4.820606238489796e-05, "loss": 4.6212, "step": 109568},
    {"epoch": 1.01, "learning_rate": 4.819767643738744e-05, "loss": 4.6214, "step": 110080},
    {"epoch": 1.01, "learning_rate": 4.818929048987692e-05, "loss": 4.6026, "step": 110592},
    {"epoch": 1.01, "learning_rate": 4.81809045423664e-05, "loss": 4.6285, "step": 111104},
    {"epoch": 1.01, "learning_rate": 4.817251859485588e-05, "loss": 4.6173, "step": 111616},
    {"epoch": 1.01, "learning_rate": 4.816413264734535e-05, "loss": 4.6061, "step": 112128},
    {"epoch": 1.01, "learning_rate": 4.815576307863857e-05, "loss": 4.6142, "step": 112640},
    {"epoch": 1.01, "learning_rate": 4.814737713112805e-05, "loss": 4.6101, "step": 113152},
    {"epoch": 1.01, "learning_rate": 4.8138991183617535e-05, "loss": 4.612, "step": 113664},
    {"epoch": 1.01, "learning_rate": 4.813060523610701e-05, "loss": 4.6181, "step": 114176},
    {"epoch": 1.01, "learning_rate": 4.812221928859649e-05, "loss": 4.6155, "step": 114688},
    {"epoch": 1.01, "learning_rate": 4.8113849719889704e-05, "loss": 4.6028, "step": 115200},
    {"epoch": 1.01, "learning_rate": 4.8105463772379184e-05, "loss": 4.595, "step": 115712},
    {"epoch": 1.01, "learning_rate": 4.8097094203672393e-05, "loss": 4.6012, "step": 116224},
    {"epoch": 1.01, "learning_rate": 4.8088708256161873e-05, "loss": 4.6035, "step": 116736},
    {"epoch": 1.01, "learning_rate": 4.808032230865135e-05, "loss": 4.6083, "step": 117248},
    {"epoch": 1.01, "learning_rate": 4.8071936361140827e-05, "loss": 4.6005, "step": 117760},
    {"epoch": 1.01, "learning_rate": 4.8063550413630307e-05, "loss": 4.5959, "step": 118272},
    {"epoch": 1.01, "learning_rate": 4.8055164466119786e-05, "loss": 4.5779, "step": 118784},
    {"epoch": 1.01, "learning_rate": 4.804677851860927e-05, "loss": 4.5867, "step": 119296},
    {"epoch": 1.01, "learning_rate": 4.803839257109875e-05, "loss": 4.593, "step": 119808},
    {"epoch": 1.01, "learning_rate": 4.803000662358823e-05, "loss": 4.5916, "step": 120320},
    {"epoch": 1.01, "learning_rate": 4.802162067607771e-05, "loss": 4.5938, "step": 120832},
    {"epoch": 1.01, "learning_rate": 4.801325110737092e-05, "loss": 4.5855, "step": 121344},
    {"epoch": 1.01, "learning_rate": 4.80048651598604e-05, "loss": 4.5766, "step": 121856},
    {"epoch": 1.02, "learning_rate": 4.799647921234988e-05, "loss": 4.5818, "step": 122368},
    {"epoch": 1.02, "learning_rate": 4.798809326483936e-05, "loss": 4.5743, "step": 122880},
    {"epoch": 1.02, "learning_rate": 4.797970731732884e-05, "loss": 4.5817, "step": 123392},
    {"epoch": 1.02, "learning_rate": 4.797132136981832e-05, "loss": 4.5826, "step": 123904},
    {"epoch": 1.02, "learning_rate": 4.796295180111153e-05, "loss": 4.5611, "step": 124416},
    {"epoch": 1.02, "learning_rate": 4.795456585360101e-05, "loss": 4.5632, "step": 124928},
    {"epoch": 1.02, "learning_rate": 4.794617990609049e-05, "loss": 4.5738, "step": 125440},
    {"epoch": 1.02, "learning_rate": 4.793779395857997e-05, "loss": 4.5792, "step": 125952},
    {"epoch": 1.02, "learning_rate": 4.792942438987319e-05, "loss": 4.5643, "step": 126464},
    {"epoch": 1.02, "learning_rate": 4.792103844236267e-05, "loss": 4.5713, "step": 126976},
    {"epoch": 1.02, "learning_rate": 4.791265249485215e-05, "loss": 4.5629, "step": 127488},
    {"epoch": 1.02, "learning_rate": 4.790426654734163e-05, "loss": 4.5628, "step": 128000},
    {"epoch": 1.02, "learning_rate": 4.7895880599831107e-05, "loss": 4.5709, "step": 128512},
    {"epoch": 1.02, "learning_rate": 4.7887511031124316e-05, "loss": 4.5676, "step": 129024},
    {"epoch": 1.02, "learning_rate": 4.7879125083613796e-05, "loss": 4.549, "step": 129536},
    {"epoch": 1.02, "learning_rate": 4.7870739136103276e-05, "loss": 4.5654, "step": 130048},
    {"epoch": 1.02, "learning_rate": 4.7862353188592756e-05, "loss": 4.5454, "step": 130560},
    {"epoch": 1.02, "learning_rate": 4.7853967241082236e-05, "loss": 4.5485, "step": 131072},
    {"epoch": 1.02, "learning_rate": 4.7845597672375445e-05, "loss": 4.5566, "step": 131584},
    {"epoch": 1.02, "learning_rate": 4.7837211724864925e-05, "loss": 4.5441, "step": 132096},
    {"epoch": 1.02, "learning_rate": 4.782882577735441e-05, "loss": 4.5454, "step": 132608},
    {"epoch": 1.02, "learning_rate": 4.782043982984389e-05, "loss": 4.5401, "step": 133120},
    {"epoch": 1.02, "learning_rate": 4.781205388233337e-05, "loss": 4.541, "step": 133632},
    {"epoch": 1.02, "learning_rate": 4.780368431362658e-05, "loss": 4.5406, "step": 134144},
    {"epoch": 1.02, "learning_rate": 4.779529836611606e-05, "loss": 4.556, "step": 134656},
    {"epoch": 1.02, "learning_rate": 4.778692879740927e-05, "loss": 4.5359, "step": 135168},
    {"epoch": 1.02, "learning_rate": 4.777854284989875e-05, "loss": 4.5275, "step": 135680},
    {"epoch": 1.02, "learning_rate": 4.777015690238823e-05, "loss": 4.5339, "step": 136192},
    {"epoch": 1.02, "learning_rate": 4.776177095487771e-05, "loss": 4.5418, "step": 136704},
    {"epoch": 1.02, "learning_rate": 4.775338500736719e-05, "loss": 4.5376, "step": 137216},
    {"epoch": 1.02, "learning_rate": 4.774499905985666e-05, "loss": 4.541, "step": 137728},
    {"epoch": 1.02, "learning_rate": 4.773661311234615e-05, "loss": 4.538, "step": 138240},
    {"epoch": 1.02, "learning_rate": 4.772822716483563e-05, "loss": 4.5375, "step": 138752},
    {"epoch": 1.02, "learning_rate": 4.771984121732511e-05, "loss": 4.5289, "step": 139264},
    {"epoch": 1.02, "learning_rate": 4.771145526981459e-05, "loss": 4.5248, "step": 139776},
    {"epoch": 1.02, "learning_rate": 4.770306932230407e-05, "loss": 4.5285, "step": 140288},
    {"epoch": 1.02, "learning_rate": 4.769469975359728e-05, "loss": 4.5347, "step": 140800},
    {"epoch": 1.02, "learning_rate": 4.768631380608676e-05, "loss": 4.5188, "step": 141312},
    {"epoch": 1.02, "learning_rate": 4.767792785857624e-05, "loss": 4.5224, "step": 141824},
    {"epoch": 1.02, "learning_rate": 4.766954191106572e-05, "loss": 4.5354, "step": 142336},
    {"epoch": 1.02, "learning_rate": 4.766117234235893e-05, "loss": 4.5096, "step": 142848},
    {"epoch": 1.02, "learning_rate": 4.765278639484841e-05, "loss": 4.5232, "step": 143360},
    {"epoch": 1.02, "learning_rate": 4.764440044733789e-05, "loss": 4.5207, "step": 143872},
    {"epoch": 1.02, "learning_rate": 4.763601449982737e-05, "loss": 4.5297, "step": 144384},
    {"epoch": 1.02, "learning_rate": 4.762762855231685e-05, "loss": 4.5236, "step": 144896},
    {"epoch": 1.02, "learning_rate": 4.761924260480633e-05, "loss": 4.5095, "step": 145408},
    {"epoch": 1.02, "learning_rate": 4.7610856657295813e-05, "loss": 4.5081, "step": 145920},
    {"epoch": 1.02, "learning_rate": 4.7602470709785293e-05, "loss": 4.5092, "step": 146432},
    {"epoch": 1.02, "learning_rate": 4.75941011410785e-05, "loss": 4.5163, "step": 146944},
    {"epoch": 1.02, "learning_rate": 4.758571519356798e-05, "loss": 4.5253, "step": 147456},
    {"epoch": 1.02, "learning_rate": 4.757732924605746e-05, "loss": 4.5195, "step": 147968},
    {"epoch": 1.02, "learning_rate": 4.756895967735067e-05, "loss": 4.5055, "step": 148480},
    {"epoch": 1.02, "learning_rate": 4.756057372984015e-05, "loss": 4.5076, "step": 148992},
    {"epoch": 1.02, "learning_rate": 4.755218778232963e-05, "loss": 4.5103, "step": 149504},
    {"epoch": 1.02, "learning_rate": 4.754380183481911e-05, "loss": 4.5065, "step": 150016},
    {"epoch": 1.02, "learning_rate": 4.753541588730859e-05, "loss": 4.503, "step": 150528},
    {"epoch": 1.02, "learning_rate": 4.75270463186018e-05, "loss": 4.5212, "step": 151040},
    {"epoch": 1.02, "learning_rate": 4.751866037109128e-05, "loss": 4.5003, "step": 151552},
    {"epoch": 1.02, "learning_rate": 4.751027442358077e-05, "loss": 4.5026, "step": 152064},
    {"epoch": 1.02, "learning_rate": 4.750188847607025e-05, "loss": 4.497, "step": 152576},
    {"epoch": 1.03, "eval_loss": 4.520559787750244, "eval_runtime": 545.9213, "eval_samples_per_second": 698.985, "eval_steps_per_second": 21.844, "step": 152638},
    {"epoch": 0.0, "learning_rate": 4.749350252855973e-05, "loss": 4.5128, "step": 153088},
    {"epoch": 0.0, "learning_rate": 4.748511658104921e-05, "loss": 4.5073, "step": 153600},
    {"epoch": 0.0, "learning_rate": 4.747673063353868e-05, "loss": 4.494, "step": 154112},
    {"epoch": 0.0, "learning_rate": 4.746834468602816e-05, "loss": 4.4944, "step": 154624},
    {"epoch": 0.0, "learning_rate": 4.745995873851764e-05, "loss": 4.488, "step": 155136},
    {"epoch": 0.0, "learning_rate": 4.745157279100712e-05, "loss": 4.4891, "step": 155648},
    {"epoch": 0.0, "learning_rate": 4.74431868434966e-05, "loss": 4.4868, "step": 156160},
    {"epoch": 0.0, "learning_rate": 4.743480089598608e-05, "loss": 4.49, "step": 156672},
    {"epoch": 0.0, "learning_rate": 4.742641494847556e-05, "loss": 4.4866, "step": 157184},
    {"epoch": 0.0, "learning_rate": 4.741804537976877e-05, "loss": 4.4921, "step": 157696},
    {"epoch": 0.0, "learning_rate": 4.740965943225825e-05, "loss": 4.4931, "step": 158208},
    {"epoch": 0.0, "learning_rate": 4.7401273484747736e-05, "loss": 4.4973, "step": 158720},
    {"epoch": 0.0, "learning_rate": 4.7392887537237216e-05, "loss": 4.4835, "step": 159232},
    {"epoch": 0.0, "learning_rate": 4.7384501589726696e-05, "loss": 4.4735, "step": 159744},
    {"epoch": 0.0, "learning_rate": 4.7376115642216176e-05, "loss": 4.4815, "step": 160256},
    {"epoch": 0.0, "learning_rate": 4.7367729694705656e-05, "loss": 4.4615, "step": 160768},
    {"epoch": 0.0, "learning_rate": 4.7359343747195135e-05, "loss": 4.478, "step": 161280},
    {"epoch": 0.0, "learning_rate": 4.7350957799684615e-05, "loss": 4.4744, "step": 161792},
    {"epoch": 0.0, "learning_rate": 4.7342571852174095e-05, "loss": 4.473, "step": 162304},
    {"epoch": 0.0, "learning_rate": 4.733418590466357e-05, "loss": 4.4879, "step": 162816},
    {"epoch": 0.0, "learning_rate": 4.732579995715305e-05, "loss": 4.4764, "step": 163328},
    {"epoch": 0.0, "learning_rate": 4.731741400964253e-05, "loss": 4.4763, "step": 163840},
    {"epoch": 0.0, "learning_rate": 4.730904444093574e-05, "loss": 4.4791, "step": 164352},
    {"epoch": 0.0, "learning_rate": 4.730065849342522e-05, "loss": 4.4722, "step": 164864},
    {"epoch": 0.0, "learning_rate": 4.7292272545914704e-05, "loss": 4.4701, "step": 165376},
    {"epoch": 0.0, "learning_rate": 4.7283886598404184e-05, "loss": 4.4646, "step": 165888},
    {"epoch": 0.0, "learning_rate": 4.7275500650893664e-05, "loss": 4.4729, "step": 166400},
    {"epoch": 0.0, "learning_rate": 4.7267114703383144e-05, "loss": 4.4639, "step": 166912},
    {"epoch": 0.0, "learning_rate": 4.725874513467635e-05, "loss": 4.4653, "step": 167424},
    {"epoch": 0.01, "learning_rate": 4.725035918716583e-05, "loss": 4.4499, "step": 167936},
    {"epoch": 0.01, "learning_rate": 4.724197323965531e-05, "loss": 4.4587, "step": 168448},
    {"epoch": 0.01, "learning_rate": 4.723358729214479e-05, "loss": 4.47, "step": 168960},
    {"epoch": 0.01, "learning_rate": 4.722520134463427e-05, "loss": 4.4566, "step": 169472},
    {"epoch": 0.01, "learning_rate": 4.721681539712375e-05, "loss": 4.4639, "step": 169984},
    {"epoch": 0.01, "learning_rate": 4.720842944961323e-05, "loss": 4.4582, "step": 170496},
    {"epoch": 0.01, "learning_rate": 4.720005988090644e-05, "loss": 4.4619, "step": 171008},
    {"epoch": 0.01, "learning_rate": 4.719167393339592e-05, "loss": 4.4641, "step": 171520},
    {"epoch": 0.01, "learning_rate": 4.71832879858854e-05, "loss": 4.4605, "step": 172032},
    {"epoch": 0.01, "learning_rate": 4.717491841717862e-05, "loss": 4.459, "step": 172544},
    {"epoch": 0.01, "learning_rate": 4.71665324696681e-05, "loss": 4.4498, "step": 173056},
    {"epoch": 0.01, "learning_rate": 4.715814652215758e-05, "loss": 4.4548, "step": 173568},
    {"epoch": 0.01, "learning_rate": 4.714976057464706e-05, "loss": 4.4413, "step": 174080},
    {"epoch": 0.01, "learning_rate": 4.714137462713654e-05, "loss": 4.4556, "step": 174592},
    {"epoch": 0.01, "learning_rate": 4.713298867962602e-05, "loss": 4.4564, "step": 175104},
    {"epoch": 0.01, "learning_rate": 4.71246027321155e-05, "loss": 4.4573, "step": 175616},
    {"epoch": 0.01, "learning_rate": 4.711621678460498e-05, "loss": 4.4434, "step": 176128},
    {"epoch": 0.01, "learning_rate": 4.710784721589819e-05, "loss": 4.4454, "step": 176640},
    {"epoch": 0.01, "learning_rate": 4.7099461268387667e-05, "loss": 4.4541, "step": 177152},
    {"epoch": 0.01, "learning_rate": 4.7091075320877147e-05, "loss": 4.4341, "step": 177664},
    {"epoch": 0.01, "learning_rate": 4.7082689373366627e-05, "loss": 4.4382, "step": 178176},
    {"epoch": 0.01, "learning_rate": 4.7074303425856106e-05, "loss": 4.4358, "step": 178688},
    {"epoch": 0.01, "learning_rate": 4.7065917478345586e-05, "loss": 4.4544, "step": 179200},
    {"epoch": 0.01, "learning_rate": 4.7057531530835066e-05, "loss": 4.4293, "step": 179712},
    {"epoch": 0.01, "learning_rate": 4.7049145583324546e-05, "loss": 4.4567, "step": 180224},
    {"epoch": 0.01, "learning_rate": 4.704079239342149e-05, "loss": 4.4265, "step": 180736},
    {"epoch": 0.01, "learning_rate": 4.703240644591097e-05, "loss": 4.4338, "step": 181248},
    {"epoch": 0.01, "learning_rate": 4.702402049840045e-05, "loss": 4.4377, "step": 181760},
    {"epoch": 0.01, "learning_rate": 4.7015634550889924e-05, "loss": 4.4366, "step": 182272},
    {"epoch": 0.01, "learning_rate": 4.7007248603379404e-05, "loss": 4.4325, "step": 182784},
    {"epoch": 0.01, "learning_rate": 4.6998862655868884e-05, "loss": 4.4441, "step": 183296},
    {"epoch": 0.01, "learning_rate": 4.6990476708358364e-05, "loss": 4.4168, "step": 183808},
    {"epoch": 0.01, "learning_rate": 4.6982090760847844e-05, "loss": 4.4326, "step": 184320},
    {"epoch": 0.01, "learning_rate": 4.6973704813337324e-05, "loss": 4.4273, "step": 184832},
    {"epoch": 0.01, "learning_rate": 4.6965318865826804e-05, "loss": 4.436, "step": 185344},
    {"epoch": 0.01, "learning_rate": 4.695694929712002e-05, "loss": 4.4236, "step": 185856},
    {"epoch": 0.01, "learning_rate": 4.69485633496095e-05, "loss": 4.4267, "step": 186368},
    {"epoch": 0.01, "learning_rate": 4.694017740209898e-05, "loss": 4.4093, "step": 186880},
    {"epoch": 0.01, "learning_rate": 4.693179145458846e-05, "loss": 4.4357, "step": 187392},
    {"epoch": 0.01, "learning_rate": 4.692340550707794e-05, "loss": 4.426, "step": 187904},
    {"epoch": 0.01, "learning_rate": 4.691501955956742e-05, "loss": 4.4167, "step": 188416},
    {"epoch": 0.01, "learning_rate": 4.69066336120569e-05, "loss": 4.4226, "step": 188928},
    {"epoch": 0.01, "learning_rate": 4.689826404335011e-05, "loss": 4.4214, "step": 189440},
    {"epoch": 0.01, "learning_rate": 4.688987809583959e-05, "loss": 4.4194, "step": 189952},
    {"epoch": 0.01, "learning_rate": 4.688149214832907e-05, "loss": 4.4325, "step": 190464},
    {"epoch": 0.01, "learning_rate": 4.687310620081855e-05, "loss": 4.4292, "step": 190976},
    {"epoch": 0.01, "learning_rate": 4.6864753010915494e-05, "loss": 4.4171, "step": 191488},
    {"epoch": 0.01, "learning_rate": 4.6856367063404974e-05, "loss": 4.4102, "step": 192000},
    {"epoch": 0.01, "learning_rate": 4.6847981115894454e-05, "loss": 4.4146, "step": 192512},
    {"epoch": 0.01, "learning_rate": 4.6839595168383934e-05, "loss": 4.4205, "step": 193024},
    {"epoch": 0.01, "learning_rate": 4.6831209220873414e-05, "loss": 4.4237, "step": 193536},
    {"epoch": 0.01, "learning_rate": 4.6822823273362894e-05, "loss": 4.4165, "step": 194048},
    {"epoch": 0.01, "learning_rate": 4.6814437325852373e-05, "loss": 4.4188, "step": 194560},
    {"epoch": 0.01, "learning_rate": 4.6806051378341853e-05, "loss": 4.3963, "step": 195072},
    {"epoch": 0.01, "learning_rate": 4.679768180963506e-05, "loss": 4.4104, "step": 195584},
    {"epoch": 0.01, "learning_rate": 4.678931224092827e-05, "loss": 4.4064, "step": 196096},
    {"epoch": 0.01, "learning_rate": 4.678092629341775e-05, "loss": 4.4116, "step": 196608},
    {"epoch": 0.01, "learning_rate": 4.677254034590723e-05, "loss": 4.418, "step": 197120},
    {"epoch": 0.01, "learning_rate": 4.676415439839671e-05, "loss": 4.409, "step": 197632},
    {"epoch": 0.01, "learning_rate": 4.675576845088619e-05, "loss": 4.3981, "step": 198144},
    {"epoch": 0.02, "learning_rate": 4.674738250337568e-05, "loss": 4.4075, "step": 198656},
    {"epoch": 0.02, "learning_rate": 4.673899655586516e-05, "loss": 4.3991, "step": 199168},
    {"epoch": 0.02, "learning_rate": 4.673061060835464e-05, "loss": 4.4079, "step": 199680},
    {"epoch": 0.02, "learning_rate": 4.672224103964785e-05, "loss": 4.4068, "step": 200192},
    {"epoch": 0.02, "learning_rate": 4.6713871470941056e-05, "loss": 4.3905, "step": 200704},
    {"epoch": 0.02, "learning_rate": 4.6705485523430536e-05, "loss": 4.3934, "step": 201216},
    {"epoch": 0.02, "learning_rate": 4.6697099575920016e-05, "loss": 4.4, "step": 201728},
    {"epoch": 0.02, "learning_rate": 4.6688713628409496e-05, "loss": 4.4072, "step": 202240},
    {"epoch": 0.02, "learning_rate": 4.6680327680898976e-05, "loss": 4.3967, "step": 202752},
    {"epoch": 0.02, "learning_rate": 4.6671958112192185e-05, "loss": 4.4048, "step": 203264},
    {"epoch": 0.02, "learning_rate": 4.6663572164681665e-05, "loss": 4.3936, "step": 203776},
    {"epoch": 0.02, "learning_rate": 4.6655186217171145e-05, "loss": 4.3958, "step": 204288},
    {"epoch": 0.02, "learning_rate": 4.664680026966063e-05, "loss": 4.4015, "step": 204800},
    {"epoch": 0.02, "learning_rate": 4.663841432215011e-05, "loss": 4.3985, "step": 205312},
    {"epoch": 0.02, "learning_rate": 4.663004475344332e-05, "loss": 4.3845, "step": 205824},
    {"epoch": 0.02, "learning_rate": 4.66216588059328e-05, "loss": 4.4028, "step": 206336},
    {"epoch": 0.02, "learning_rate": 4.661327285842228e-05, "loss": 4.3775, "step": 206848},
    {"epoch": 0.02, "learning_rate": 4.660488691091176e-05, "loss": 4.3813, "step": 207360},
    {"epoch": 0.02, "learning_rate": 4.6596500963401234e-05, "loss": 4.3976, "step": 207872},
    {"epoch": 0.02, "learning_rate": 4.6588115015890714e-05, "loss": 4.3801, "step": 208384},
    {"epoch": 0.02, "learning_rate": 4.6579729068380194e-05, "loss": 4.3899, "step": 208896},
    {"epoch": 0.02, "learning_rate": 4.6571343120869674e-05, "loss": 4.3797, "step": 209408},
    {"epoch": 0.02, "learning_rate": 4.656297355216288e-05, "loss": 4.3774, "step": 209920},
    {"epoch": 0.02, "learning_rate": 4.655458760465237e-05, "loss": 4.38, "step": 210432},
    {"epoch": 0.02, "learning_rate": 4.654620165714185e-05, "loss": 4.3989, "step": 210944},
    {"epoch": 0.02, "learning_rate": 4.653781570963133e-05, "loss": 4.3794, "step": 211456},
    {"epoch": 0.02, "learning_rate": 4.6529462519728275e-05, "loss": 4.3745, "step": 211968},
    {"epoch": 0.02, "learning_rate": 4.6521076572217755e-05, "loss": 4.3716, "step": 212480},
    {"epoch": 0.02, "learning_rate": 4.6512690624707234e-05, "loss": 4.381, "step": 212992},
    {"epoch": 0.02, "learning_rate": 4.650430467719671e-05, "loss": 4.3861, "step": 213504},
    {"epoch": 0.02, "learning_rate": 4.6495935108489924e-05, "loss": 4.3836, "step": 214016},
    {"epoch": 0.02, "learning_rate": 4.6487549160979403e-05, "loss": 4.3792, "step": 214528},
    {"epoch": 0.02, "learning_rate": 4.6479163213468883e-05, "loss": 4.3835, "step": 215040},
    {"epoch": 0.02, "learning_rate": 4.647077726595836e-05, "loss": 4.3746, "step": 215552},
    {"epoch": 0.02, "learning_rate": 4.6462391318447837e-05, "loss": 4.3773, "step": 216064},
    {"epoch": 0.02, "learning_rate": 4.645400537093732e-05, "loss": 4.376, "step": 216576},
    {"epoch": 0.02, "learning_rate": 4.64456194234268e-05, "loss": 4.3799, "step": 217088},
    {"epoch": 0.02, "learning_rate": 4.643723347591628e-05, "loss": 4.3691, "step": 217600},
    {"epoch": 0.02, "learning_rate": 4.642884752840576e-05, "loss": 4.3707, "step": 218112},
    {"epoch": 0.02, "learning_rate": 4.642047795969897e-05, "loss": 4.3848, "step": 218624},
    {"epoch": 0.02, "learning_rate": 4.641209201218845e-05, "loss": 4.3622, "step": 219136},
    {"epoch": 0.02, "learning_rate": 4.640370606467793e-05, "loss": 4.3702, "step": 219648},
    {"epoch": 0.02, "learning_rate": 4.639532011716741e-05, "loss": 4.3721, "step": 220160},
    {"epoch": 0.02, "learning_rate": 4.638695054846062e-05, "loss": 4.3819, "step": 220672},
    {"epoch": 0.02, "learning_rate": 4.63785646009501e-05, "loss": 4.3722, "step": 221184},
    {"epoch": 0.02, "learning_rate": 4.637017865343958e-05, "loss": 4.3692, "step": 221696},
    {"epoch": 0.02, "learning_rate": 4.636180908473279e-05, "loss": 4.3591, "step": 222208},
    {"epoch": 0.02, "learning_rate": 4.635342313722228e-05, "loss": 4.362, "step": 222720},
    {"epoch": 0.02, "learning_rate": 4.634503718971176e-05, "loss": 4.3716, "step": 223232},
    {"epoch": 0.02, "learning_rate": 4.633665124220124e-05, "loss": 4.3794, "step": 223744},
    {"epoch": 0.02, "learning_rate": 4.6328281673494446e-05, "loss": 4.3765, "step": 224256},
    {"epoch": 0.02, "learning_rate": 4.6319895725983926e-05, "loss": 4.364, "step": 224768},
    {"epoch": 0.02, "learning_rate": 4.6311509778473406e-05, "loss": 4.3606, "step": 225280},
    {"epoch": 0.02, "learning_rate": 4.6303123830962886e-05, "loss": 4.3719, "step": 225792},
    {"epoch": 0.02, "learning_rate": 4.6294754262256095e-05, "loss": 4.3609, "step": 226304},
    {"epoch": 0.02, "learning_rate": 4.6286368314745575e-05, "loss": 4.3609, "step": 226816},
    {"epoch": 0.02, "learning_rate": 4.6277982367235055e-05, "loss": 4.3746, "step": 227328},
    {"epoch": 0.02, "learning_rate": 4.6269596419724535e-05, "loss": 4.3597, "step": 227840},
    {"epoch": 0.02, "learning_rate": 4.6261210472214015e-05, "loss": 4.363, "step": 228352},
    {"epoch": 0.02, "learning_rate": 4.6252824524703495e-05, "loss": 4.3593, "step": 228864},
    {"epoch": 0.03, "eval_loss": 4.39060115814209, "eval_runtime": 543.0154, "eval_samples_per_second": 702.726, "eval_steps_per_second": 21.961, "step": 228957},
    {"epoch": 1.0, "learning_rate": 4.6244438577192975e-05, "loss": 4.3695, "step": 229376},
    {"epoch": 1.0, "learning_rate": 4.623605262968246e-05, "loss": 4.3707, "step": 229888},
    {"epoch": 1.0, "learning_rate": 4.622768306097567e-05, "loss": 4.3558, "step": 230400},
    {"epoch": 1.0, "learning_rate": 4.621929711346515e-05, "loss": 4.3552, "step": 230912},
    {"epoch": 1.0, "learning_rate": 4.621091116595463e-05, "loss": 4.3541, "step": 231424},
    {"epoch": 1.0, "learning_rate": 4.620254159724784e-05, "loss": 4.3496, "step": 231936},
    {"epoch": 1.0, "learning_rate": 4.619415564973732e-05, "loss": 4.3531, "step": 232448},
    {"epoch": 1.0, "learning_rate": 4.61857697022268e-05, "loss": 4.3513, "step": 232960},
    {"epoch": 1.0, "learning_rate": 4.617738375471628e-05, "loss": 4.3526, "step": 233472},
    {"epoch": 1.0, "learning_rate": 4.616901418600949e-05, "loss": 4.3601, "step": 233984},
    {"epoch": 1.0, "learning_rate": 4.616062823849897e-05, "loss": 4.3565, "step": 234496},
    {"epoch": 1.0, "learning_rate": 4.615224229098845e-05, "loss": 4.3589, "step": 235008},
    {"epoch": 1.0, "learning_rate": 4.614385634347793e-05, "loss": 4.3527, "step": 235520},
    {"epoch": 1.0, "learning_rate": 4.6135470395967415e-05, "loss": 4.344, "step": 236032},
    {"epoch": 1.0, "learning_rate": 4.6127084448456895e-05, "loss": 4.3455, "step": 236544},
    {"epoch": 1.0, "learning_rate": 4.611869850094637e-05, "loss": 4.3284, "step": 237056},
    {"epoch": 1.0, "learning_rate": 4.611031255343585e-05, "loss": 4.3432, "step": 237568},
    {"epoch": 1.0, "learning_rate": 4.6101942984729064e-05, "loss": 4.3432, "step": 238080},
    {"epoch": 1.0, "learning_rate": 4.6093557037218544e-05, "loss": 4.3427, "step": 238592},
    {"epoch": 1.0, "learning_rate": 4.608517108970802e-05, "loss": 4.3573, "step": 239104},
    {"epoch": 1.0, "learning_rate": 4.60767851421975e-05, "loss": 4.3455, "step": 239616},
    {"epoch": 1.0, "learning_rate": 4.606841557349071e-05, "loss": 4.3492, "step": 240128},
    {"epoch": 1.0, "learning_rate": 4.606004600478392e-05, "loss": 4.3455, "step": 240640},
    {"epoch": 1.0, "learning_rate": 4.60516600572734e-05, "loss": 4.3414, "step": 241152},
    {"epoch": 1.0, "learning_rate": 4.604327410976288e-05, "loss": 4.3412, "step": 241664},
    {"epoch": 1.0, "learning_rate": 4.603488816225236e-05, "loss": 4.3418, "step": 242176},
    {"epoch": 1.0, "learning_rate": 4.602650221474184e-05, "loss": 4.3401, "step": 242688},
    {"epoch": 1.0, "learning_rate": 4.601811626723132e-05, "loss": 4.3372, "step": 243200},
    {"epoch": 1.0, "learning_rate": 4.600974669852454e-05, "loss": 4.3378, "step": 243712},
    {"epoch": 1.01, "learning_rate": 4.600136075101402e-05, "loss": 4.3255, "step": 244224},
    {"epoch": 1.01, "learning_rate": 4.599299118230723e-05, "loss": 4.3344, "step": 244736},
    {"epoch": 1.01, "learning_rate": 4.598460523479671e-05, "loss": 4.3418, "step": 245248},
    {"epoch": 1.01, "learning_rate": 4.597621928728619e-05, "loss": 4.3342, "step": 245760},
    {"epoch": 1.01, "learning_rate": 4.596783333977567e-05, "loss": 4.3379, "step": 246272},
    {"epoch": 1.01, "learning_rate": 4.595944739226514e-05, "loss": 4.3339, "step": 246784},
    {"epoch": 1.01, "learning_rate": 4.595106144475462e-05, "loss": 4.3383, "step": 247296},
    {"epoch": 1.01, "learning_rate": 4.59426754972441e-05, "loss": 4.3366, "step": 247808},
    {"epoch": 1.01, "learning_rate": 4.5934289549733587e-05, "loss": 4.3427, "step": 248320},
    {"epoch": 1.01, "learning_rate": 4.5925903602223066e-05, "loss": 4.3348, "step": 248832},
    {"epoch": 1.01, "learning_rate": 4.5917517654712546e-05, "loss": 4.3284, "step": 249344},
    {"epoch": 1.01, "learning_rate": 4.5909131707202026e-05, "loss": 4.3329, "step": 249856},
    {"epoch": 1.01, "learning_rate": 4.5900745759691506e-05, "loss": 4.3175, "step": 250368},
    {"epoch": 1.01, "learning_rate": 4.5892359812180986e-05, "loss": 4.3373, "step": 250880},
    {"epoch": 1.01, "learning_rate": 4.5883990243474195e-05, "loss": 4.3373, "step": 251392},
    {"epoch": 1.01, "learning_rate": 4.5875604295963675e-05, "loss": 4.3354, "step": 251904},
    {"epoch": 1.01, "learning_rate": 4.5867218348453155e-05, "loss": 4.3239, "step": 252416},
    {"epoch": 1.01, "learning_rate": 4.5858848779746364e-05, "loss": 4.3247, "step": 252928},
    {"epoch": 1.01, "learning_rate": 4.5850462832235844e-05, "loss": 4.3359, "step": 253440},
    {"epoch": 1.01, "learning_rate": 4.5842076884725324e-05, "loss": 4.3146, "step": 253952},
    {"epoch": 1.01, "learning_rate": 4.5833690937214804e-05, "loss": 4.3214, "step": 254464},
    {"epoch": 1.01, "learning_rate": 4.5825304989704284e-05, "loss": 4.3214, "step": 254976},
    {"epoch": 1.01, "learning_rate": 4.581691904219377e-05, "loss": 4.3347, "step": 255488},
    {"epoch": 1.01, "learning_rate": 4.580853309468325e-05, "loss": 4.3088, "step": 256000},
    {"epoch": 1.01, "learning_rate": 4.580014714717273e-05, "loss": 4.3408, "step": 256512},
    {"epoch": 1.01, "learning_rate": 4.579179395726967e-05, "loss": 4.3103, "step": 257024},
    {"epoch": 1.01, "learning_rate": 4.578340800975915e-05, "loss": 4.3127, "step": 257536},
    {"epoch": 1.01, "learning_rate": 4.577502206224863e-05, "loss": 4.3258, "step": 258048},
    {"epoch": 1.01, "learning_rate": 4.576663611473811e-05, "loss": 4.313, "step": 258560},
    {"epoch": 1.01, "learning_rate": 4.575825016722759e-05, "loss": 4.3254, "step": 259072},
    {"epoch": 1.01, "learning_rate": 4.574986421971707e-05, "loss": 4.3283, "step": 259584},
    {"epoch": 1.01, "learning_rate": 4.574147827220655e-05, "loss": 4.3052, "step": 260096},
    {"epoch": 1.01, "learning_rate": 4.573309232469602e-05, "loss": 4.3142, "step": 260608},
    {"epoch": 1.01, "learning_rate": 4.572472275598924e-05, "loss": 4.3185, "step": 261120},
    {"epoch": 1.01, "learning_rate": 4.5716336808478725e-05, "loss": 4.3209, "step": 261632},
    {"epoch": 1.01, "learning_rate": 4.5707950860968205e-05, "loss": 4.3094, "step": 262144},
    {"epoch": 1.01, "learning_rate": 4.5699581292261414e-05, "loss": 4.3141, "step": 262656},
    {"epoch": 1.01, "learning_rate": 4.5691195344750894e-05, "loss": 4.2948, "step": 263168},
    {"epoch": 1.01, "learning_rate": 4.5682809397240374e-05, "loss": 4.3258, "step": 263680},
    {"epoch": 1.01, "learning_rate": 4.5674423449729854e-05, "loss": 4.3126, "step": 264192},
    {"epoch": 1.01, "learning_rate": 4.566605388102306e-05, "loss": 4.3044, "step": 264704},
    {"epoch": 1.01, "learning_rate": 4.565766793351254e-05, "loss": 4.3118, "step": 265216},
    {"epoch": 1.01, "learning_rate": 4.564928198600202e-05,
|
"loss": 4.3107, |
|
"step": 265728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5640896038491496e-05, |
|
"loss": 4.3105, |
|
"step": 266240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.5632510090980976e-05, |
|
"loss": 4.3171, |
|
"step": 266752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.562412414347046e-05, |
|
"loss": 4.3218, |
|
"step": 267264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.561573819595994e-05, |
|
"loss": 4.308, |
|
"step": 267776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.560735224844942e-05, |
|
"loss": 4.3041, |
|
"step": 268288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.55989663009389e-05, |
|
"loss": 4.3066, |
|
"step": 268800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.559058035342838e-05, |
|
"loss": 4.3075, |
|
"step": 269312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.558219440591786e-05, |
|
"loss": 4.32, |
|
"step": 269824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.557382483721107e-05, |
|
"loss": 4.3119, |
|
"step": 270336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.556543888970055e-05, |
|
"loss": 4.3063, |
|
"step": 270848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.555705294219003e-05, |
|
"loss": 4.2937, |
|
"step": 271360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554866699467951e-05, |
|
"loss": 4.3018, |
|
"step": 271872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.554028104716899e-05, |
|
"loss": 4.3024, |
|
"step": 272384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.553189509965847e-05, |
|
"loss": 4.3057, |
|
"step": 272896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.552350915214795e-05, |
|
"loss": 4.3054, |
|
"step": 273408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.551512320463743e-05, |
|
"loss": 4.3043, |
|
"step": 273920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.550675363593064e-05, |
|
"loss": 4.2966, |
|
"step": 274432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.549836768842013e-05, |
|
"loss": 4.3042, |
|
"step": 274944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5489998119713336e-05, |
|
"loss": 4.2968, |
|
"step": 275456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5481612172202816e-05, |
|
"loss": 4.2985, |
|
"step": 275968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5473226224692296e-05, |
|
"loss": 4.3021, |
|
"step": 276480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5464840277181776e-05, |
|
"loss": 4.2877, |
|
"step": 276992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5456454329671256e-05, |
|
"loss": 4.2892, |
|
"step": 277504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5448068382160736e-05, |
|
"loss": 4.3025, |
|
"step": 278016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5439682434650216e-05, |
|
"loss": 4.2955, |
|
"step": 278528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.543129648713969e-05, |
|
"loss": 4.3012, |
|
"step": 279040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5422926918432905e-05, |
|
"loss": 4.299, |
|
"step": 279552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5414540970922385e-05, |
|
"loss": 4.2928, |
|
"step": 280064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5406155023411865e-05, |
|
"loss": 4.2917, |
|
"step": 280576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5397769075901345e-05, |
|
"loss": 4.3013, |
|
"step": 281088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538939950719456e-05, |
|
"loss": 4.2992, |
|
"step": 281600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.538101355968404e-05, |
|
"loss": 4.2838, |
|
"step": 282112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5372627612173514e-05, |
|
"loss": 4.3034, |
|
"step": 282624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5364241664662994e-05, |
|
"loss": 4.2749, |
|
"step": 283136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5355855717152473e-05, |
|
"loss": 4.2854, |
|
"step": 283648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5347469769641953e-05, |
|
"loss": 4.2976, |
|
"step": 284160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533908382213143e-05, |
|
"loss": 4.2801, |
|
"step": 284672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.533069787462091e-05, |
|
"loss": 4.2933, |
|
"step": 285184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.532232830591412e-05, |
|
"loss": 4.2793, |
|
"step": 285696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.53139423584036e-05, |
|
"loss": 4.2779, |
|
"step": 286208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.530555641089308e-05, |
|
"loss": 4.2821, |
|
"step": 286720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.529717046338256e-05, |
|
"loss": 4.3017, |
|
"step": 287232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528880089467578e-05, |
|
"loss": 4.2843, |
|
"step": 287744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.528041494716526e-05, |
|
"loss": 4.2757, |
|
"step": 288256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.527202899965474e-05, |
|
"loss": 4.2754, |
|
"step": 288768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.526364305214422e-05, |
|
"loss": 4.2824, |
|
"step": 289280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.52552571046337e-05, |
|
"loss": 4.2909, |
|
"step": 289792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.524687115712318e-05, |
|
"loss": 4.2894, |
|
"step": 290304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523848520961266e-05, |
|
"loss": 4.2796, |
|
"step": 290816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.523011564090587e-05, |
|
"loss": 4.2904, |
|
"step": 291328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.522172969339535e-05, |
|
"loss": 4.2805, |
|
"step": 291840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.521334374588483e-05, |
|
"loss": 4.2833, |
|
"step": 292352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.520495779837431e-05, |
|
"loss": 4.2806, |
|
"step": 292864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.519657185086379e-05, |
|
"loss": 4.2854, |
|
"step": 293376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.518818590335327e-05, |
|
"loss": 4.2761, |
|
"step": 293888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517979995584275e-05, |
|
"loss": 4.2774, |
|
"step": 294400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.517141400833223e-05, |
|
"loss": 4.29, |
|
"step": 294912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.516304443962544e-05, |
|
"loss": 4.2688, |
|
"step": 295424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.515465849211492e-05, |
|
"loss": 4.2765, |
|
"step": 295936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.51462725446044e-05, |
|
"loss": 4.2811, |
|
"step": 296448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5137886597093876e-05, |
|
"loss": 4.2873, |
|
"step": 296960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512951702838709e-05, |
|
"loss": 4.2801, |
|
"step": 297472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.512113108087657e-05, |
|
"loss": 4.2786, |
|
"step": 297984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5112745133366045e-05, |
|
"loss": 4.2671, |
|
"step": 298496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5104359185855525e-05, |
|
"loss": 4.2701, |
|
"step": 299008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.509598961714874e-05, |
|
"loss": 4.2808, |
|
"step": 299520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.508760366963822e-05, |
|
"loss": 4.2863, |
|
"step": 300032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.50792177221277e-05, |
|
"loss": 4.2855, |
|
"step": 300544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.507083177461718e-05, |
|
"loss": 4.2738, |
|
"step": 301056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5062478584714125e-05, |
|
"loss": 4.2669, |
|
"step": 301568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5054092637203605e-05, |
|
"loss": 4.2795, |
|
"step": 302080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5045706689693085e-05, |
|
"loss": 4.2712, |
|
"step": 302592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5037320742182565e-05, |
|
"loss": 4.272, |
|
"step": 303104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5028934794672045e-05, |
|
"loss": 4.2842, |
|
"step": 303616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.502054884716152e-05, |
|
"loss": 4.274, |
|
"step": 304128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.5012162899651e-05, |
|
"loss": 4.2701, |
|
"step": 304640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.500377695214048e-05, |
|
"loss": 4.2699, |
|
"step": 305152 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.3090386390686035, |
|
"eval_runtime": 543.1539, |
|
"eval_samples_per_second": 702.547, |
|
"eval_steps_per_second": 21.955, |
|
"step": 305276 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4995407383433694e-05, |
|
"loss": 4.2846, |
|
"step": 305664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4987021435923174e-05, |
|
"loss": 4.2779, |
|
"step": 306176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4978635488412654e-05, |
|
"loss": 4.2672, |
|
"step": 306688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4970249540902134e-05, |
|
"loss": 4.2642, |
|
"step": 307200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4961863593391614e-05, |
|
"loss": 4.2682, |
|
"step": 307712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4953477645881094e-05, |
|
"loss": 4.262, |
|
"step": 308224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4945091698370574e-05, |
|
"loss": 4.2655, |
|
"step": 308736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4936705750860054e-05, |
|
"loss": 4.2604, |
|
"step": 309248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4928319803349534e-05, |
|
"loss": 4.2678, |
|
"step": 309760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4919933855839014e-05, |
|
"loss": 4.277, |
|
"step": 310272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4911547908328494e-05, |
|
"loss": 4.2659, |
|
"step": 310784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4903161960817974e-05, |
|
"loss": 4.2669, |
|
"step": 311296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4894776013307454e-05, |
|
"loss": 4.2698, |
|
"step": 311808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4886390065796934e-05, |
|
"loss": 4.2603, |
|
"step": 312320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.487800411828641e-05, |
|
"loss": 4.261, |
|
"step": 312832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.486961817077589e-05, |
|
"loss": 4.2419, |
|
"step": 313344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4861232223265373e-05, |
|
"loss": 4.2574, |
|
"step": 313856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.4852846275754853e-05, |
|
"loss": 4.2565, |
|
"step": 314368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.484446032824433e-05, |
|
"loss": 4.2554, |
|
"step": 314880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.483609075953754e-05, |
|
"loss": 4.2778, |
|
"step": 315392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.482770481202702e-05, |
|
"loss": 4.259, |
|
"step": 315904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.48193188645165e-05, |
|
"loss": 4.2664, |
|
"step": 316416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.481094929580971e-05, |
|
"loss": 4.2574, |
|
"step": 316928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.480256334829919e-05, |
|
"loss": 4.2621, |
|
"step": 317440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.479417740078867e-05, |
|
"loss": 4.2523, |
|
"step": 317952 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.478579145327815e-05, |
|
"loss": 4.2587, |
|
"step": 318464 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.477740550576763e-05, |
|
"loss": 4.2558, |
|
"step": 318976 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476901955825711e-05, |
|
"loss": 4.2553, |
|
"step": 319488 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.476063361074659e-05, |
|
"loss": 4.2535, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.475224766323607e-05, |
|
"loss": 4.2451, |
|
"step": 320512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.474387809452929e-05, |
|
"loss": 4.2471, |
|
"step": 321024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473549214701877e-05, |
|
"loss": 4.2596, |
|
"step": 321536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.472710619950825e-05, |
|
"loss": 4.2544, |
|
"step": 322048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.471872025199773e-05, |
|
"loss": 4.257, |
|
"step": 322560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4710350683290936e-05, |
|
"loss": 4.2522, |
|
"step": 323072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4701964735780416e-05, |
|
"loss": 4.2572, |
|
"step": 323584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4693578788269896e-05, |
|
"loss": 4.2553, |
|
"step": 324096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4685192840759376e-05, |
|
"loss": 4.2608, |
|
"step": 324608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4676823272052585e-05, |
|
"loss": 4.2537, |
|
"step": 325120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4668437324542065e-05, |
|
"loss": 4.2443, |
|
"step": 325632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4660051377031545e-05, |
|
"loss": 4.256, |
|
"step": 326144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4651665429521025e-05, |
|
"loss": 4.2354, |
|
"step": 326656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.464327948201051e-05, |
|
"loss": 4.2535, |
|
"step": 327168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.463489353449999e-05, |
|
"loss": 4.2519, |
|
"step": 327680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.462650758698947e-05, |
|
"loss": 4.2584, |
|
"step": 328192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.461812163947895e-05, |
|
"loss": 4.2462, |
|
"step": 328704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4609735691968425e-05, |
|
"loss": 4.2403, |
|
"step": 329216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4601349744457905e-05, |
|
"loss": 4.2572, |
|
"step": 329728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4592963796947384e-05, |
|
"loss": 4.2403, |
|
"step": 330240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4584577849436864e-05, |
|
"loss": 4.2391, |
|
"step": 330752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4576208280730074e-05, |
|
"loss": 4.2443, |
|
"step": 331264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4567822333219554e-05, |
|
"loss": 4.2526, |
|
"step": 331776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4559436385709033e-05, |
|
"loss": 4.2346, |
|
"step": 332288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4551050438198513e-05, |
|
"loss": 4.2563, |
|
"step": 332800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.454268086949173e-05, |
|
"loss": 4.233, |
|
"step": 333312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.453429492198121e-05, |
|
"loss": 4.2367, |
|
"step": 333824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.452590897447069e-05, |
|
"loss": 4.2473, |
|
"step": 334336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.451752302696017e-05, |
|
"loss": 4.2318, |
|
"step": 334848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450913707944965e-05, |
|
"loss": 4.2513, |
|
"step": 335360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.450075113193913e-05, |
|
"loss": 4.2537, |
|
"step": 335872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.449236518442861e-05, |
|
"loss": 4.2266, |
|
"step": 336384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.448399561572182e-05, |
|
"loss": 4.2344, |
|
"step": 336896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44756096682113e-05, |
|
"loss": 4.243, |
|
"step": 337408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.446722372070078e-05, |
|
"loss": 4.2487, |
|
"step": 337920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445883777319026e-05, |
|
"loss": 4.2335, |
|
"step": 338432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.445045182567974e-05, |
|
"loss": 4.2367, |
|
"step": 338944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.444206587816922e-05, |
|
"loss": 4.2227, |
|
"step": 339456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44336799306587e-05, |
|
"loss": 4.2444, |
|
"step": 339968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4425310361951914e-05, |
|
"loss": 4.2385, |
|
"step": 340480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.441694079324512e-05, |
|
"loss": 4.2283, |
|
"step": 340992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.44085548457346e-05, |
|
"loss": 4.2384, |
|
"step": 341504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.440016889822408e-05, |
|
"loss": 4.2355, |
|
"step": 342016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.439178295071356e-05, |
|
"loss": 4.2341, |
|
"step": 342528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.438339700320304e-05, |
|
"loss": 4.2428, |
|
"step": 343040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.437501105569252e-05, |
|
"loss": 4.2437, |
|
"step": 343552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.4366625108182e-05, |
|
"loss": 4.2349, |
|
"step": 344064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.435825553947521e-05, |
|
"loss": 4.2322, |
|
"step": 344576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.434986959196469e-05, |
|
"loss": 4.2346, |
|
"step": 345088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.434148364445417e-05, |
|
"loss": 4.2297, |
|
"step": 345600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.433309769694365e-05, |
|
"loss": 4.2497, |
|
"step": 346112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.432471174943313e-05, |
|
"loss": 4.2352, |
|
"step": 346624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.431632580192261e-05, |
|
"loss": 4.2334, |
|
"step": 347136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.430795623321583e-05, |
|
"loss": 4.2167, |
|
"step": 347648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429957028570531e-05, |
|
"loss": 4.2321, |
|
"step": 348160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.429118433819479e-05, |
|
"loss": 4.2252, |
|
"step": 348672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.428279839068426e-05, |
|
"loss": 4.2368, |
|
"step": 349184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.427441244317374e-05, |
|
"loss": 4.2307, |
|
"step": 349696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.426602649566322e-05, |
|
"loss": 4.2304, |
|
"step": 350208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.42576405481527e-05, |
|
"loss": 4.2237, |
|
"step": 350720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424925460064218e-05, |
|
"loss": 4.2361, |
|
"step": 351232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.424088503193539e-05, |
|
"loss": 4.2216, |
|
"step": 351744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.423249908442487e-05, |
|
"loss": 4.2267, |
|
"step": 352256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.422411313691435e-05, |
|
"loss": 4.2353, |
|
"step": 352768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4215743568207565e-05, |
|
"loss": 4.2145, |
|
"step": 353280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4207357620697045e-05, |
|
"loss": 4.2159, |
|
"step": 353792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4198971673186525e-05, |
|
"loss": 4.234, |
|
"step": 354304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4190585725676005e-05, |
|
"loss": 4.2213, |
|
"step": 354816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4182199778165485e-05, |
|
"loss": 4.2375, |
|
"step": 355328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4173813830654965e-05, |
|
"loss": 4.2281, |
|
"step": 355840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4165427883144445e-05, |
|
"loss": 4.2164, |
|
"step": 356352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4157041935633925e-05, |
|
"loss": 4.2217, |
|
"step": 356864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4148672366927134e-05, |
|
"loss": 4.229, |
|
"step": 357376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4140286419416614e-05, |
|
"loss": 4.228, |
|
"step": 357888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4131900471906094e-05, |
|
"loss": 4.217, |
|
"step": 358400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.4123514524395574e-05, |
|
"loss": 4.2322, |
|
"step": 358912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.411514495568879e-05, |
|
"loss": 4.2047, |
|
"step": 359424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.410675900817827e-05, |
|
"loss": 4.216, |
|
"step": 359936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.409837306066775e-05, |
|
"loss": 4.2287, |
|
"step": 360448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408998711315723e-05, |
|
"loss": 4.2077, |
|
"step": 360960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.408161754445044e-05, |
|
"loss": 4.2235, |
|
"step": 361472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.407324797574365e-05, |
|
"loss": 4.2178, |
|
"step": 361984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.406486202823313e-05, |
|
"loss": 4.2015, |
|
"step": 362496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.405647608072261e-05, |
|
"loss": 4.2134, |
|
"step": 363008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.404809013321209e-05, |
|
"loss": 4.2307, |
|
"step": 363520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403970418570157e-05, |
|
"loss": 4.215, |
|
"step": 364032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.403131823819105e-05, |
|
"loss": 4.2108, |
|
"step": 364544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.402293229068053e-05, |
|
"loss": 4.2067, |
|
"step": 365056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.401454634317001e-05, |
|
"loss": 4.2138, |
|
"step": 365568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.400617677446322e-05, |
|
"loss": 4.2229, |
|
"step": 366080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.39977908269527e-05, |
|
"loss": 4.2206, |
|
"step": 366592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398940487944218e-05, |
|
"loss": 4.2133, |
|
"step": 367104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.398101893193166e-05, |
|
"loss": 4.2185, |
|
"step": 367616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.397263298442114e-05, |
|
"loss": 4.2135, |
|
"step": 368128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3964247036910616e-05, |
|
"loss": 4.2164, |
|
"step": 368640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3955861089400096e-05, |
|
"loss": 4.2143, |
|
"step": 369152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.394749152069331e-05, |
|
"loss": 4.217, |
|
"step": 369664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.393910557318279e-05, |
|
"loss": 4.2056, |
|
"step": 370176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3930719625672265e-05, |
|
"loss": 4.2169, |
|
"step": 370688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.392235005696548e-05, |
|
"loss": 4.2195, |
|
"step": 371200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.391396410945496e-05, |
|
"loss": 4.2052, |
|
"step": 371712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.390557816194444e-05, |
|
"loss": 4.2087, |
|
"step": 372224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.389719221443392e-05, |
|
"loss": 4.217, |
|
"step": 372736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.38888062669234e-05, |
|
"loss": 4.2164, |
|
"step": 373248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.388042031941288e-05, |
|
"loss": 4.2173, |
|
"step": 373760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.387203437190236e-05, |
|
"loss": 4.2109, |
|
"step": 374272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.386364842439184e-05, |
|
"loss": 4.2004, |
|
"step": 374784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.385527885568505e-05, |
|
"loss": 4.2042, |
|
"step": 375296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3846909286978266e-05, |
|
"loss": 4.2131, |
|
"step": 375808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383852333946774e-05, |
|
"loss": 4.2212, |
|
"step": 376320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.383013739195722e-05, |
|
"loss": 4.2229, |
|
"step": 376832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.38217514444467e-05, |
|
"loss": 4.2087, |
|
"step": 377344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.381336549693618e-05, |
|
"loss": 4.2015, |
|
"step": 377856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.380497954942566e-05, |
|
"loss": 4.2157, |
|
"step": 378368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3796593601915145e-05, |
|
"loss": 4.2041, |
|
"step": 378880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3788207654404625e-05, |
|
"loss": 4.2052, |
|
"step": 379392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3779854464501564e-05, |
|
"loss": 4.2219, |
|
"step": 379904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3771468516991044e-05, |
|
"loss": 4.2123, |
|
"step": 380416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3763082569480524e-05, |
|
"loss": 4.2014, |
|
"step": 380928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.3754696621970004e-05, |
|
"loss": 4.2082, |
|
"step": 381440 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.253242492675781, |
|
"eval_runtime": 538.6642, |
|
"eval_samples_per_second": 708.402, |
|
"eval_steps_per_second": 22.138, |
|
"step": 381595 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3746310674459483e-05, |
|
"loss": 4.2161, |
|
"step": 381952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3737924726948963e-05, |
|
"loss": 4.2182, |
|
"step": 382464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3729538779438443e-05, |
|
"loss": 4.2008, |
|
"step": 382976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.372115283192792e-05, |
|
"loss": 4.2007, |
|
"step": 383488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.371278326322113e-05, |
|
"loss": 4.2085, |
|
"step": 384000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.370439731571061e-05, |
|
"loss": 4.198, |
|
"step": 384512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.36960113682001e-05, |
|
"loss": 4.201, |
|
"step": 385024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.368762542068958e-05, |
|
"loss": 4.1926, |
|
"step": 385536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.367925585198279e-05, |
|
"loss": 4.2074, |
|
"step": 386048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3670886283276e-05, |
|
"loss": 4.2168, |
|
"step": 386560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.366250033576548e-05, |
|
"loss": 4.1991, |
|
"step": 387072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.365411438825496e-05, |
|
"loss": 4.203, |
|
"step": 387584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.364572844074444e-05, |
|
"loss": 4.2099, |
|
"step": 388096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.363734249323392e-05, |
|
"loss": 4.196, |
|
"step": 388608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.36289565457234e-05, |
|
"loss": 4.2001, |
|
"step": 389120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.362057059821288e-05, |
|
"loss": 4.1802, |
|
"step": 389632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.361218465070236e-05, |
|
"loss": 4.1922, |
|
"step": 390144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.3603815081995566e-05, |
|
"loss": 4.1949, |
|
"step": 390656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.359544551328878e-05, |
|
"loss": 4.1965, |
|
"step": 391168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.358705956577826e-05, |
|
"loss": 4.2174, |
|
"step": 391680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357867361826774e-05, |
|
"loss": 4.1961, |
|
"step": 392192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.357028767075722e-05, |
|
"loss": 4.2075, |
|
"step": 392704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.35619017232467e-05, |
|
"loss": 4.1995, |
|
"step": 393216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.355351577573618e-05, |
|
"loss": 4.2014, |
|
"step": 393728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.354514620702939e-05, |
|
"loss": 4.1903, |
|
"step": 394240 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.353676025951887e-05, |
|
"loss": 4.1961, |
|
"step": 394752 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.352837431200835e-05, |
|
"loss": 4.1972, |
|
"step": 395264 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.351998836449783e-05, |
|
"loss": 4.1953, |
|
"step": 395776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.351160241698731e-05, |
|
"loss": 4.1934, |
|
"step": 396288 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.350323284828052e-05, |
|
"loss": 4.1866, |
|
"step": 396800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3494846900770006e-05, |
|
"loss": 4.1843, |
|
"step": 397312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3486460953259486e-05, |
|
"loss": 4.2011, |
|
"step": 397824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3478075005748966e-05, |
|
"loss": 4.1967, |
|
"step": 398336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3469689058238446e-05, |
|
"loss": 4.1948, |
|
"step": 398848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3461303110727926e-05, |
|
"loss": 4.1921, |
|
"step": 399360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.34529171632174e-05, |
|
"loss": 4.1958, |
|
"step": 399872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3444547594510615e-05, |
|
"loss": 4.1998, |
|
"step": 400384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3436161647000095e-05, |
|
"loss": 4.1969, |
|
"step": 400896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3427775699489575e-05, |
|
"loss": 4.1965, |
|
"step": 401408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.341938975197905e-05, |
|
"loss": 4.1878, |
|
"step": 401920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.341100380446853e-05, |
|
"loss": 4.1954, |
|
"step": 402432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.340261785695801e-05, |
|
"loss": 4.1797, |
|
"step": 402944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3394248288251224e-05, |
|
"loss": 4.1949, |
|
"step": 403456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3385862340740704e-05, |
|
"loss": 4.1919, |
|
"step": 403968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3377476393230184e-05, |
|
"loss": 4.2014, |
|
"step": 404480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3369090445719664e-05, |
|
"loss": 4.1895, |
|
"step": 404992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3360704498209144e-05, |
|
"loss": 4.1826, |
|
"step": 405504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3352318550698624e-05, |
|
"loss": 4.2011, |
|
"step": 406016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3343932603188104e-05, |
|
"loss": 4.1788, |
|
"step": 406528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3335546655677584e-05, |
|
"loss": 4.1834, |
|
"step": 407040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.332717708697079e-05, |
|
"loss": 4.1855, |
|
"step": 407552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331879113946027e-05, |
|
"loss": 4.1924, |
|
"step": 408064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.331040519194975e-05, |
|
"loss": 4.1774, |
|
"step": 408576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.330201924443923e-05, |
|
"loss": 4.1942, |
|
"step": 409088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.329364967573244e-05, |
|
"loss": 4.1797, |
|
"step": 409600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.328526372822193e-05, |
|
"loss": 4.1791, |
|
"step": 410112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.327687778071141e-05, |
|
"loss": 4.1918, |
|
"step": 410624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326849183320089e-05, |
|
"loss": 4.1775, |
|
"step": 411136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.326010588569037e-05, |
|
"loss": 4.1924, |
|
"step": 411648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.325171993817985e-05, |
|
"loss": 4.1936, |
|
"step": 412160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.324333399066933e-05, |
|
"loss": 4.176, |
|
"step": 412672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.323494804315881e-05, |
|
"loss": 4.1762, |
|
"step": 413184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.322657847445202e-05, |
|
"loss": 4.1819, |
|
"step": 413696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.32181925269415e-05, |
|
"loss": 4.1954, |
|
"step": 414208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.320980657943098e-05, |
|
"loss": 4.1782, |
|
"step": 414720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.320142063192046e-05, |
|
"loss": 4.1822, |
|
"step": 415232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3193051063213666e-05, |
|
"loss": 4.165, |
|
"step": 415744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3184665115703146e-05, |
|
"loss": 4.1886, |
|
"step": 416256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3176279168192626e-05, |
|
"loss": 4.1789, |
|
"step": 416768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3167893220682106e-05, |
|
"loss": 4.1754, |
|
"step": 417280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3159507273171586e-05, |
|
"loss": 4.18, |
|
"step": 417792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3151121325661066e-05, |
|
"loss": 4.1817, |
|
"step": 418304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.314275175695428e-05, |
|
"loss": 4.1785, |
|
"step": 418816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.313436580944376e-05, |
|
"loss": 4.1851, |
|
"step": 419328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3125979861933235e-05, |
|
"loss": 4.1886, |
|
"step": 419840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3117593914422715e-05, |
|
"loss": 4.1785, |
|
"step": 420352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3109207966912195e-05, |
|
"loss": 4.1747, |
|
"step": 420864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3100822019401675e-05, |
|
"loss": 4.1825, |
|
"step": 421376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3092452450694884e-05, |
|
"loss": 4.1745, |
|
"step": 421888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3084066503184364e-05, |
|
"loss": 4.1901, |
|
"step": 422400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.3075680555673844e-05, |
|
"loss": 4.1784, |
|
"step": 422912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.306729460816333e-05, |
|
"loss": 4.1831, |
|
"step": 423424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.305890866065281e-05, |
|
"loss": 4.1647, |
|
"step": 423936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.305052271314229e-05, |
|
"loss": 4.1761, |
|
"step": 424448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.304213676563177e-05, |
|
"loss": 4.1672, |
|
"step": 424960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.303375081812125e-05, |
|
"loss": 4.1836, |
|
"step": 425472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.302538124941446e-05, |
|
"loss": 4.1758, |
|
"step": 425984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.301699530190394e-05, |
|
"loss": 4.1764, |
|
"step": 426496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.300860935439342e-05, |
|
"loss": 4.1739, |
|
"step": 427008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.30002234068829e-05, |
|
"loss": 4.1845, |
|
"step": 427520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.299183745937238e-05, |
|
"loss": 4.161, |
|
"step": 428032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.298346789066559e-05, |
|
"loss": 4.1711, |
|
"step": 428544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.297508194315507e-05, |
|
"loss": 4.1825, |
|
"step": 429056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2966712374448285e-05, |
|
"loss": 4.1612, |
|
"step": 429568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2958326426937765e-05, |
|
"loss": 4.1642, |
|
"step": 430080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2949940479427244e-05, |
|
"loss": 4.1803, |
|
"step": 430592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2941554531916724e-05, |
|
"loss": 4.1641, |
|
"step": 431104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2933168584406204e-05, |
|
"loss": 4.1868, |
|
"step": 431616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2924782636895684e-05, |
|
"loss": 4.1725, |
|
"step": 432128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2916413068188893e-05, |
|
"loss": 4.1636, |
|
"step": 432640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.290802712067837e-05, |
|
"loss": 4.1662, |
|
"step": 433152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289964117316785e-05, |
|
"loss": 4.1781, |
|
"step": 433664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.289125522565733e-05, |
|
"loss": 4.1774, |
|
"step": 434176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.288286927814681e-05, |
|
"loss": 4.1623, |
|
"step": 434688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.287449970944002e-05, |
|
"loss": 4.1773, |
|
"step": 435200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.28661137619295e-05, |
|
"loss": 4.1537, |
|
"step": 435712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.285772781441898e-05, |
|
"loss": 4.1628, |
|
"step": 436224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.284934186690847e-05, |
|
"loss": 4.1759, |
|
"step": 436736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.284095591939795e-05, |
|
"loss": 4.1581, |
|
"step": 437248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.283256997188742e-05, |
|
"loss": 4.1695, |
|
"step": 437760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.28241840243769e-05, |
|
"loss": 4.1703, |
|
"step": 438272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.281579807686638e-05, |
|
"loss": 4.1463, |
|
"step": 438784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.28074285081596e-05, |
|
"loss": 4.1629, |
|
"step": 439296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.279904256064907e-05, |
|
"loss": 4.1746, |
|
"step": 439808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.279065661313855e-05, |
|
"loss": 4.1686, |
|
"step": 440320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.278227066562803e-05, |
|
"loss": 4.1557, |
|
"step": 440832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.277390109692124e-05, |
|
"loss": 4.1558, |
|
"step": 441344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.276551514941072e-05, |
|
"loss": 4.162, |
|
"step": 441856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.275712920190021e-05, |
|
"loss": 4.1707, |
|
"step": 442368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.274874325438969e-05, |
|
"loss": 4.1711, |
|
"step": 442880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2740373685682896e-05, |
|
"loss": 4.1603, |
|
"step": 443392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2731987738172376e-05, |
|
"loss": 4.1653, |
|
"step": 443904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2723601790661856e-05, |
|
"loss": 4.1635, |
|
"step": 444416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2715215843151336e-05, |
|
"loss": 4.1668, |
|
"step": 444928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2706846274444545e-05, |
|
"loss": 4.1621, |
|
"step": 445440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2698460326934025e-05, |
|
"loss": 4.164, |
|
"step": 445952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.269009075822724e-05, |
|
"loss": 4.1575, |
|
"step": 446464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2681704810716714e-05, |
|
"loss": 4.1668, |
|
"step": 446976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2673318863206194e-05, |
|
"loss": 4.1689, |
|
"step": 447488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.266494929449941e-05, |
|
"loss": 4.1513, |
|
"step": 448000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.265656334698889e-05, |
|
"loss": 4.1592, |
|
"step": 448512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.264817739947837e-05, |
|
"loss": 4.1706, |
|
"step": 449024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.263979145196785e-05, |
|
"loss": 4.1666, |
|
"step": 449536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.263140550445733e-05, |
|
"loss": 4.1655, |
|
"step": 450048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2623035935750545e-05, |
|
"loss": 4.1612, |
|
"step": 450560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.261464998824002e-05, |
|
"loss": 4.1478, |
|
"step": 451072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.26062640407295e-05, |
|
"loss": 4.1538, |
|
"step": 451584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.259787809321898e-05, |
|
"loss": 4.161, |
|
"step": 452096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258949214570846e-05, |
|
"loss": 4.1778, |
|
"step": 452608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.258110619819794e-05, |
|
"loss": 4.1712, |
|
"step": 453120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.257272025068742e-05, |
|
"loss": 4.1552, |
|
"step": 453632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.25643343031769e-05, |
|
"loss": 4.1577, |
|
"step": 454144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.255594835566638e-05, |
|
"loss": 4.1613, |
|
"step": 454656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.254756240815586e-05, |
|
"loss": 4.1581, |
|
"step": 455168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.253917646064534e-05, |
|
"loss": 4.1561, |
|
"step": 455680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2530790513134825e-05, |
|
"loss": 4.1739, |
|
"step": 456192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2522404565624305e-05, |
|
"loss": 4.1629, |
|
"step": 456704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2514034996917514e-05, |
|
"loss": 4.1503, |
|
"step": 457216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 4.2505649049406994e-05, |
|
"loss": 4.1581, |
|
"step": 457728 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.211868762969971, |
|
"eval_runtime": 528.4656, |
|
"eval_samples_per_second": 722.074, |
|
"eval_steps_per_second": 22.565, |
|
"step": 457914 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2497263101896474e-05, |
|
"loss": 4.1716, |
|
"step": 458240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2488877154385954e-05, |
|
"loss": 4.1679, |
|
"step": 458752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2480491206875434e-05, |
|
"loss": 4.155, |
|
"step": 459264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.247210525936491e-05, |
|
"loss": 4.1541, |
|
"step": 459776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.246371931185439e-05, |
|
"loss": 4.1576, |
|
"step": 460288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.245533336434387e-05, |
|
"loss": 4.149, |
|
"step": 460800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.244694741683335e-05, |
|
"loss": 4.153, |
|
"step": 461312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.243856146932283e-05, |
|
"loss": 4.1465, |
|
"step": 461824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.243017552181231e-05, |
|
"loss": 4.1558, |
|
"step": 462336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2421789574301793e-05, |
|
"loss": 4.1696, |
|
"step": 462848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2413420005595e-05, |
|
"loss": 4.1511, |
|
"step": 463360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.240503405808448e-05, |
|
"loss": 4.1539, |
|
"step": 463872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.239664811057396e-05, |
|
"loss": 4.1623, |
|
"step": 464384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.238826216306344e-05, |
|
"loss": 4.1478, |
|
"step": 464896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.237987621555292e-05, |
|
"loss": 4.1566, |
|
"step": 465408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.23714902680424e-05, |
|
"loss": 4.1333, |
|
"step": 465920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.236310432053188e-05, |
|
"loss": 4.1404, |
|
"step": 466432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.235471837302136e-05, |
|
"loss": 4.1485, |
|
"step": 466944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.234633242551084e-05, |
|
"loss": 4.1463, |
|
"step": 467456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2337946478000315e-05, |
|
"loss": 4.1711, |
|
"step": 467968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2329560530489795e-05, |
|
"loss": 4.1512, |
|
"step": 468480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.2321174582979275e-05, |
|
"loss": 4.161, |
|
"step": 468992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.231278863546876e-05, |
|
"loss": 4.1473, |
|
"step": 469504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.230440268795824e-05, |
|
"loss": 4.1528, |
|
"step": 470016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.229603311925145e-05, |
|
"loss": 4.1421, |
|
"step": 470528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.228764717174093e-05, |
|
"loss": 4.1489, |
|
"step": 471040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.227926122423041e-05, |
|
"loss": 4.1503, |
|
"step": 471552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.227087527671989e-05, |
|
"loss": 4.1433, |
|
"step": 472064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.226248932920937e-05, |
|
"loss": 4.1527, |
|
"step": 472576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.225410338169885e-05, |
|
"loss": 4.1363, |
|
"step": 473088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.224571743418833e-05, |
|
"loss": 4.1348, |
|
"step": 473600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.223733148667781e-05, |
|
"loss": 4.1554, |
|
"step": 474112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.222896191797102e-05, |
|
"loss": 4.15, |
|
"step": 474624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.22205759704605e-05, |
|
"loss": 4.1521, |
|
"step": 475136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.221219002294998e-05, |
|
"loss": 4.1405, |
|
"step": 475648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.220380407543946e-05, |
|
"loss": 4.153, |
|
"step": 476160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2195434506732676e-05, |
|
"loss": 4.1494, |
|
"step": 476672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2187048559222156e-05, |
|
"loss": 4.1545, |
|
"step": 477184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2178662611711635e-05, |
|
"loss": 4.147, |
|
"step": 477696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2170276664201115e-05, |
|
"loss": 4.1421, |
|
"step": 478208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2161907095494325e-05, |
|
"loss": 4.1524, |
|
"step": 478720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2153521147983804e-05, |
|
"loss": 4.1311, |
|
"step": 479232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2145135200473284e-05, |
|
"loss": 4.1488, |
|
"step": 479744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2136749252962764e-05, |
|
"loss": 4.1479, |
|
"step": 480256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2128363305452244e-05, |
|
"loss": 4.1532, |
|
"step": 480768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2119977357941724e-05, |
|
"loss": 4.1469, |
|
"step": 481280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.2111591410431204e-05, |
|
"loss": 4.1354, |
|
"step": 481792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.210320546292068e-05, |
|
"loss": 4.1548, |
|
"step": 482304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.209483589421389e-05, |
|
"loss": 4.136, |
|
"step": 482816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.208644994670338e-05, |
|
"loss": 4.1368, |
|
"step": 483328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.207806399919286e-05, |
|
"loss": 4.1404, |
|
"step": 483840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.206967805168233e-05, |
|
"loss": 4.1445, |
|
"step": 484352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.206130848297555e-05, |
|
"loss": 4.1361, |
|
"step": 484864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.205292253546503e-05, |
|
"loss": 4.1443, |
|
"step": 485376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.204455296675824e-05, |
|
"loss": 4.1338, |
|
"step": 485888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.203616701924772e-05, |
|
"loss": 4.1394, |
|
"step": 486400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.20277810717372e-05, |
|
"loss": 4.1424, |
|
"step": 486912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.201939512422668e-05, |
|
"loss": 4.1365, |
|
"step": 487424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.201100917671615e-05, |
|
"loss": 4.1457, |
|
"step": 487936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.200262322920563e-05, |
|
"loss": 4.15, |
|
"step": 488448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.199423728169512e-05, |
|
"loss": 4.1321, |
|
"step": 488960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.19858513341846e-05, |
|
"loss": 4.1316, |
|
"step": 489472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.197748176547781e-05, |
|
"loss": 4.1347, |
|
"step": 489984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.196909581796729e-05, |
|
"loss": 4.1535, |
|
"step": 490496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.196070987045677e-05, |
|
"loss": 4.1363, |
|
"step": 491008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.195232392294625e-05, |
|
"loss": 4.1363, |
|
"step": 491520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1943954354239456e-05, |
|
"loss": 4.1249, |
|
"step": 492032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1935568406728936e-05, |
|
"loss": 4.1408, |
|
"step": 492544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1927182459218416e-05, |
|
"loss": 4.1355, |
|
"step": 493056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1918796511707896e-05, |
|
"loss": 4.133, |
|
"step": 493568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1910426943001105e-05, |
|
"loss": 4.1362, |
|
"step": 494080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1902040995490585e-05, |
|
"loss": 4.1368, |
|
"step": 494592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.189365504798007e-05, |
|
"loss": 4.1369, |
|
"step": 495104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.188526910046955e-05, |
|
"loss": 4.1387, |
|
"step": 495616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.187688315295903e-05, |
|
"loss": 4.1489, |
|
"step": 496128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.186851358425224e-05, |
|
"loss": 4.1378, |
|
"step": 496640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.186012763674172e-05, |
|
"loss": 4.1325, |
|
"step": 497152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.185175806803493e-05, |
|
"loss": 4.1353, |
|
"step": 497664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.184337212052441e-05, |
|
"loss": 4.1333, |
|
"step": 498176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.183498617301389e-05, |
|
"loss": 4.1474, |
|
"step": 498688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.182660022550337e-05, |
|
"loss": 4.136, |
|
"step": 499200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.181821427799285e-05, |
|
"loss": 4.1405, |
|
"step": 499712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.180984470928606e-05, |
|
"loss": 4.1196, |
|
"step": 500224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.180145876177554e-05, |
|
"loss": 4.1365, |
|
"step": 500736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1793072814265025e-05, |
|
"loss": 4.1223, |
|
"step": 501248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1784686866754505e-05, |
|
"loss": 4.1405, |
|
"step": 501760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1776300919243985e-05, |
|
"loss": 4.135, |
|
"step": 502272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1767914971733465e-05, |
|
"loss": 4.1339, |
|
"step": 502784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.1759529024222945e-05, |
|
"loss": 4.1292, |
|
"step": 503296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1751143076712425e-05, |
|
"loss": 4.1387, |
|
"step": 503808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1742773508005634e-05, |
|
"loss": 4.1228, |
|
"step": 504320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1734387560495114e-05, |
|
"loss": 4.1294, |
|
"step": 504832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1726001612984594e-05, |
|
"loss": 4.1391, |
|
"step": 505344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1717615665474074e-05, |
|
"loss": 4.1194, |
|
"step": 505856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.170924609676728e-05, |
|
"loss": 4.1219, |
|
"step": 506368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.170086014925676e-05, |
|
"loss": 4.1324, |
|
"step": 506880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.169247420174624e-05, |
|
"loss": 4.1239, |
|
"step": 507392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.168408825423572e-05, |
|
"loss": 4.1464, |
|
"step": 507904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.167571868552894e-05, |
|
"loss": 4.1291, |
|
"step": 508416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.166733273801842e-05, |
|
"loss": 4.125, |
|
"step": 508928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.16589467905079e-05, |
|
"loss": 4.1242, |
|
"step": 509440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.165056084299738e-05, |
|
"loss": 4.1382, |
|
"step": 509952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.164219127429059e-05, |
|
"loss": 4.1354, |
|
"step": 510464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.16338217055838e-05, |
|
"loss": 4.1233, |
|
"step": 510976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.162543575807328e-05, |
|
"loss": 4.1325, |
|
"step": 511488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.161704981056276e-05, |
|
"loss": 4.1161, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160866386305224e-05, |
|
"loss": 4.1184, |
|
"step": 512512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.160027791554172e-05, |
|
"loss": 4.1335, |
|
"step": 513024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1591891968031197e-05, |
|
"loss": 4.1217, |
|
"step": 513536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1583506020520677e-05, |
|
"loss": 4.1271, |
|
"step": 514048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1575120073010156e-05, |
|
"loss": 4.1291, |
|
"step": 514560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.156675050430337e-05, |
|
"loss": 4.1062, |
|
"step": 515072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.155836455679285e-05, |
|
"loss": 4.1214, |
|
"step": 515584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.154997860928233e-05, |
|
"loss": 4.1395, |
|
"step": 516096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.154160904057554e-05, |
|
"loss": 4.1295, |
|
"step": 516608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.153322309306502e-05, |
|
"loss": 4.1115, |
|
"step": 517120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.15248371455545e-05, |
|
"loss": 4.1173, |
|
"step": 517632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.151645119804398e-05, |
|
"loss": 4.1217, |
|
"step": 518144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.150806525053346e-05, |
|
"loss": 4.1284, |
|
"step": 518656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1499679303022934e-05, |
|
"loss": 4.1313, |
|
"step": 519168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1491293355512414e-05, |
|
"loss": 4.121, |
|
"step": 519680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1482907408001894e-05, |
|
"loss": 4.1242, |
|
"step": 520192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.147453783929511e-05, |
|
"loss": 4.1225, |
|
"step": 520704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.146615189178459e-05, |
|
"loss": 4.1253, |
|
"step": 521216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.145776594427407e-05, |
|
"loss": 4.123, |
|
"step": 521728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1449396375567286e-05, |
|
"loss": 4.1258, |
|
"step": 522240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.144101042805676e-05, |
|
"loss": 4.1199, |
|
"step": 522752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.143262448054624e-05, |
|
"loss": 4.1254, |
|
"step": 523264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.142423853303572e-05, |
|
"loss": 4.126, |
|
"step": 523776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.14158525855252e-05, |
|
"loss": 4.1102, |
|
"step": 524288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.140748301681841e-05, |
|
"loss": 4.1211, |
|
"step": 524800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1399113448111624e-05, |
|
"loss": 4.1279, |
|
"step": 525312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1390727500601104e-05, |
|
"loss": 4.1281, |
|
"step": 525824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1382341553090584e-05, |
|
"loss": 4.1265, |
|
"step": 526336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1373955605580064e-05, |
|
"loss": 4.1244, |
|
"step": 526848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1365569658069544e-05, |
|
"loss": 4.1103, |
|
"step": 527360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1357183710559024e-05, |
|
"loss": 4.1126, |
|
"step": 527872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1348797763048504e-05, |
|
"loss": 4.1215, |
|
"step": 528384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.1340411815537984e-05, |
|
"loss": 4.138, |
|
"step": 528896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.133204224683119e-05, |
|
"loss": 4.1328, |
|
"step": 529408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.132365629932067e-05, |
|
"loss": 4.1181, |
|
"step": 529920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.131527035181015e-05, |
|
"loss": 4.1145, |
|
"step": 530432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.130688440429963e-05, |
|
"loss": 4.1245, |
|
"step": 530944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129849845678911e-05, |
|
"loss": 4.119, |
|
"step": 531456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.129011250927859e-05, |
|
"loss": 4.1157, |
|
"step": 531968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.128172656176807e-05, |
|
"loss": 4.1346, |
|
"step": 532480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.127334061425755e-05, |
|
"loss": 4.1245, |
|
"step": 532992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.126497104555077e-05, |
|
"loss": 4.1113, |
|
"step": 533504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.125658509804025e-05, |
|
"loss": 4.1228, |
|
"step": 534016 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.181921482086182, |
|
"eval_runtime": 541.997, |
|
"eval_samples_per_second": 704.046, |
|
"eval_steps_per_second": 22.002, |
|
"step": 534233 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.124819915052973e-05, |
|
"loss": 4.1298, |
|
"step": 534528 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123981320301921e-05, |
|
"loss": 4.1306, |
|
"step": 535040 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.123142725550869e-05, |
|
"loss": 4.1205, |
|
"step": 535552 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.122304130799817e-05, |
|
"loss": 4.1153, |
|
"step": 536064 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.121465536048765e-05, |
|
"loss": 4.1149, |
|
"step": 536576 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.120626941297712e-05, |
|
"loss": 4.1182, |
|
"step": 537088 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.11978834654666e-05, |
|
"loss": 4.1121, |
|
"step": 537600 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.118949751795608e-05, |
|
"loss": 4.1078, |
|
"step": 538112 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.118111157044556e-05, |
|
"loss": 4.1162, |
|
"step": 538624 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.117272562293504e-05, |
|
"loss": 4.1301, |
|
"step": 539136 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.116433967542452e-05, |
|
"loss": 4.1141, |
|
"step": 539648 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1155953727914e-05, |
|
"loss": 4.1148, |
|
"step": 540160 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.114756778040348e-05, |
|
"loss": 4.1286, |
|
"step": 540672 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113918183289297e-05, |
|
"loss": 4.1085, |
|
"step": 541184 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.113079588538245e-05, |
|
"loss": 4.1208, |
|
"step": 541696 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.112240993787193e-05, |
|
"loss": 4.0958, |
|
"step": 542208 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.111402399036141e-05, |
|
"loss": 4.101, |
|
"step": 542720 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.110563804285089e-05, |
|
"loss": 4.1135, |
|
"step": 543232 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.109725209534036e-05, |
|
"loss": 4.1075, |
|
"step": 543744 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.108886614782984e-05, |
|
"loss": 4.133, |
|
"step": 544256 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1080496579123056e-05, |
|
"loss": 4.1169, |
|
"step": 544768 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1072127010416266e-05, |
|
"loss": 4.1215, |
|
"step": 545280 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1063741062905746e-05, |
|
"loss": 4.111, |
|
"step": 545792 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1055355115395225e-05, |
|
"loss": 4.1186, |
|
"step": 546304 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1046969167884705e-05, |
|
"loss": 4.1029, |
|
"step": 546816 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1038583220374185e-05, |
|
"loss": 4.1131, |
|
"step": 547328 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1030197272863665e-05, |
|
"loss": 4.1145, |
|
"step": 547840 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1021811325353145e-05, |
|
"loss": 4.1108, |
|
"step": 548352 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1013425377842625e-05, |
|
"loss": 4.109, |
|
"step": 548864 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.1005039430332105e-05, |
|
"loss": 4.1049, |
|
"step": 549376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0996669861625314e-05, |
|
"loss": 4.0978, |
|
"step": 549888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0988283914114794e-05, |
|
"loss": 4.1186, |
|
"step": 550400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0979897966604274e-05, |
|
"loss": 4.1139, |
|
"step": 550912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0971512019093754e-05, |
|
"loss": 4.1132, |
|
"step": 551424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.096314245038696e-05, |
|
"loss": 4.1086, |
|
"step": 551936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.095477288168018e-05, |
|
"loss": 4.1157, |
|
"step": 552448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.094638693416966e-05, |
|
"loss": 4.1117, |
|
"step": 552960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.093800098665914e-05, |
|
"loss": 4.1123, |
|
"step": 553472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.092961503914862e-05, |
|
"loss": 4.1094, |
|
"step": 553984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.09212290916381e-05, |
|
"loss": 4.1109, |
|
"step": 554496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.091284314412758e-05, |
|
"loss": 4.1091, |
|
"step": 555008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.090445719661706e-05, |
|
"loss": 4.0988, |
|
"step": 555520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.089607124910654e-05, |
|
"loss": 4.1166, |
|
"step": 556032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.088770168039975e-05, |
|
"loss": 4.1087, |
|
"step": 556544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.087931573288923e-05, |
|
"loss": 4.1188, |
|
"step": 557056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.087092978537871e-05, |
|
"loss": 4.1132, |
|
"step": 557568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.086254383786819e-05, |
|
"loss": 4.0969, |
|
"step": 558080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.08541742691614e-05, |
|
"loss": 4.1194, |
|
"step": 558592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.084578832165088e-05, |
|
"loss": 4.0986, |
|
"step": 559104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.083740237414036e-05, |
|
"loss": 4.1049, |
|
"step": 559616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0829016426629844e-05, |
|
"loss": 4.1004, |
|
"step": 560128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0820630479119323e-05, |
|
"loss": 4.1112, |
|
"step": 560640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.081226091041253e-05, |
|
"loss": 4.1022, |
|
"step": 561152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.080387496290201e-05, |
|
"loss": 4.1075, |
|
"step": 561664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.079550539419522e-05, |
|
"loss": 4.1021, |
|
"step": 562176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.07871194466847e-05, |
|
"loss": 4.099, |
|
"step": 562688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.077873349917418e-05, |
|
"loss": 4.1078, |
|
"step": 563200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.077034755166366e-05, |
|
"loss": 4.1014, |
|
"step": 563712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.076196160415314e-05, |
|
"loss": 4.1128, |
|
"step": 564224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.075357565664262e-05, |
|
"loss": 4.1103, |
|
"step": 564736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.074520608793583e-05, |
|
"loss": 4.102, |
|
"step": 565248 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.073682014042531e-05, |
|
"loss": 4.0911, |
|
"step": 565760 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.07284341929148e-05, |
|
"loss": 4.102, |
|
"step": 566272 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.072004824540428e-05, |
|
"loss": 4.1161, |
|
"step": 566784 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.071166229789376e-05, |
|
"loss": 4.103, |
|
"step": 567296 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.070327635038324e-05, |
|
"loss": 4.0982, |
|
"step": 567808 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.069489040287272e-05, |
|
"loss": 4.0939, |
|
"step": 568320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.06865044553622e-05, |
|
"loss": 4.1052, |
|
"step": 568832 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0678134886655406e-05, |
|
"loss": 4.0971, |
|
"step": 569344 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0669748939144886e-05, |
|
"loss": 4.104, |
|
"step": 569856 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0661362991634366e-05, |
|
"loss": 4.1024, |
|
"step": 570368 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0652977044123846e-05, |
|
"loss": 4.0996, |
|
"step": 570880 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.064459109661332e-05, |
|
"loss": 4.1033, |
|
"step": 571392 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.06362051491028e-05, |
|
"loss": 4.1073, |
|
"step": 571904 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.062781920159228e-05, |
|
"loss": 4.1104, |
|
"step": 572416 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0619433254081766e-05, |
|
"loss": 4.1013, |
|
"step": 572928 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0611063685374975e-05, |
|
"loss": 4.0992, |
|
"step": 573440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.060269411666819e-05, |
|
"loss": 4.101, |
|
"step": 573952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.059430816915767e-05, |
|
"loss": 4.1, |
|
"step": 574464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0585922221647144e-05, |
|
"loss": 4.1121, |
|
"step": 574976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0577536274136624e-05, |
|
"loss": 4.1068, |
|
"step": 575488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0569150326626104e-05, |
|
"loss": 4.1071, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0560764379115584e-05, |
|
"loss": 4.0873, |
|
"step": 576512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0552378431605064e-05, |
|
"loss": 4.099, |
|
"step": 577024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0543992484094544e-05, |
|
"loss": 4.0888, |
|
"step": 577536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.053562291538775e-05, |
|
"loss": 4.1049, |
|
"step": 578048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.052723696787723e-05, |
|
"loss": 4.1023, |
|
"step": 578560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.051885102036671e-05, |
|
"loss": 4.0994, |
|
"step": 579072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.05104650728562e-05, |
|
"loss": 4.0996, |
|
"step": 579584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.050209550414941e-05, |
|
"loss": 4.1004, |
|
"step": 580096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.049370955663889e-05, |
|
"loss": 4.0908, |
|
"step": 580608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.048532360912837e-05, |
|
"loss": 4.0966, |
|
"step": 581120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.047693766161785e-05, |
|
"loss": 4.1046, |
|
"step": 581632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.046855171410733e-05, |
|
"loss": 4.0887, |
|
"step": 582144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.046016576659681e-05, |
|
"loss": 4.0884, |
|
"step": 582656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.045179619789002e-05, |
|
"loss": 4.0937, |
|
"step": 583168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.04434102503795e-05, |
|
"loss": 4.0957, |
|
"step": 583680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.043502430286898e-05, |
|
"loss": 4.112, |
|
"step": 584192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.042663835535846e-05, |
|
"loss": 4.0928, |
|
"step": 584704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0418268786651666e-05, |
|
"loss": 4.0944, |
|
"step": 585216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.040988283914115e-05, |
|
"loss": 4.0891, |
|
"step": 585728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.040149689163063e-05, |
|
"loss": 4.1064, |
|
"step": 586240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.039311094412011e-05, |
|
"loss": 4.0975, |
|
"step": 586752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.038475775421705e-05, |
|
"loss": 4.0907, |
|
"step": 587264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.037637180670653e-05, |
|
"loss": 4.1006, |
|
"step": 587776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.036798585919601e-05, |
|
"loss": 4.0812, |
|
"step": 588288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035959991168549e-05, |
|
"loss": 4.0817, |
|
"step": 588800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.035121396417497e-05, |
|
"loss": 4.1048, |
|
"step": 589312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.034284439546818e-05, |
|
"loss": 4.0884, |
|
"step": 589824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.033445844795766e-05, |
|
"loss": 4.0963, |
|
"step": 590336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.032607250044714e-05, |
|
"loss": 4.0933, |
|
"step": 590848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.031768655293662e-05, |
|
"loss": 4.0696, |
|
"step": 591360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030930060542611e-05, |
|
"loss": 4.0914, |
|
"step": 591872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.030091465791559e-05, |
|
"loss": 4.105, |
|
"step": 592384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.029252871040507e-05, |
|
"loss": 4.0955, |
|
"step": 592896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0284142762894547e-05, |
|
"loss": 4.0834, |
|
"step": 593408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0275773194187756e-05, |
|
"loss": 4.0832, |
|
"step": 593920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0267387246677236e-05, |
|
"loss": 4.0845, |
|
"step": 594432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0259001299166716e-05, |
|
"loss": 4.0969, |
|
"step": 594944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0250631730459925e-05, |
|
"loss": 4.0985, |
|
"step": 595456 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0242245782949405e-05, |
|
"loss": 4.0895, |
|
"step": 595968 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0233859835438885e-05, |
|
"loss": 4.089, |
|
"step": 596480 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0225473887928365e-05, |
|
"loss": 4.0958, |
|
"step": 596992 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0217087940417845e-05, |
|
"loss": 4.0919, |
|
"step": 597504 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0208701992907324e-05, |
|
"loss": 4.0866, |
|
"step": 598016 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0200316045396804e-05, |
|
"loss": 4.0969, |
|
"step": 598528 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0191930097886284e-05, |
|
"loss": 4.0872, |
|
"step": 599040 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01835605291795e-05, |
|
"loss": 4.088, |
|
"step": 599552 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.017519096047271e-05, |
|
"loss": 4.1022, |
|
"step": 600064 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.016680501296219e-05, |
|
"loss": 4.0746, |
|
"step": 600576 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.015841906545167e-05, |
|
"loss": 4.0891, |
|
"step": 601088 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.015003311794115e-05, |
|
"loss": 4.0975, |
|
"step": 601600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.014164717043063e-05, |
|
"loss": 4.0942, |
|
"step": 602112 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01332612229201e-05, |
|
"loss": 4.093, |
|
"step": 602624 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.012487527540958e-05, |
|
"loss": 4.0933, |
|
"step": 603136 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.01165057067028e-05, |
|
"loss": 4.0802, |
|
"step": 603648 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.010811975919228e-05, |
|
"loss": 4.0787, |
|
"step": 604160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.009973381168176e-05, |
|
"loss": 4.0929, |
|
"step": 604672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.009134786417124e-05, |
|
"loss": 4.0982, |
|
"step": 605184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.008296191666072e-05, |
|
"loss": 4.1038, |
|
"step": 605696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.00745759691502e-05, |
|
"loss": 4.0855, |
|
"step": 606208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.006619002163968e-05, |
|
"loss": 4.0822, |
|
"step": 606720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.005780407412916e-05, |
|
"loss": 4.0952, |
|
"step": 607232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.004943450542237e-05, |
|
"loss": 4.0845, |
|
"step": 607744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.004104855791185e-05, |
|
"loss": 4.0891, |
|
"step": 608256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.003266261040133e-05, |
|
"loss": 4.0965, |
|
"step": 608768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.002427666289081e-05, |
|
"loss": 4.0967, |
|
"step": 609280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0015907094184016e-05, |
|
"loss": 4.0791, |
|
"step": 609792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.0007521146673496e-05, |
|
"loss": 4.0934, |
|
"step": 610304 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.157905101776123, |
|
"eval_runtime": 555.1002, |
|
"eval_samples_per_second": 687.427, |
|
"eval_steps_per_second": 21.483, |
|
"step": 610552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.999913519916298e-05, |
|
"loss": 4.0883, |
|
"step": 610816 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.999074925165246e-05, |
|
"loss": 4.1015, |
|
"step": 611328 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.99823960617494e-05, |
|
"loss": 4.0871, |
|
"step": 611840 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.997401011423888e-05, |
|
"loss": 4.0871, |
|
"step": 612352 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.996562416672836e-05, |
|
"loss": 4.0809, |
|
"step": 612864 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.995723821921784e-05, |
|
"loss": 4.0878, |
|
"step": 613376 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.994885227170732e-05, |
|
"loss": 4.0865, |
|
"step": 613888 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.99404663241968e-05, |
|
"loss": 4.071, |
|
"step": 614400 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.993208037668628e-05, |
|
"loss": 4.0846, |
|
"step": 614912 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.992369442917576e-05, |
|
"loss": 4.0997, |
|
"step": 615424 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.991532486046897e-05, |
|
"loss": 4.083, |
|
"step": 615936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.990693891295845e-05, |
|
"loss": 4.0824, |
|
"step": 616448 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.989855296544793e-05, |
|
"loss": 4.0963, |
|
"step": 616960 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9890167017937416e-05, |
|
"loss": 4.0774, |
|
"step": 617472 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9881781070426896e-05, |
|
"loss": 4.0872, |
|
"step": 617984 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9873395122916376e-05, |
|
"loss": 4.0674, |
|
"step": 618496 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9865025554209585e-05, |
|
"loss": 4.0684, |
|
"step": 619008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9856639606699065e-05, |
|
"loss": 4.085, |
|
"step": 619520 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9848253659188545e-05, |
|
"loss": 4.075, |
|
"step": 620032 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9839867711678025e-05, |
|
"loss": 4.1014, |
|
"step": 620544 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9831481764167505e-05, |
|
"loss": 4.0868, |
|
"step": 621056 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9823095816656985e-05, |
|
"loss": 4.0896, |
|
"step": 621568 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.981470986914646e-05, |
|
"loss": 4.0804, |
|
"step": 622080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.980632392163594e-05, |
|
"loss": 4.0886, |
|
"step": 622592 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.979793797412542e-05, |
|
"loss": 4.0741, |
|
"step": 623104 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9789568405418634e-05, |
|
"loss": 4.0805, |
|
"step": 623616 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9781182457908114e-05, |
|
"loss": 4.0768, |
|
"step": 624128 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9772796510397594e-05, |
|
"loss": 4.0823, |
|
"step": 624640 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.976442694169081e-05, |
|
"loss": 4.0823, |
|
"step": 625152 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.975604099418029e-05, |
|
"loss": 4.0702, |
|
"step": 625664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.974765504666976e-05, |
|
"loss": 4.0693, |
|
"step": 626176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.973926909915924e-05, |
|
"loss": 4.0831, |
|
"step": 626688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.973088315164872e-05, |
|
"loss": 4.0847, |
|
"step": 627200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.97224972041382e-05, |
|
"loss": 4.08, |
|
"step": 627712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.971411125662768e-05, |
|
"loss": 4.0785, |
|
"step": 628224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.970572530911716e-05, |
|
"loss": 4.0846, |
|
"step": 628736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.969735574041037e-05, |
|
"loss": 4.0818, |
|
"step": 629248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.968896979289985e-05, |
|
"loss": 4.0829, |
|
"step": 629760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.968058384538934e-05, |
|
"loss": 4.0789, |
|
"step": 630272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.967219789787882e-05, |
|
"loss": 4.0781, |
|
"step": 630784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.966382832917203e-05, |
|
"loss": 4.0814, |
|
"step": 631296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.965544238166151e-05, |
|
"loss": 4.0677, |
|
"step": 631808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.964705643415099e-05, |
|
"loss": 4.0806, |
|
"step": 632320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9638686865444197e-05, |
|
"loss": 4.0855, |
|
"step": 632832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9630317296737406e-05, |
|
"loss": 4.0833, |
|
"step": 633344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9621931349226886e-05, |
|
"loss": 4.0848, |
|
"step": 633856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9613545401716366e-05, |
|
"loss": 4.0618, |
|
"step": 634368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9605159454205845e-05, |
|
"loss": 4.092, |
|
"step": 634880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9596773506695325e-05, |
|
"loss": 4.0685, |
|
"step": 635392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9588387559184805e-05, |
|
"loss": 4.0752, |
|
"step": 635904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.958000161167429e-05, |
|
"loss": 4.0723, |
|
"step": 636416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.957161566416377e-05, |
|
"loss": 4.0756, |
|
"step": 636928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.956322971665325e-05, |
|
"loss": 4.0796, |
|
"step": 637440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.955484376914273e-05, |
|
"loss": 4.0694, |
|
"step": 637952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.954645782163221e-05, |
|
"loss": 4.0754, |
|
"step": 638464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.953807187412169e-05, |
|
"loss": 4.0663, |
|
"step": 638976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.952968592661117e-05, |
|
"loss": 4.0805, |
|
"step": 639488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.952131635790438e-05, |
|
"loss": 4.0701, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.951293041039386e-05, |
|
"loss": 4.0842, |
|
"step": 640512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.950454446288334e-05, |
|
"loss": 4.0761, |
|
"step": 641024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.949615851537282e-05, |
|
"loss": 4.0732, |
|
"step": 641536 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.948780532546976e-05, |
|
"loss": 4.0653, |
|
"step": 642048 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9479419377959246e-05, |
|
"loss": 4.0745, |
|
"step": 642560 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9471033430448726e-05, |
|
"loss": 4.0804, |
|
"step": 643072 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9462647482938206e-05, |
|
"loss": 4.0749, |
|
"step": 643584 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9454261535427686e-05, |
|
"loss": 4.0713, |
|
"step": 644096 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9445908345524624e-05, |
|
"loss": 4.0647, |
|
"step": 644608 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9437522398014104e-05, |
|
"loss": 4.0721, |
|
"step": 645120 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9429136450503584e-05, |
|
"loss": 4.0679, |
|
"step": 645632 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9420750502993064e-05, |
|
"loss": 4.0729, |
|
"step": 646144 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9412364555482544e-05, |
|
"loss": 4.0687, |
|
"step": 646656 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9403978607972024e-05, |
|
"loss": 4.0703, |
|
"step": 647168 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9395592660461504e-05, |
|
"loss": 4.0744, |
|
"step": 647680 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9387206712950984e-05, |
|
"loss": 4.081, |
|
"step": 648192 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9378820765440464e-05, |
|
"loss": 4.0756, |
|
"step": 648704 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9370434817929943e-05, |
|
"loss": 4.0704, |
|
"step": 649216 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9362048870419423e-05, |
|
"loss": 4.0721, |
|
"step": 649728 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.9353662922908903e-05, |
|
"loss": 4.0721, |
|
"step": 650240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.934529335420212e-05, |
|
"loss": 4.0666, |
|
"step": 650752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.933690740669159e-05, |
|
"loss": 4.0803, |
|
"step": 651264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.932852145918107e-05, |
|
"loss": 4.0779, |
|
"step": 651776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.932013551167055e-05, |
|
"loss": 4.0775, |
|
"step": 652288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.93117823217675e-05, |
|
"loss": 4.0582, |
|
"step": 652800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.930339637425698e-05, |
|
"loss": 4.0678, |
|
"step": 653312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.929501042674646e-05, |
|
"loss": 4.0585, |
|
"step": 653824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.928662447923594e-05, |
|
"loss": 4.0797, |
|
"step": 654336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.927823853172542e-05, |
|
"loss": 4.0744, |
|
"step": 654848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.92698525842149e-05, |
|
"loss": 4.0668, |
|
"step": 655360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.926146663670438e-05, |
|
"loss": 4.0696, |
|
"step": 655872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.925308068919386e-05, |
|
"loss": 4.0728, |
|
"step": 656384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.924469474168334e-05, |
|
"loss": 4.063, |
|
"step": 656896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9236325172976546e-05, |
|
"loss": 4.0649, |
|
"step": 657408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9227939225466026e-05, |
|
"loss": 4.0738, |
|
"step": 657920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9219553277955506e-05, |
|
"loss": 4.0601, |
|
"step": 658432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9211167330444986e-05, |
|
"loss": 4.0637, |
|
"step": 658944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9202797761738195e-05, |
|
"loss": 4.0621, |
|
"step": 659456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9194411814227675e-05, |
|
"loss": 4.0688, |
|
"step": 659968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9186025866717155e-05, |
|
"loss": 4.0798, |
|
"step": 660480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9177639919206635e-05, |
|
"loss": 4.0649, |
|
"step": 660992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.916927035049985e-05, |
|
"loss": 4.0666, |
|
"step": 661504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.916090078179307e-05, |
|
"loss": 4.0631, |
|
"step": 662016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9152531213086276e-05, |
|
"loss": 4.0742, |
|
"step": 662528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9144145265575756e-05, |
|
"loss": 4.07, |
|
"step": 663040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9135759318065236e-05, |
|
"loss": 4.063, |
|
"step": 663552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9127373370554716e-05, |
|
"loss": 4.0711, |
|
"step": 664064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.911898742304419e-05, |
|
"loss": 4.0512, |
|
"step": 664576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.911060147553367e-05, |
|
"loss": 4.0556, |
|
"step": 665088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.910221552802315e-05, |
|
"loss": 4.0751, |
|
"step": 665600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.909382958051263e-05, |
|
"loss": 4.0656, |
|
"step": 666112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.908544363300211e-05, |
|
"loss": 4.06, |
|
"step": 666624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.907705768549159e-05, |
|
"loss": 4.0659, |
|
"step": 667136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.906867173798107e-05, |
|
"loss": 4.0443, |
|
"step": 667648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9060285790470555e-05, |
|
"loss": 4.0653, |
|
"step": 668160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9051916221763764e-05, |
|
"loss": 4.0712, |
|
"step": 668672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9043530274253244e-05, |
|
"loss": 4.0715, |
|
"step": 669184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9035144326742724e-05, |
|
"loss": 4.0529, |
|
"step": 669696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.9026774758035933e-05, |
|
"loss": 4.0588, |
|
"step": 670208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.901838881052541e-05, |
|
"loss": 4.0536, |
|
"step": 670720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.901000286301489e-05, |
|
"loss": 4.0675, |
|
"step": 671232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.900161691550437e-05, |
|
"loss": 4.0688, |
|
"step": 671744 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.899323096799385e-05, |
|
"loss": 4.0623, |
|
"step": 672256 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.898484502048333e-05, |
|
"loss": 4.0608, |
|
"step": 672768 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.897647545177654e-05, |
|
"loss": 4.0673, |
|
"step": 673280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.896808950426602e-05, |
|
"loss": 4.0642, |
|
"step": 673792 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.895970355675551e-05, |
|
"loss": 4.0607, |
|
"step": 674304 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.895133398804872e-05, |
|
"loss": 4.0626, |
|
"step": 674816 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.89429480405382e-05, |
|
"loss": 4.0611, |
|
"step": 675328 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.893456209302768e-05, |
|
"loss": 4.0621, |
|
"step": 675840 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.892617614551716e-05, |
|
"loss": 4.0687, |
|
"step": 676352 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.891779019800664e-05, |
|
"loss": 4.0517, |
|
"step": 676864 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.890940425049612e-05, |
|
"loss": 4.0579, |
|
"step": 677376 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.89010183029856e-05, |
|
"loss": 4.0703, |
|
"step": 677888 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.889263235547508e-05, |
|
"loss": 4.0625, |
|
"step": 678400 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.888424640796455e-05, |
|
"loss": 4.0665, |
|
"step": 678912 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.887587683925777e-05, |
|
"loss": 4.066, |
|
"step": 679424 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.886749089174725e-05, |
|
"loss": 4.0573, |
|
"step": 679936 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.885910494423673e-05, |
|
"loss": 4.0515, |
|
"step": 680448 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.885071899672621e-05, |
|
"loss": 4.0612, |
|
"step": 680960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.884233304921569e-05, |
|
"loss": 4.0743, |
|
"step": 681472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8833947101705167e-05, |
|
"loss": 4.0745, |
|
"step": 681984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8825561154194647e-05, |
|
"loss": 4.0563, |
|
"step": 682496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8817191585487856e-05, |
|
"loss": 4.0581, |
|
"step": 683008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8808805637977336e-05, |
|
"loss": 4.0631, |
|
"step": 683520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8800419690466816e-05, |
|
"loss": 4.0596, |
|
"step": 684032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8792033742956296e-05, |
|
"loss": 4.0634, |
|
"step": 684544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8783647795445775e-05, |
|
"loss": 4.0679, |
|
"step": 685056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8775261847935255e-05, |
|
"loss": 4.0721, |
|
"step": 685568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8766875900424735e-05, |
|
"loss": 4.0536, |
|
"step": 686080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.8758489952914215e-05, |
|
"loss": 4.0643, |
|
"step": 686592 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.138033866882324, |
|
"eval_runtime": 554.944, |
|
"eval_samples_per_second": 687.621, |
|
"eval_steps_per_second": 21.489, |
|
"step": 686871 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.875012038420743e-05, |
|
"loss": 4.0694, |
|
"step": 687104 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.874173443669691e-05, |
|
"loss": 4.0733, |
|
"step": 687616 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.873334848918639e-05, |
|
"loss": 4.0623, |
|
"step": 688128 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.872496254167587e-05, |
|
"loss": 4.0567, |
|
"step": 688640 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.871657659416535e-05, |
|
"loss": 4.0562, |
|
"step": 689152 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.870819064665483e-05, |
|
"loss": 4.0594, |
|
"step": 689664 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.869980469914431e-05, |
|
"loss": 4.0597, |
|
"step": 690176 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.869141875163379e-05, |
|
"loss": 4.0467, |
|
"step": 690688 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8683032804123264e-05, |
|
"loss": 4.0561, |
|
"step": 691200 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8674646856612744e-05, |
|
"loss": 4.0722, |
|
"step": 691712 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8666260909102224e-05, |
|
"loss": 4.0553, |
|
"step": 692224 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8657874961591704e-05, |
|
"loss": 4.0533, |
|
"step": 692736 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8649489014081184e-05, |
|
"loss": 4.0705, |
|
"step": 693248 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8641103066570664e-05, |
|
"loss": 4.0498, |
|
"step": 693760 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8632717119060144e-05, |
|
"loss": 4.0593, |
|
"step": 694272 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8624331171549624e-05, |
|
"loss": 4.0418, |
|
"step": 694784 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.861594522403911e-05, |
|
"loss": 4.0417, |
|
"step": 695296 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.860755927652859e-05, |
|
"loss": 4.0576, |
|
"step": 695808 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.859917332901807e-05, |
|
"loss": 4.0493, |
|
"step": 696320 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.859078738150755e-05, |
|
"loss": 4.0728, |
|
"step": 696832 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.858243419160449e-05, |
|
"loss": 4.0582, |
|
"step": 697344 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.857404824409397e-05, |
|
"loss": 4.0694, |
|
"step": 697856 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.856566229658345e-05, |
|
"loss": 4.0537, |
|
"step": 698368 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.855727634907293e-05, |
|
"loss": 4.059, |
|
"step": 698880 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.854889040156241e-05, |
|
"loss": 4.0469, |
|
"step": 699392 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.854050445405189e-05, |
|
"loss": 4.0579, |
|
"step": 699904 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.853211850654137e-05, |
|
"loss": 4.0516, |
|
"step": 700416 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.852373255903085e-05, |
|
"loss": 4.0567, |
|
"step": 700928 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8515362990324064e-05, |
|
"loss": 4.054, |
|
"step": 701440 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.8506977042813544e-05, |
|
"loss": 4.0457, |
|
"step": 701952 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.849860747410675e-05, |
|
"loss": 4.042, |
|
"step": 702464 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.849022152659623e-05, |
|
"loss": 4.0606, |
|
"step": 702976 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.848183557908571e-05, |
|
"loss": 4.0558, |
|
"step": 703488 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.847344963157519e-05, |
|
"loss": 4.0556, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.846506368406467e-05, |
|
"loss": 4.0528, |
|
"step": 704512 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.845667773655415e-05, |
|
"loss": 4.0588, |
|
"step": 705024 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8448291789043626e-05, |
|
"loss": 4.0517, |
|
"step": 705536 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8439905841533106e-05, |
|
"loss": 4.0564, |
|
"step": 706048 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.843153627282632e-05, |
|
"loss": 4.0572, |
|
"step": 706560 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.842316670411953e-05, |
|
"loss": 4.0535, |
|
"step": 707072 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.841478075660902e-05, |
|
"loss": 4.0536, |
|
"step": 707584 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.84063948090985e-05, |
|
"loss": 4.0486, |
|
"step": 708096 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.839800886158798e-05, |
|
"loss": 4.0499, |
|
"step": 708608 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.838962291407745e-05, |
|
"loss": 4.0617, |
|
"step": 709120 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.838125334537067e-05, |
|
"loss": 4.0583, |
|
"step": 709632 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.837286739786015e-05, |
|
"loss": 4.0592, |
|
"step": 710144 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.836448145034963e-05, |
|
"loss": 4.0394, |
|
"step": 710656 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.83560955028391e-05, |
|
"loss": 4.0641, |
|
"step": 711168 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.834770955532858e-05, |
|
"loss": 4.0467, |
|
"step": 711680 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.833932360781806e-05, |
|
"loss": 4.0484, |
|
"step": 712192 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.833093766030754e-05, |
|
"loss": 4.0485, |
|
"step": 712704 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.832255171279702e-05, |
|
"loss": 4.0459, |
|
"step": 713216 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.83141657652865e-05, |
|
"loss": 4.0571, |
|
"step": 713728 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8305796196579716e-05, |
|
"loss": 4.0445, |
|
"step": 714240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8297410249069195e-05, |
|
"loss": 4.0528, |
|
"step": 714752 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8289024301558675e-05, |
|
"loss": 4.036, |
|
"step": 715264 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8280638354048155e-05, |
|
"loss": 4.0546, |
|
"step": 715776 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8272252406537635e-05, |
|
"loss": 4.0431, |
|
"step": 716288 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8263882837830844e-05, |
|
"loss": 4.0602, |
|
"step": 716800 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8255496890320324e-05, |
|
"loss": 4.0551, |
|
"step": 717312 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8247110942809804e-05, |
|
"loss": 4.0468, |
|
"step": 717824 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8238724995299284e-05, |
|
"loss": 4.041, |
|
"step": 718336 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8230339047788764e-05, |
|
"loss": 4.0463, |
|
"step": 718848 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.822196947908197e-05, |
|
"loss": 4.05, |
|
"step": 719360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.821358353157145e-05, |
|
"loss": 4.0526, |
|
"step": 719872 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.820519758406093e-05, |
|
"loss": 4.0478, |
|
"step": 720384 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.819682801535415e-05, |
|
"loss": 4.0404, |
|
"step": 720896 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.818844206784363e-05, |
|
"loss": 4.047, |
|
"step": 721408 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.818005612033311e-05, |
|
"loss": 4.0418, |
|
"step": 721920 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.817167017282259e-05, |
|
"loss": 4.0482, |
|
"step": 722432 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.816328422531207e-05, |
|
"loss": 4.0426, |
|
"step": 722944 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.815489827780155e-05, |
|
"loss": 4.042, |
|
"step": 723456 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.814651233029103e-05, |
|
"loss": 4.0538, |
|
"step": 723968 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.813812638278051e-05, |
|
"loss": 4.0565, |
|
"step": 724480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.812975681407372e-05, |
|
"loss": 4.0497, |
|
"step": 724992 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.812138724536693e-05, |
|
"loss": 4.0476, |
|
"step": 725504 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.811300129785641e-05, |
|
"loss": 4.0423, |
|
"step": 726016 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.810461535034589e-05, |
|
"loss": 4.05, |
|
"step": 726528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8096229402835374e-05, |
|
"loss": 4.0439, |
|
"step": 727040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8087843455324854e-05, |
|
"loss": 4.0531, |
|
"step": 727552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8079457507814334e-05, |
|
"loss": 4.0564, |
|
"step": 728064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8071071560303814e-05, |
|
"loss": 4.0546, |
|
"step": 728576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.806268561279329e-05, |
|
"loss": 4.0381, |
|
"step": 729088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.80543160440865e-05, |
|
"loss": 4.0396, |
|
"step": 729600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.804593009657598e-05, |
|
"loss": 4.0312, |
|
"step": 730112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.803754414906546e-05, |
|
"loss": 4.0566, |
|
"step": 730624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.8029158201554936e-05, |
|
"loss": 4.0483, |
|
"step": 731136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.802078863284815e-05, |
|
"loss": 4.0472, |
|
"step": 731648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.801240268533763e-05, |
|
"loss": 4.0463, |
|
"step": 732160 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.800401673782711e-05, |
|
"loss": 4.0469, |
|
"step": 732672 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.799563079031659e-05, |
|
"loss": 4.0377, |
|
"step": 733184 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.798724484280607e-05, |
|
"loss": 4.0407, |
|
"step": 733696 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.797887527409929e-05, |
|
"loss": 4.0526, |
|
"step": 734208 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.797048932658876e-05, |
|
"loss": 4.0333, |
|
"step": 734720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.796210337907824e-05, |
|
"loss": 4.0387, |
|
"step": 735232 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7953733810371456e-05, |
|
"loss": 4.0385, |
|
"step": 735744 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7945347862860936e-05, |
|
"loss": 4.045, |
|
"step": 736256 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.793696191535041e-05, |
|
"loss": 4.0539, |
|
"step": 736768 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.792857596783989e-05, |
|
"loss": 4.0422, |
|
"step": 737280 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.792019002032937e-05, |
|
"loss": 4.0468, |
|
"step": 737792 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.791180407281885e-05, |
|
"loss": 4.0398, |
|
"step": 738304 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.790341812530833e-05, |
|
"loss": 4.0497, |
|
"step": 738816 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.789503217779781e-05, |
|
"loss": 4.0448, |
|
"step": 739328 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7886662609091025e-05, |
|
"loss": 4.0396, |
|
"step": 739840 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7878293040384234e-05, |
|
"loss": 4.0435, |
|
"step": 740352 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7869907092873714e-05, |
|
"loss": 4.0308, |
|
"step": 740864 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7861521145363194e-05, |
|
"loss": 4.0332, |
|
"step": 741376 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7853135197852674e-05, |
|
"loss": 4.0474, |
|
"step": 741888 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.784476562914588e-05, |
|
"loss": 4.0418, |
|
"step": 742400 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.783637968163536e-05, |
|
"loss": 4.0348, |
|
"step": 742912 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.782799373412484e-05, |
|
"loss": 4.0438, |
|
"step": 743424 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.781960778661432e-05, |
|
"loss": 4.0223, |
|
"step": 743936 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.78112218391038e-05, |
|
"loss": 4.0384, |
|
"step": 744448 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.780283589159328e-05, |
|
"loss": 4.0473, |
|
"step": 744960 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.779444994408276e-05, |
|
"loss": 4.047, |
|
"step": 745472 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.778606399657225e-05, |
|
"loss": 4.0335, |
|
"step": 745984 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.777769442786546e-05, |
|
"loss": 4.0331, |
|
"step": 746496 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.776930848035494e-05, |
|
"loss": 4.0296, |
|
"step": 747008 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.776092253284442e-05, |
|
"loss": 4.0407, |
|
"step": 747520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.775255296413763e-05, |
|
"loss": 4.0463, |
|
"step": 748032 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.774416701662711e-05, |
|
"loss": 4.0437, |
|
"step": 748544 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.773578106911659e-05, |
|
"loss": 4.0323, |
|
"step": 749056 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.772739512160607e-05, |
|
"loss": 4.0488, |
|
"step": 749568 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.771902555289928e-05, |
|
"loss": 4.0368, |
|
"step": 750080 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7710639605388757e-05, |
|
"loss": 4.039, |
|
"step": 750592 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.770227003668197e-05, |
|
"loss": 4.0353, |
|
"step": 751104 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.769388408917145e-05, |
|
"loss": 4.0425, |
|
"step": 751616 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.768549814166093e-05, |
|
"loss": 4.0375, |
|
"step": 752128 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.767711219415041e-05, |
|
"loss": 4.044, |
|
"step": 752640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.766872624663989e-05, |
|
"loss": 4.0314, |
|
"step": 753152 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.766034029912937e-05, |
|
"loss": 4.0364, |
|
"step": 753664 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.765195435161885e-05, |
|
"loss": 4.0492, |
|
"step": 754176 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.764356840410833e-05, |
|
"loss": 4.0393, |
|
"step": 754688 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.763518245659781e-05, |
|
"loss": 4.0415, |
|
"step": 755200 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.762679650908729e-05, |
|
"loss": 4.0476, |
|
"step": 755712 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.761841056157677e-05, |
|
"loss": 4.0305, |
|
"step": 756224 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7610024614066245e-05, |
|
"loss": 4.0323, |
|
"step": 756736 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7601638666555725e-05, |
|
"loss": 4.0369, |
|
"step": 757248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7593252719045205e-05, |
|
"loss": 4.0447, |
|
"step": 757760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.758488315033842e-05, |
|
"loss": 4.0559, |
|
"step": 758272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.75764972028279e-05, |
|
"loss": 4.0351, |
|
"step": 758784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.756812763412112e-05, |
|
"loss": 4.0382, |
|
"step": 759296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.75597416866106e-05, |
|
"loss": 4.0371, |
|
"step": 759808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.755135573910007e-05, |
|
"loss": 4.0368, |
|
"step": 760320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.754296979158955e-05, |
|
"loss": 4.0404, |
|
"step": 760832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.7534600222882766e-05, |
|
"loss": 4.0437, |
|
"step": 761344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.752621427537224e-05, |
|
"loss": 4.0458, |
|
"step": 761856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.751782832786172e-05, |
|
"loss": 4.031, |
|
"step": 762368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.75094423803512e-05, |
|
"loss": 4.0416, |
|
"step": 762880 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.122170925140381, |
|
"eval_runtime": 543.8826, |
|
"eval_samples_per_second": 701.605, |
|
"eval_steps_per_second": 21.926, |
|
"step": 763190 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.750105643284068e-05, |
|
"loss": 4.041, |
|
"step": 763392 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.749267048533016e-05, |
|
"loss": 4.0473, |
|
"step": 763904 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.748428453781964e-05, |
|
"loss": 4.0415, |
|
"step": 764416 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.747589859030912e-05, |
|
"loss": 4.0312, |
|
"step": 764928 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7467512642798605e-05, |
|
"loss": 4.0373, |
|
"step": 765440 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7459143074091815e-05, |
|
"loss": 4.0337, |
|
"step": 765952 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7450757126581294e-05, |
|
"loss": 4.0405, |
|
"step": 766464 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7442371179070774e-05, |
|
"loss": 4.0185, |
|
"step": 766976 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7433985231560254e-05, |
|
"loss": 4.0337, |
|
"step": 767488 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7425599284049734e-05, |
|
"loss": 4.0474, |
|
"step": 768000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7417229715342943e-05, |
|
"loss": 4.0347, |
|
"step": 768512 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7408843767832423e-05, |
|
"loss": 4.0341, |
|
"step": 769024 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.74004578203219e-05, |
|
"loss": 4.0446, |
|
"step": 769536 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.739207187281138e-05, |
|
"loss": 4.0259, |
|
"step": 770048 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.738368592530086e-05, |
|
"loss": 4.0367, |
|
"step": 770560 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.737529997779034e-05, |
|
"loss": 4.0184, |
|
"step": 771072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.736691403027982e-05, |
|
"loss": 4.0167, |
|
"step": 771584 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.735854446157304e-05, |
|
"loss": 4.0387, |
|
"step": 772096 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.735015851406252e-05, |
|
"loss": 4.0273, |
|
"step": 772608 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.7341772566552e-05, |
|
"loss": 4.0516, |
|
"step": 773120 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.733338661904148e-05, |
|
"loss": 4.0326, |
|
"step": 773632 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.732501705033469e-05, |
|
"loss": 4.0463, |
|
"step": 774144 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.731663110282417e-05, |
|
"loss": 4.0342, |
|
"step": 774656 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.730824515531365e-05, |
|
"loss": 4.0354, |
|
"step": 775168 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.729985920780313e-05, |
|
"loss": 4.0232, |
|
"step": 775680 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.72914732602926e-05, |
|
"loss": 4.037, |
|
"step": 776192 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.728308731278208e-05, |
|
"loss": 4.0297, |
|
"step": 776704 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.727470136527156e-05, |
|
"loss": 4.0323, |
|
"step": 777216 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.726631541776104e-05, |
|
"loss": 4.0291, |
|
"step": 777728 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.725794584905426e-05, |
|
"loss": 4.0285, |
|
"step": 778240 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.724955990154374e-05, |
|
"loss": 4.0163, |
|
"step": 778752 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.724117395403322e-05, |
|
"loss": 4.0381, |
|
"step": 779264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.723280438532643e-05, |
|
"loss": 4.0329, |
|
"step": 779776 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7224418437815906e-05, |
|
"loss": 4.0316, |
|
"step": 780288 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7216032490305386e-05, |
|
"loss": 4.0321, |
|
"step": 780800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.72076629215986e-05, |
|
"loss": 4.0342, |
|
"step": 781312 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7199276974088075e-05, |
|
"loss": 4.0337, |
|
"step": 781824 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7190891026577555e-05, |
|
"loss": 4.0339, |
|
"step": 782336 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7182505079067035e-05, |
|
"loss": 4.0318, |
|
"step": 782848 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7174119131556515e-05, |
|
"loss": 4.0317, |
|
"step": 783360 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7165733184045995e-05, |
|
"loss": 4.0311, |
|
"step": 783872 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.715734723653548e-05, |
|
"loss": 4.028, |
|
"step": 784384 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.714896128902496e-05, |
|
"loss": 4.0258, |
|
"step": 784896 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.714057534151444e-05, |
|
"loss": 4.0363, |
|
"step": 785408 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.713218939400392e-05, |
|
"loss": 4.0323, |
|
"step": 785920 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.71238034464934e-05, |
|
"loss": 4.0372, |
|
"step": 786432 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.711541749898288e-05, |
|
"loss": 4.0183, |
|
"step": 786944 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.710706430907982e-05, |
|
"loss": 4.0422, |
|
"step": 787456 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.70986783615693e-05, |
|
"loss": 4.0294, |
|
"step": 787968 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.709029241405878e-05, |
|
"loss": 4.0228, |
|
"step": 788480 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.708190646654826e-05, |
|
"loss": 4.027, |
|
"step": 788992 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.707352051903774e-05, |
|
"loss": 4.0273, |
|
"step": 789504 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.706513457152722e-05, |
|
"loss": 4.0327, |
|
"step": 790016 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.70567486240167e-05, |
|
"loss": 4.0224, |
|
"step": 790528 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.704836267650618e-05, |
|
"loss": 4.0298, |
|
"step": 791040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7039993107799395e-05, |
|
"loss": 4.0126, |
|
"step": 791552 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7031607160288875e-05, |
|
"loss": 4.0355, |
|
"step": 792064 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7023221212778355e-05, |
|
"loss": 4.0197, |
|
"step": 792576 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7014835265267835e-05, |
|
"loss": 4.0334, |
|
"step": 793088 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.7006465696561044e-05, |
|
"loss": 4.0329, |
|
"step": 793600 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6998079749050524e-05, |
|
"loss": 4.0264, |
|
"step": 794112 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6989693801540004e-05, |
|
"loss": 4.0217, |
|
"step": 794624 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6981307854029484e-05, |
|
"loss": 4.021, |
|
"step": 795136 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.697293828532269e-05, |
|
"loss": 4.0243, |
|
"step": 795648 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.696455233781217e-05, |
|
"loss": 4.0337, |
|
"step": 796160 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.695616639030165e-05, |
|
"loss": 4.0272, |
|
"step": 796672 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.694778044279113e-05, |
|
"loss": 4.0173, |
|
"step": 797184 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.693941087408435e-05, |
|
"loss": 4.0233, |
|
"step": 797696 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.693104130537756e-05, |
|
"loss": 4.0227, |
|
"step": 798208 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.692265535786704e-05, |
|
"loss": 4.0267, |
|
"step": 798720 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.691426941035652e-05, |
|
"loss": 4.0226, |
|
"step": 799232 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6905883462846e-05, |
|
"loss": 4.0247, |
|
"step": 799744 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.689749751533548e-05, |
|
"loss": 4.0253, |
|
"step": 800256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.688911156782496e-05, |
|
"loss": 4.0361, |
|
"step": 800768 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.688072562031444e-05, |
|
"loss": 4.0273, |
|
"step": 801280 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.687233967280391e-05, |
|
"loss": 4.0229, |
|
"step": 801792 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6863970104097126e-05, |
|
"loss": 4.0242, |
|
"step": 802304 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6855584156586606e-05, |
|
"loss": 4.0293, |
|
"step": 802816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6847198209076086e-05, |
|
"loss": 4.0259, |
|
"step": 803328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6838812261565566e-05, |
|
"loss": 4.0285, |
|
"step": 803840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6830426314055046e-05, |
|
"loss": 4.0346, |
|
"step": 804352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6822040366544526e-05, |
|
"loss": 4.0346, |
|
"step": 804864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6813654419034006e-05, |
|
"loss": 4.019, |
|
"step": 805376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6805268471523486e-05, |
|
"loss": 4.0133, |
|
"step": 805888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6796898902816695e-05, |
|
"loss": 4.0097, |
|
"step": 806400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6788512955306175e-05, |
|
"loss": 4.0334, |
|
"step": 806912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6780127007795655e-05, |
|
"loss": 4.0251, |
|
"step": 807424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6771741060285135e-05, |
|
"loss": 4.0274, |
|
"step": 807936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6763355112774615e-05, |
|
"loss": 4.0232, |
|
"step": 808448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.6754985544067824e-05, |
|
"loss": 4.0265, |
|
"step": 808960 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6746599596557304e-05, |
|
"loss": 4.0174, |
|
"step": 809472 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.673821364904679e-05, |
|
"loss": 4.0197, |
|
"step": 809984 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.672984408034e-05, |
|
"loss": 4.0315, |
|
"step": 810496 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.672145813282948e-05, |
|
"loss": 4.0117, |
|
"step": 811008 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.671307218531896e-05, |
|
"loss": 4.0167, |
|
"step": 811520 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.670468623780844e-05, |
|
"loss": 4.016, |
|
"step": 812032 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.669630029029792e-05, |
|
"loss": 4.0278, |
|
"step": 812544 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.66879143427874e-05, |
|
"loss": 4.0261, |
|
"step": 813056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.667952839527688e-05, |
|
"loss": 4.0277, |
|
"step": 813568 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.667114244776636e-05, |
|
"loss": 4.0233, |
|
"step": 814080 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.666275650025584e-05, |
|
"loss": 4.018, |
|
"step": 814592 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.665438693154905e-05, |
|
"loss": 4.0287, |
|
"step": 815104 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.664600098403853e-05, |
|
"loss": 4.024, |
|
"step": 815616 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.663761503652801e-05, |
|
"loss": 4.0227, |
|
"step": 816128 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6629245467821224e-05, |
|
"loss": 4.0221, |
|
"step": 816640 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6620859520310704e-05, |
|
"loss": 4.0063, |
|
"step": 817152 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6612473572800184e-05, |
|
"loss": 4.013, |
|
"step": 817664 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6604087625289664e-05, |
|
"loss": 4.0273, |
|
"step": 818176 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6595701677779144e-05, |
|
"loss": 4.0227, |
|
"step": 818688 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6587315730268624e-05, |
|
"loss": 4.0131, |
|
"step": 819200 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.657894616156183e-05, |
|
"loss": 4.0244, |
|
"step": 819712 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.657056021405131e-05, |
|
"loss": 4.0027, |
|
"step": 820224 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.656217426654079e-05, |
|
"loss": 4.016, |
|
"step": 820736 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.655378831903027e-05, |
|
"loss": 4.0255, |
|
"step": 821248 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6545402371519746e-05, |
|
"loss": 4.0274, |
|
"step": 821760 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6537016424009226e-05, |
|
"loss": 4.0132, |
|
"step": 822272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.652863047649871e-05, |
|
"loss": 4.0142, |
|
"step": 822784 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.652024452898819e-05, |
|
"loss": 4.0061, |
|
"step": 823296 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.651185858147767e-05, |
|
"loss": 4.0193, |
|
"step": 823808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.650348901277088e-05, |
|
"loss": 4.0261, |
|
"step": 824320 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.649510306526036e-05, |
|
"loss": 4.0203, |
|
"step": 824832 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.648671711774984e-05, |
|
"loss": 4.0118, |
|
"step": 825344 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.647833117023932e-05, |
|
"loss": 4.0249, |
|
"step": 825856 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.646996160153253e-05, |
|
"loss": 4.0211, |
|
"step": 826368 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.646157565402201e-05, |
|
"loss": 4.0167, |
|
"step": 826880 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.645318970651149e-05, |
|
"loss": 4.0108, |
|
"step": 827392 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.644480375900097e-05, |
|
"loss": 4.0275, |
|
"step": 827904 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.643641781149045e-05, |
|
"loss": 4.0163, |
|
"step": 828416 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.642804824278366e-05, |
|
"loss": 4.0233, |
|
"step": 828928 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.641966229527315e-05, |
|
"loss": 4.0112, |
|
"step": 829440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6411292726566356e-05, |
|
"loss": 4.0128, |
|
"step": 829952 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6402906779055836e-05, |
|
"loss": 4.0247, |
|
"step": 830464 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6394520831545316e-05, |
|
"loss": 4.0201, |
|
"step": 830976 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6386134884034796e-05, |
|
"loss": 4.021, |
|
"step": 831488 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6377748936524276e-05, |
|
"loss": 4.0258, |
|
"step": 832000 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6369362989013756e-05, |
|
"loss": 4.0134, |
|
"step": 832512 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6360993420306965e-05, |
|
"loss": 4.0078, |
|
"step": 833024 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6352607472796445e-05, |
|
"loss": 4.0173, |
|
"step": 833536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6344221525285925e-05, |
|
"loss": 4.0231, |
|
"step": 834048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6335835577775405e-05, |
|
"loss": 4.0361, |
|
"step": 834560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6327449630264884e-05, |
|
"loss": 4.0148, |
|
"step": 835072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6319063682754364e-05, |
|
"loss": 4.0172, |
|
"step": 835584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6310677735243844e-05, |
|
"loss": 4.0167, |
|
"step": 836096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.630229178773333e-05, |
|
"loss": 4.0162, |
|
"step": 836608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.629392221902654e-05, |
|
"loss": 4.0194, |
|
"step": 837120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.628555265031975e-05, |
|
"loss": 4.0213, |
|
"step": 837632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.627716670280923e-05, |
|
"loss": 4.0286, |
|
"step": 838144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.626878075529871e-05, |
|
"loss": 4.0089, |
|
"step": 838656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.626039480778819e-05, |
|
"loss": 4.023, |
|
"step": 839168 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.108505725860596, |
|
"eval_runtime": 544.1634, |
|
"eval_samples_per_second": 701.243, |
|
"eval_steps_per_second": 21.914, |
|
"step": 839509 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.625200886027767e-05, |
|
"loss": 4.034, |
|
"step": 839680 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.624362291276715e-05, |
|
"loss": 4.0272, |
|
"step": 840192 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.623523696525663e-05, |
|
"loss": 4.0207, |
|
"step": 840704 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.622685101774611e-05, |
|
"loss": 4.0109, |
|
"step": 841216 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.621846507023558e-05, |
|
"loss": 4.0196, |
|
"step": 841728 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.621007912272507e-05, |
|
"loss": 4.0136, |
|
"step": 842240 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.620169317521455e-05, |
|
"loss": 4.019, |
|
"step": 842752 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.619330722770403e-05, |
|
"loss": 4.0005, |
|
"step": 843264 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.618492128019351e-05, |
|
"loss": 4.016, |
|
"step": 843776 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.617653533268299e-05, |
|
"loss": 4.025, |
|
"step": 844288 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.61681657639762e-05, |
|
"loss": 4.012, |
|
"step": 844800 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.615977981646568e-05, |
|
"loss": 4.015, |
|
"step": 845312 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.615139386895516e-05, |
|
"loss": 4.0279, |
|
"step": 845824 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.614300792144464e-05, |
|
"loss": 4.0088, |
|
"step": 846336 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.613462197393412e-05, |
|
"loss": 4.0133, |
|
"step": 846848 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.61262360264236e-05, |
|
"loss": 4.001, |
|
"step": 847360 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.611785007891308e-05, |
|
"loss": 3.9928, |
|
"step": 847872 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.610946413140256e-05, |
|
"loss": 4.0206, |
|
"step": 848384 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.610107818389204e-05, |
|
"loss": 4.0052, |
|
"step": 848896 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.609269223638152e-05, |
|
"loss": 4.0305, |
|
"step": 849408 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6084306288871e-05, |
|
"loss": 4.0157, |
|
"step": 849920 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.607592034136048e-05, |
|
"loss": 4.0249, |
|
"step": 850432 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.606753439384996e-05, |
|
"loss": 4.0115, |
|
"step": 850944 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.605914844633944e-05, |
|
"loss": 4.0213, |
|
"step": 851456 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.605076249882892e-05, |
|
"loss": 3.9982, |
|
"step": 851968 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.60423765513184e-05, |
|
"loss": 4.0186, |
|
"step": 852480 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6034006982611606e-05, |
|
"loss": 4.0104, |
|
"step": 852992 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6025621035101086e-05, |
|
"loss": 4.0136, |
|
"step": 853504 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6017235087590566e-05, |
|
"loss": 4.0071, |
|
"step": 854016 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6008849140080046e-05, |
|
"loss": 4.0095, |
|
"step": 854528 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6000479571373255e-05, |
|
"loss": 3.997, |
|
"step": 855040 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5992093623862735e-05, |
|
"loss": 4.0162, |
|
"step": 855552 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5983707676352215e-05, |
|
"loss": 4.0159, |
|
"step": 856064 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.59753217288417e-05, |
|
"loss": 4.0118, |
|
"step": 856576 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.596695216013491e-05, |
|
"loss": 4.0129, |
|
"step": 857088 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.595856621262439e-05, |
|
"loss": 4.0205, |
|
"step": 857600 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.595018026511387e-05, |
|
"loss": 4.009, |
|
"step": 858112 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.594181069640708e-05, |
|
"loss": 4.0151, |
|
"step": 858624 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.593342474889656e-05, |
|
"loss": 4.0124, |
|
"step": 859136 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.592503880138604e-05, |
|
"loss": 4.0147, |
|
"step": 859648 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.591665285387552e-05, |
|
"loss": 4.0101, |
|
"step": 860160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5908266906365e-05, |
|
"loss": 4.0031, |
|
"step": 860672 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.589988095885448e-05, |
|
"loss": 4.0083, |
|
"step": 861184 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.589151139014769e-05, |
|
"loss": 4.0174, |
|
"step": 861696 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.588312544263717e-05, |
|
"loss": 4.0162, |
|
"step": 862208 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5874739495126656e-05, |
|
"loss": 4.0166, |
|
"step": 862720 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5866353547616136e-05, |
|
"loss": 4.0042, |
|
"step": 863232 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5857967600105615e-05, |
|
"loss": 4.0237, |
|
"step": 863744 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5849581652595095e-05, |
|
"loss": 4.008, |
|
"step": 864256 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5841195705084575e-05, |
|
"loss": 4.0007, |
|
"step": 864768 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5832842515181514e-05, |
|
"loss": 4.0014, |
|
"step": 865280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5824456567670994e-05, |
|
"loss": 4.0112, |
|
"step": 865792 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5816070620160474e-05, |
|
"loss": 4.0147, |
|
"step": 866304 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5807684672649953e-05, |
|
"loss": 4.0022, |
|
"step": 866816 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5799298725139433e-05, |
|
"loss": 4.0128, |
|
"step": 867328 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.579091277762891e-05, |
|
"loss": 3.9931, |
|
"step": 867840 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.578252683011839e-05, |
|
"loss": 4.0167, |
|
"step": 868352 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.577414088260787e-05, |
|
"loss": 4.0067, |
|
"step": 868864 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.576575493509735e-05, |
|
"loss": 4.013, |
|
"step": 869376 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.575738536639057e-05, |
|
"loss": 4.0089, |
|
"step": 869888 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.574899941888005e-05, |
|
"loss": 4.0082, |
|
"step": 870400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.574061347136953e-05, |
|
"loss": 4.0048, |
|
"step": 870912 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.573222752385901e-05, |
|
"loss": 4.0028, |
|
"step": 871424 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.572384157634848e-05, |
|
"loss": 4.0052, |
|
"step": 871936 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.571545562883796e-05, |
|
"loss": 4.0144, |
|
"step": 872448 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.570706968132744e-05, |
|
"loss": 4.0093, |
|
"step": 872960 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.569870011262066e-05, |
|
"loss": 3.9938, |
|
"step": 873472 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.569031416511013e-05, |
|
"loss": 4.0075, |
|
"step": 873984 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.568192821759961e-05, |
|
"loss": 4.0033, |
|
"step": 874496 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.567354227008909e-05, |
|
"loss": 4.0103, |
|
"step": 875008 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.566515632257858e-05, |
|
"loss": 3.9998, |
|
"step": 875520 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.565677037506806e-05, |
|
"loss": 4.011, |
|
"step": 876032 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.564838442755754e-05, |
|
"loss": 4.005, |
|
"step": 876544 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.563999848004702e-05, |
|
"loss": 4.0124, |
|
"step": 877056 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.56316125325365e-05, |
|
"loss": 4.012, |
|
"step": 877568 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.562324296382971e-05, |
|
"loss": 4.0076, |
|
"step": 878080 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.561485701631919e-05, |
|
"loss": 4.0025, |
|
"step": 878592 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5606471068808667e-05, |
|
"loss": 4.0081, |
|
"step": 879104 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5598101500101876e-05, |
|
"loss": 4.0044, |
|
"step": 879616 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5589715552591356e-05, |
|
"loss": 4.0171, |
|
"step": 880128 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5581329605080836e-05, |
|
"loss": 4.0139, |
|
"step": 880640 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.5572943657570316e-05, |
|
"loss": 4.0204, |
|
"step": 881152 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.556457408886353e-05, |
|
"loss": 3.9996, |
|
"step": 881664 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.555618814135301e-05, |
|
"loss": 3.9965, |
|
"step": 882176 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.554780219384249e-05, |
|
"loss": 3.991, |
|
"step": 882688 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.553941624633197e-05, |
|
"loss": 4.0141, |
|
"step": 883200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.553103029882145e-05, |
|
"loss": 4.0058, |
|
"step": 883712 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.552264435131093e-05, |
|
"loss": 4.0072, |
|
"step": 884224 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.551427478260414e-05, |
|
"loss": 4.0074, |
|
"step": 884736 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.550588883509362e-05, |
|
"loss": 4.0074, |
|
"step": 885248 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.54975028875831e-05, |
|
"loss": 3.9948, |
|
"step": 885760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.548911694007258e-05, |
|
"loss": 4.0055, |
|
"step": 886272 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.548073099256206e-05, |
|
"loss": 4.01, |
|
"step": 886784 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.547234504505154e-05, |
|
"loss": 3.9946, |
|
"step": 887296 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.546395909754102e-05, |
|
"loss": 3.9934, |
|
"step": 887808 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.54555731500305e-05, |
|
"loss": 4.0002, |
|
"step": 888320 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.544720358132371e-05, |
|
"loss": 4.0098, |
|
"step": 888832 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5438817633813196e-05, |
|
"loss": 4.0077, |
|
"step": 889344 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.543043168630267e-05, |
|
"loss": 4.0096, |
|
"step": 889856 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5422062117595885e-05, |
|
"loss": 4.0086, |
|
"step": 890368 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5413676170085365e-05, |
|
"loss": 3.9955, |
|
"step": 890880 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5405290222574845e-05, |
|
"loss": 4.012, |
|
"step": 891392 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.539690427506432e-05, |
|
"loss": 4.0074, |
|
"step": 891904 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.53885183275538e-05, |
|
"loss": 4.0015, |
|
"step": 892416 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5380148758847014e-05, |
|
"loss": 4.0004, |
|
"step": 892928 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5371762811336494e-05, |
|
"loss": 3.9957, |
|
"step": 893440 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.536337686382597e-05, |
|
"loss": 3.997, |
|
"step": 893952 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.535499091631545e-05, |
|
"loss": 4.0026, |
|
"step": 894464 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5346604968804934e-05, |
|
"loss": 4.0058, |
|
"step": 894976 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5338219021294414e-05, |
|
"loss": 3.9987, |
|
"step": 895488 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5329833073783894e-05, |
|
"loss": 4.0042, |
|
"step": 896000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5321447126273373e-05, |
|
"loss": 3.9881, |
|
"step": 896512 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5313061178762853e-05, |
|
"loss": 3.9945, |
|
"step": 897024 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.530469161005606e-05, |
|
"loss": 4.0089, |
|
"step": 897536 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.529630566254554e-05, |
|
"loss": 4.0087, |
|
"step": 898048 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.528791971503502e-05, |
|
"loss": 3.9946, |
|
"step": 898560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.52795337675245e-05, |
|
"loss": 3.996, |
|
"step": 899072 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.527116419881771e-05, |
|
"loss": 3.99, |
|
"step": 899584 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.526277825130719e-05, |
|
"loss": 3.9998, |
|
"step": 900096 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.525439230379667e-05, |
|
"loss": 4.0061, |
|
"step": 900608 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.524600635628615e-05, |
|
"loss": 4.002, |
|
"step": 901120 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.523762040877563e-05, |
|
"loss": 3.9941, |
|
"step": 901632 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.522925084006885e-05, |
|
"loss": 4.0082, |
|
"step": 902144 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.522086489255833e-05, |
|
"loss": 4.0014, |
|
"step": 902656 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5212495323851536e-05, |
|
"loss": 4.0053, |
|
"step": 903168 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5204109376341016e-05, |
|
"loss": 3.9922, |
|
"step": 903680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5195723428830496e-05, |
|
"loss": 4.0081, |
|
"step": 904192 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5187353860123705e-05, |
|
"loss": 3.9964, |
|
"step": 904704 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5178967912613185e-05, |
|
"loss": 4.0036, |
|
"step": 905216 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5170581965102665e-05, |
|
"loss": 3.9965, |
|
"step": 905728 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5162196017592145e-05, |
|
"loss": 3.9967, |
|
"step": 906240 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5153810070081625e-05, |
|
"loss": 4.0042, |
|
"step": 906752 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5145424122571105e-05, |
|
"loss": 4.0077, |
|
"step": 907264 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5137038175060585e-05, |
|
"loss": 4.0003, |
|
"step": 907776 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.512865222755007e-05, |
|
"loss": 4.0067, |
|
"step": 908288 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.512026628003955e-05, |
|
"loss": 3.9931, |
|
"step": 908800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.511188033252903e-05, |
|
"loss": 3.9933, |
|
"step": 909312 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5103494385018505e-05, |
|
"loss": 3.9989, |
|
"step": 909824 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.509512481631172e-05, |
|
"loss": 4.0043, |
|
"step": 910336 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.50867388688012e-05, |
|
"loss": 4.0204, |
|
"step": 910848 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.507835292129068e-05, |
|
"loss": 4.0009, |
|
"step": 911360 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5069966973780154e-05, |
|
"loss": 4.0009, |
|
"step": 911872 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5061581026269634e-05, |
|
"loss": 3.9962, |
|
"step": 912384 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5053195078759114e-05, |
|
"loss": 3.9962, |
|
"step": 912896 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5044809131248594e-05, |
|
"loss": 4.0034, |
|
"step": 913408 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.5036423183738074e-05, |
|
"loss": 4.0087, |
|
"step": 913920 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.502805361503129e-05, |
|
"loss": 4.0098, |
|
"step": 914432 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.501966766752077e-05, |
|
"loss": 3.9919, |
|
"step": 914944 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.501128172001025e-05, |
|
"loss": 4.004, |
|
"step": 915456 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 4.097228050231934, |
|
"eval_runtime": 622.3683, |
|
"eval_samples_per_second": 613.127, |
|
"eval_steps_per_second": 19.161, |
|
"step": 915828 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.500291215130346e-05, |
|
"loss": 3.9994, |
|
"step": 915968 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.499452620379294e-05, |
|
"loss": 4.0102, |
|
"step": 916480 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.498614025628242e-05, |
|
"loss": 4.0042, |
|
"step": 916992 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.49777543087719e-05, |
|
"loss": 3.9931, |
|
"step": 917504 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.496936836126138e-05, |
|
"loss": 4.0019, |
|
"step": 918016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.496099879255459e-05, |
|
"loss": 3.994, |
|
"step": 918528 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.495261284504407e-05, |
|
"loss": 4.0037, |
|
"step": 919040 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.494422689753355e-05, |
|
"loss": 3.9818, |
|
"step": 919552 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.493584095002303e-05, |
|
"loss": 4.0026, |
|
"step": 920064 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.492745500251251e-05, |
|
"loss": 4.0024, |
|
"step": 920576 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.491908543380572e-05, |
|
"loss": 3.997, |
|
"step": 921088 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.49106994862952e-05, |
|
"loss": 3.997, |
|
"step": 921600 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.490231353878468e-05, |
|
"loss": 4.0054, |
|
"step": 922112 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.489392759127416e-05, |
|
"loss": 3.9995, |
|
"step": 922624 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.488554164376364e-05, |
|
"loss": 3.9931, |
|
"step": 923136 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.487715569625312e-05, |
|
"loss": 3.9841, |
|
"step": 923648 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.48687697487426e-05, |
|
"loss": 3.9766, |
|
"step": 924160 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.486038380123208e-05, |
|
"loss": 3.9997, |
|
"step": 924672 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.485201423252529e-05, |
|
"loss": 3.9907, |
|
"step": 925184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.484362828501477e-05, |
|
"loss": 4.0105, |
|
"step": 925696 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.483524233750425e-05, |
|
"loss": 4.001, |
|
"step": 926208 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.482687276879746e-05, |
|
"loss": 4.0084, |
|
"step": 926720 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.481848682128694e-05, |
|
"loss": 3.9938, |
|
"step": 927232 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.481010087377643e-05, |
|
"loss": 4.0041, |
|
"step": 927744 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.480171492626591e-05, |
|
"loss": 3.9855, |
|
"step": 928256 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.479332897875539e-05, |
|
"loss": 4.0002, |
|
"step": 928768 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.478494303124487e-05, |
|
"loss": 3.9956, |
|
"step": 929280 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.477655708373434e-05, |
|
"loss": 3.9962, |
|
"step": 929792 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.476817113622382e-05, |
|
"loss": 3.9886, |
|
"step": 930304 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.4759801567517036e-05, |
|
"loss": 3.9928, |
|
"step": 930816 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4751415620006516e-05, |
|
"loss": 3.9824, |
|
"step": 931328 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4743046051299726e-05, |
|
"loss": 3.9962, |
|
"step": 931840 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4734660103789205e-05, |
|
"loss": 3.9993, |
|
"step": 932352 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4726274156278685e-05, |
|
"loss": 3.9973, |
|
"step": 932864 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4717888208768165e-05, |
|
"loss": 3.9928, |
|
"step": 933376 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.470951864006138e-05, |
|
"loss": 4.0007, |
|
"step": 933888 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.470113269255086e-05, |
|
"loss": 3.9972, |
|
"step": 934400 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.469274674504034e-05, |
|
"loss": 3.9939, |
|
"step": 934912 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4684360797529814e-05, |
|
"loss": 3.9983, |
|
"step": 935424 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4675974850019294e-05, |
|
"loss": 3.9987, |
|
"step": 935936 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4667588902508774e-05, |
|
"loss": 3.9941, |
|
"step": 936448 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4659202954998254e-05, |
|
"loss": 3.9875, |
|
"step": 936960 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4650817007487734e-05, |
|
"loss": 3.9901, |
|
"step": 937472 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.464244743878094e-05, |
|
"loss": 4.0009, |
|
"step": 937984 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.463406149127042e-05, |
|
"loss": 4.0016, |
|
"step": 938496 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.46256755437599e-05, |
|
"loss": 4.0012, |
|
"step": 939008 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.461730597505312e-05, |
|
"loss": 3.9883, |
|
"step": 939520 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.46089200275426e-05, |
|
"loss": 4.0082, |
|
"step": 940032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.460053408003208e-05, |
|
"loss": 3.9915, |
|
"step": 940544 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.459214813252156e-05, |
|
"loss": 3.984, |
|
"step": 941056 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.458376218501104e-05, |
|
"loss": 3.9842, |
|
"step": 941568 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.457537623750052e-05, |
|
"loss": 3.9941, |
|
"step": 942080 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.456700666879373e-05, |
|
"loss": 3.9997, |
|
"step": 942592 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.455862072128321e-05, |
|
"loss": 3.986, |
|
"step": 943104 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.455023477377269e-05, |
|
"loss": 3.9981, |
|
"step": 943616 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.454184882626217e-05, |
|
"loss": 3.9761, |
|
"step": 944128 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.453346287875165e-05, |
|
"loss": 3.9973, |
|
"step": 944640 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.452507693124113e-05, |
|
"loss": 3.992, |
|
"step": 945152 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.451669098373061e-05, |
|
"loss": 4.0013, |
|
"step": 945664 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.450833779382755e-05, |
|
"loss": 3.9888, |
|
"step": 946176 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.449995184631703e-05, |
|
"loss": 3.9941, |
|
"step": 946688 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.449156589880651e-05, |
|
"loss": 3.9906, |
|
"step": 947200 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.448317995129599e-05, |
|
"loss": 3.9853, |
|
"step": 947712 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.447479400378547e-05, |
|
"loss": 3.9904, |
|
"step": 948224 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.446640805627495e-05, |
|
"loss": 4.0015, |
|
"step": 948736 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.445802210876443e-05, |
|
"loss": 3.9958, |
|
"step": 949248 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.444963616125391e-05, |
|
"loss": 3.9749, |
|
"step": 949760 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.444125021374339e-05, |
|
"loss": 3.9913, |
|
"step": 950272 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.443286426623287e-05, |
|
"loss": 3.9892, |
|
"step": 950784 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4424478318722345e-05, |
|
"loss": 3.9901, |
|
"step": 951296 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4416092371211825e-05, |
|
"loss": 3.9848, |
|
"step": 951808 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4407706423701305e-05, |
|
"loss": 3.9972, |
|
"step": 952320 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.439933685499452e-05, |
|
"loss": 3.9888, |
|
"step": 952832 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4390950907484e-05, |
|
"loss": 3.9957, |
|
"step": 953344 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.438256495997348e-05, |
|
"loss": 3.9953, |
|
"step": 953856 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.437417901246296e-05, |
|
"loss": 3.9946, |
|
"step": 954368 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.436579306495244e-05, |
|
"loss": 3.9916, |
|
"step": 954880 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.435742349624565e-05, |
|
"loss": 3.9893, |
|
"step": 955392 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4349053927538866e-05, |
|
"loss": 3.9903, |
|
"step": 955904 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4340667980028346e-05, |
|
"loss": 3.9957, |
|
"step": 956416 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.433228203251782e-05, |
|
"loss": 4.0003, |
|
"step": 956928 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.43238960850073e-05, |
|
"loss": 3.9977, |
|
"step": 957440 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4315526516300515e-05, |
|
"loss": 3.9854, |
|
"step": 957952 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4307140568789995e-05, |
|
"loss": 3.9825, |
|
"step": 958464 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4298754621279475e-05, |
|
"loss": 3.9765, |
|
"step": 958976 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4290368673768955e-05, |
|
"loss": 3.997, |
|
"step": 959488 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4281982726258435e-05, |
|
"loss": 3.9868, |
|
"step": 960000 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4273596778747915e-05, |
|
"loss": 3.9986, |
|
"step": 960512 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4265210831237395e-05, |
|
"loss": 3.9879, |
|
"step": 961024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.4256824883726875e-05, |
|
"loss": 3.9897, |
|
"step": 961536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4248455315020084e-05, |
|
"loss": 3.9784, |
|
"step": 962048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4240069367509564e-05, |
|
"loss": 3.9929, |
|
"step": 962560 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4231683419999044e-05, |
|
"loss": 3.9946, |
|
"step": 963072 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4223297472488524e-05, |
|
"loss": 3.9796, |
|
"step": 963584 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.421492790378173e-05, |
|
"loss": 3.9797, |
|
"step": 964096 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.420654195627121e-05, |
|
"loss": 3.9815, |
|
"step": 964608 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.419815600876069e-05, |
|
"loss": 3.9932, |
|
"step": 965120 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.418977006125017e-05, |
|
"loss": 3.9917, |
|
"step": 965632 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.418138411373966e-05, |
|
"loss": 3.9971, |
|
"step": 966144 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.417299816622914e-05, |
|
"loss": 3.9926, |
|
"step": 966656 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.416461221871862e-05, |
|
"loss": 3.9792, |
|
"step": 967168 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.41562262712081e-05, |
|
"loss": 3.9918, |
|
"step": 967680 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.414785670250131e-05, |
|
"loss": 3.989, |
|
"step": 968192 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.413947075499079e-05, |
|
"loss": 3.9926, |
|
"step": 968704 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4131101186284e-05, |
|
"loss": 3.9795, |
|
"step": 969216 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.412271523877348e-05, |
|
"loss": 3.9858, |
|
"step": 969728 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.411432929126296e-05, |
|
"loss": 3.9773, |
|
"step": 970240 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.410594334375244e-05, |
|
"loss": 3.9868, |
|
"step": 970752 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.409755739624192e-05, |
|
"loss": 3.9907, |
|
"step": 971264 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4089187827535126e-05, |
|
"loss": 3.9857, |
|
"step": 971776 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.408080188002461e-05, |
|
"loss": 3.9884, |
|
"step": 972288 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.407241593251409e-05, |
|
"loss": 3.9739, |
|
"step": 972800 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.406402998500357e-05, |
|
"loss": 3.9761, |
|
"step": 973312 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.405564403749305e-05, |
|
"loss": 3.9919, |
|
"step": 973824 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.404725808998253e-05, |
|
"loss": 3.9966, |
|
"step": 974336 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4038872142472006e-05, |
|
"loss": 3.9748, |
|
"step": 974848 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4030486194961486e-05, |
|
"loss": 3.9806, |
|
"step": 975360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.4022100247450966e-05, |
|
"loss": 3.9798, |
|
"step": 975872 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.401373067874418e-05, |
|
"loss": 3.9831, |
|
"step": 976384 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.400536111003739e-05, |
|
"loss": 3.9909, |
|
"step": 976896 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.399697516252687e-05, |
|
"loss": 3.9836, |
|
"step": 977408 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.398858921501635e-05, |
|
"loss": 3.9791, |
|
"step": 977920 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.398020326750583e-05, |
|
"loss": 3.9934, |
|
"step": 978432 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.397181731999531e-05, |
|
"loss": 3.9832, |
|
"step": 978944 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.396343137248479e-05, |
|
"loss": 3.9897, |
|
"step": 979456 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3955061803778007e-05, |
|
"loss": 3.9768, |
|
"step": 979968 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.394667585626748e-05, |
|
"loss": 3.9894, |
|
"step": 980480 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3938306287560696e-05, |
|
"loss": 3.9836, |
|
"step": 980992 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3929920340050176e-05, |
|
"loss": 3.9884, |
|
"step": 981504 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3921534392539655e-05, |
|
"loss": 3.9805, |
|
"step": 982016 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.391314844502913e-05, |
|
"loss": 3.977, |
|
"step": 982528 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.390476249751861e-05, |
|
"loss": 3.9884, |
|
"step": 983040 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.389637655000809e-05, |
|
"loss": 3.996, |
|
"step": 983552 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.388799060249757e-05, |
|
"loss": 3.9854, |
|
"step": 984064 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.387960465498705e-05, |
|
"loss": 3.9907, |
|
"step": 984576 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3871218707476535e-05, |
|
"loss": 3.9804, |
|
"step": 985088 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3862849138769744e-05, |
|
"loss": 3.9741, |
|
"step": 985600 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3854463191259224e-05, |
|
"loss": 3.9854, |
|
"step": 986112 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3846077243748704e-05, |
|
"loss": 3.9883, |
|
"step": 986624 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3837691296238184e-05, |
|
"loss": 4.0066, |
|
"step": 987136 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3829305348727664e-05, |
|
"loss": 3.9807, |
|
"step": 987648 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3820919401217144e-05, |
|
"loss": 3.9877, |
|
"step": 988160 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3812533453706624e-05, |
|
"loss": 3.9788, |
|
"step": 988672 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3804147506196104e-05, |
|
"loss": 3.9853, |
|
"step": 989184 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.379579431629304e-05, |
|
"loss": 3.9867, |
|
"step": 989696 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.378740836878252e-05, |
|
"loss": 3.9903, |
|
"step": 990208 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.3779022421272e-05, |
|
"loss": 3.9976, |
|
"step": 990720 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.377063647376149e-05, |
|
"loss": 3.9765, |
|
"step": 991232 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.376225052625097e-05, |
|
"loss": 3.9818, |
|
"step": 991744 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"eval_loss": 4.087806224822998, |
|
"eval_runtime": 624.7945, |
|
"eval_samples_per_second": 610.746, |
|
"eval_steps_per_second": 19.086, |
|
"step": 992147 |
|
} |
|
],
"logging_steps": 512,
"max_steps": 3052726,
"num_train_epochs": 9223372036854775807,
"save_steps": 10,
"total_flos": 3.81796497507722e+17,
"trial_name": null,
"trial_params": null
}