[
  {
    "loss": 1.9278494873046874,
    "learning_rate": 4.554605380367005e-05,
    "epoch": 0.2672367717797969,
    "total_flos": 4360067088384000,
    "step": 1000
  },
  {
    "loss": 1.8067623291015624,
    "learning_rate": 4.109210760734011e-05,
    "epoch": 0.5344735435595938,
    "total_flos": 8720134176768000,
    "step": 2000
  },
  {
    "loss": 1.77223828125,
    "learning_rate": 3.663816141101016e-05,
    "epoch": 0.8017103153393907,
    "total_flos": 13080201265152000,
    "step": 3000
  },
  {
    "loss": 1.6007529296875,
    "learning_rate": 3.2184215214680206e-05,
    "epoch": 1.0689470871191875,
    "total_flos": 17438088319991808,
    "step": 4000
  },
  {
    "loss": 1.21124267578125,
    "learning_rate": 2.7730269018350257e-05,
    "epoch": 1.3361838588989845,
    "total_flos": 21798155408375808,
    "step": 5000
  },
  {
    "loss": 1.20527734375,
    "learning_rate": 2.327632282202031e-05,
    "epoch": 1.6034206306787815,
    "total_flos": 26158222496759808,
    "step": 6000
  },
  {
    "loss": 1.1963974609375,
    "learning_rate": 1.8822376625690362e-05,
    "epoch": 1.8706574024585783,
    "total_flos": 30518289585143808,
    "step": 7000
  },
  {
    "loss": 1.0101669921875,
    "learning_rate": 1.4368430429360413e-05,
    "epoch": 2.137894174238375,
    "total_flos": 34876176639983616,
    "step": 8000
  },
  {
    "loss": 0.8346533203125,
    "learning_rate": 9.914484233030466e-06,
    "epoch": 2.405130946018172,
    "total_flos": 39236243728367616,
    "step": 9000
  },
  {
    "loss": 0.820884765625,
    "learning_rate": 5.4605380367005166e-06,
    "epoch": 2.672367717797969,
    "total_flos": 43596310816751616,
    "step": 10000
  },
  {
    "loss": 0.8244423828125,
    "learning_rate": 1.0065918403705683e-06,
    "epoch": 2.939604489577766,
    "total_flos": 47956377905135616,
    "step": 11000
  }
]