first GPT2 model with ~2M words (commit 5b9aa08)
{
"epoch": 3.0,
"eval_accuracy": 0.324633431085044,
"eval_loss": 4.011027812957764,
"eval_runtime": 4.2815,
"eval_samples": 90,
"eval_samples_per_second": 21.021,
"eval_steps_per_second": 2.803,
"perplexity": 55.20358037015378,
"train_loss": 5.383951053103885,
"train_runtime": 1331.1447,
"train_samples": 2962,
"train_samples_per_second": 6.675,
"train_steps_per_second": 1.668
}
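
For context, the metrics above are internally consistent: perplexity is the exponential of the evaluation loss, and the throughput figures follow from the sample counts, epoch count, and wall-clock runtimes. A minimal sketch checking those relationships against the JSON values (this is illustrative only, not part of any training script; the variable names simply mirror the JSON keys):

import math

# Values copied from the results JSON above.
eval_loss = 4.011027812957764
eval_samples = 90
eval_runtime = 4.2815          # seconds
train_samples = 2962
train_runtime = 1331.1447      # seconds
epoch = 3.0

# Perplexity is exp(mean cross-entropy loss).
perplexity = math.exp(eval_loss)                     # ~55.20, matches "perplexity"

# Throughput: samples processed per second of wall-clock time.
eval_sps = eval_samples / eval_runtime               # ~21.021, matches "eval_samples_per_second"
train_sps = train_samples * epoch / train_runtime    # ~6.675, matches "train_samples_per_second"

print(f"perplexity={perplexity:.2f}, eval={eval_sps:.3f}/s, train={train_sps:.3f}/s")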