Meta-Llama-3-8B-Base-5e-7 / train_results.json
tengxiao1 · commit f952910
{
    "epoch": 0.9982631930527722,
    "total_flos": 0.0,
    "train_loss": 1.7001326604879898,
    "train_runtime": 20882.5667,
    "train_samples": 59876,
    "train_samples_per_second": 2.867,
    "train_steps_per_second": 0.022
}
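The fields above follow the summary format that the Hugging Face Transformers `Trainer` writes at the end of training: `train_runtime` is in seconds, and the reported throughput should match `train_samples / train_runtime` (59876 / 20882.5667 ≈ 2.867 samples/s). A minimal sketch of how one might load and sanity-check these figures is below; the file path is an assumption, and the "effective batch size" line is only an approximation derived from the two rounded throughput values (here ≈ 130).

```python
import json

# Path is an assumption; point it at this repo's train_results.json.
with open("train_results.json") as f:
    results = json.load(f)

runtime = results["train_runtime"]   # wall-clock training time in seconds
samples = results["train_samples"]   # number of training examples seen per epoch

# Re-derive sample throughput and compare with the reported value.
derived_sps = samples / runtime
print(f"reported samples/s: {results['train_samples_per_second']:.3f}")
print(f"derived  samples/s: {derived_sps:.3f}")

# The ratio of sample throughput to step throughput approximates the
# effective (global) batch size; rounding in the stored values makes
# this an estimate only.
eff_batch = results["train_samples_per_second"] / results["train_steps_per_second"]
print(f"approx. effective batch size: {eff_batch:.0f}")
```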