zephyr-7b-dpo-full-beta-0.2 / eval_results.json
{
"epoch": 3.0,
"eval_logits/chosen": -2.697049617767334,
"eval_logits/rejected": -2.6942925453186035,
"eval_logps/chosen": -314.599609375,
"eval_logps/rejected": -282.62579345703125,
"eval_loss": 0.7903289198875427,
"eval_rewards/accuracies": 0.7658730149269104,
"eval_rewards/chosen": -3.2220375537872314,
"eval_rewards/margins": 4.1147027015686035,
"eval_rewards/rejected": -7.336739540100098,
"eval_runtime": 217.5722,
"eval_samples": 2000,
"eval_samples_per_second": 9.192,
"eval_steps_per_second": 0.29
}
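
The reward fields follow the usual DPO bookkeeping (as in Hugging Face's TRL DPOTrainer, which this run appears to use): eval_rewards/margins is the mean difference between the chosen and rejected rewards, and eval_rewards/accuracies is the fraction of evaluation pairs whose chosen reward exceeds the rejected one; the beta of 0.2 in the run name would be the coefficient scaling the policy-to-reference log-probability ratio into these rewards. A minimal sketch of how the numbers above fit together, assuming the file sits in the working directory (the consistency checks are illustrative, not part of the repository):

import json

# Load the metrics written at the end of evaluation.
with open("eval_results.json") as f:
    metrics = json.load(f)

chosen = metrics["eval_rewards/chosen"]      # -3.222...
rejected = metrics["eval_rewards/rejected"]  # -7.337...
margin = metrics["eval_rewards/margins"]     # 4.115...

# The reported margin is (chosen reward - rejected reward), averaged over the eval set.
assert abs(margin - (chosen - rejected)) < 1e-3

# Throughput sanity check: eval_samples / eval_runtime ~= eval_samples_per_second.
assert abs(metrics["eval_samples"] / metrics["eval_runtime"]
           - metrics["eval_samples_per_second"]) < 0.01

print(f"margin={margin:.3f}  accuracy={metrics['eval_rewards/accuracies']:.3f}")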