PyTorch
llama
alignment-handbook
Generated from Trainer
{
    "epoch": 1.0,
    "eval_logits/chosen": 1.106881022453308,
    "eval_logits/rejected": 1.0551656484603882,
    "eval_logps/chosen": -504.99224853515625,
    "eval_logps/rejected": -705.9820556640625,
    "eval_loss": 0.4338752329349518,
    "eval_rewards/accuracies": 0.8196428418159485,
    "eval_rewards/chosen": -2.3226053714752197,
    "eval_rewards/margins": 1.9564555883407593,
    "eval_rewards/rejected": -4.2790608406066895,
    "eval_runtime": 187.0474,
    "eval_samples": 4461,
    "eval_samples_per_second": 23.85,
    "eval_steps_per_second": 0.374,
    "total_flos": 0.0,
    "train_loss": 0.4974772959890384,
    "train_runtime": 14966.1301,
    "train_samples": 133368,
    "train_samples_per_second": 8.911,
    "train_steps_per_second": 0.278
}
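
The reward keys follow the DPO-style logging used by TRL's DPOTrainer (as in the alignment-handbook recipes): rewards/margins is the mean per-pair gap between chosen and rejected rewards, so it should roughly equal rewards/chosen minus rewards/rejected (-2.32 - (-4.28) ≈ 1.96 here). A minimal sketch for loading and sanity-checking the file, assuming it is saved as all_results.json (the filename is an assumption; the metric keys are taken from the JSON above):

import json

# Load the Trainer results file (path is an assumption for illustration).
with open("all_results.json") as f:
    results = json.load(f)

# The reported margin is averaged per preference pair, so it only roughly
# matches the difference of the two aggregate reward values.
margin = results["eval_rewards/chosen"] - results["eval_rewards/rejected"]
print(f"reported eval margin:  {results['eval_rewards/margins']:.4f}")
print(f"chosen minus rejected: {margin:.4f}")
print(f"preference accuracy:   {results['eval_rewards/accuracies']:.1%}")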