{
    "epoch": 0.992,
    "eval_abs_diff": 0.421875,
    "eval_all_logps_1": -819.568115234375,
    "eval_all_logps_1_values": -819.568115234375,
    "eval_all_logps_2": 444.0224914550781,
    "eval_all_logps_2_values": 444.0224914550781,
    "eval_logits/chosen": -1.4453125,
    "eval_logits/rejected": -1.484375,
    "eval_logps/chosen": -2.265625,
    "eval_logps/rejected": -2.34375,
    "eval_loss": 1.7294530868530273,
    "eval_original_losses": 1.7265625,
    "eval_rewards/accuracies": 0.4650000035762787,
    "eval_rewards/chosen": -5.65625,
    "eval_rewards/margins": 0.2001953125,
    "eval_rewards/rejected": -5.84375,
    "eval_runtime": 15.6751,
    "eval_samples": 200,
    "eval_samples_per_second": 12.759,
    "eval_steps_per_second": 3.19,
    "eval_weight": 1.0,
    "total_flos": 0.0,
    "train_loss": 1.8891444052419355,
    "train_runtime": 316.7887,
    "train_samples": 1000,
    "train_samples_per_second": 3.157,
    "train_steps_per_second": 0.098
}