PyTorch
llama
alignment-handbook
Generated from Trainer
{
    "epoch": 1.0,
    "eval_logits/chosen": 1.226016640663147,
    "eval_logits/rejected": 1.1679025888442993,
    "eval_logps/chosen": -449.40472412109375,
    "eval_logps/rejected": -588.7138671875,
    "eval_loss": 0.4695761799812317,
    "eval_rewards/accuracies": 0.8089285492897034,
    "eval_rewards/chosen": -1.546373724937439,
    "eval_rewards/margins": 1.3221595287322998,
    "eval_rewards/rejected": -2.8685333728790283,
    "eval_runtime": 173.1226,
    "eval_samples": 4461,
    "eval_samples_per_second": 25.768,
    "eval_steps_per_second": 0.404,
    "total_flos": 0.0,
    "train_loss": 0.5209795107882678,
    "train_runtime": 14376.1789,
    "train_samples": 133368,
    "train_samples_per_second": 9.277,
    "train_steps_per_second": 0.29
}
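
The fields above are internally consistent: `eval_rewards/margins` equals `eval_rewards/chosen - eval_rewards/rejected`, and the `*_samples_per_second` values equal `samples / runtime`. Below is a minimal sketch that checks these relations, assuming the file is saved locally as `all_results.json` (a common Trainer output name; the actual filename in this repo may differ):

```python
import json

# Load the Trainer-generated summary (hypothetical local path).
with open("all_results.json") as f:
    results = json.load(f)

# DPO-style reward margin: reward(chosen) - reward(rejected).
margin = results["eval_rewards/chosen"] - results["eval_rewards/rejected"]
assert abs(margin - results["eval_rewards/margins"]) < 1e-4
print(f"eval reward margin: {margin:.4f}")  # ~1.3222

# Throughput sanity checks: samples / runtime matches samples_per_second.
eval_throughput = results["eval_samples"] / results["eval_runtime"]
print(f"eval samples/s: {eval_throughput:.3f}")  # ~25.768

train_throughput = results["train_samples"] / results["train_runtime"]
print(f"train samples/s: {train_throughput:.3f}")  # ~9.277
```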