{
    "epoch": 0.9985022466300549,
    "num_input_tokens_seen": 18683616,
    "total_flos": 1.599323192678744e+18,
    "train_loss": 2.4129163398742675,
    "train_runtime": 18708.0864,
    "train_samples_per_second": 0.214,
    "train_steps_per_second": 0.013
}