{
    "epoch": 1.94,
    "total_flos": 8.866156938264576e+16,
    "train_loss": 0.011351865083724988,
    "train_runtime": 119.1901,
    "train_samples_per_second": 13.189,
    "train_steps_per_second": 0.403
}
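
These keys match the training summary that the Hugging Face Transformers Trainer typically writes out (e.g. as train_results.json). A minimal sketch for loading and summarizing the metrics, assuming that filename; the filename and the derived sample count are illustrative, not part of the original file:

# Sketch: load the training-results JSON above and print a short summary.
# "train_results.json" is an assumed filename for this file on disk.
import json

with open("train_results.json") as f:
    metrics = json.load(f)

# Approximate samples processed = throughput * wall-clock runtime.
approx_samples = metrics["train_samples_per_second"] * metrics["train_runtime"]

print(f"Final training loss: {metrics['train_loss']:.4f}")
print(f"Runtime: {metrics['train_runtime']:.1f}s "
      f"(~{approx_samples:.0f} samples over {metrics['epoch']} epochs)")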