ttt-mc-pythia-70m / all_results.json
[{"train_loss": 1.3125, "epoch": 2, "step": 3072, "lr": 4.750621824247943e-05}, {"train_loss": 1.3828125, "epoch": 2, "step": 3200, "lr": 4.0073511784396337e-05}, {"train_loss": 3.203125, "epoch": 2, "step": 3328, "lr": 3.312524130702509e-05}, {"train_loss": 1.3828125, "epoch": 2, "step": 3456, "lr": 2.6717575498422943e-05}, {"train_loss": 1.3828125, "epoch": 2, "step": 3584, "lr": 2.0902312886461217e-05}, {"train_loss": 1.3828125, "epoch": 2, "step": 3712, "lr": 1.572646310795336e-05}, {"train_loss": 1.3828125, "epoch": 2, "step": 3840, "lr": 1.1231866890399501e-05}, {"train_loss": 3.3125, "epoch": 2, "step": 3968, "lr": 7.45485781835279e-06}, {"train_loss": 2.96875, "epoch": 2, "step": 4096, "lr": 4.425968618633292e-06}, {"train_loss": 1.3828125, "epoch": 2, "step": 4224, "lr": 2.169684338732114e-06}, {"train_loss": 1.3828125, "epoch": 2, "step": 4352, "lr": 7.04244413671129e-07}, {"train_loss": 1.3828125, "epoch": 2, "step": 4480, "lr": 4.149522137787409e-08}]