Training in progress, step 1152, checkpoint
last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:63c7efc391bfd77f385c391ef221b407b7c3583932b65b9b7a2ae9e66088fe2e
 size 83945296
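adapter_model.safetensors is the file name PEFT uses for LoRA-style adapter weights, so each checkpoint only rewrites the ~84 MB of adapter parameters rather than the full base model. A minimal loading sketch, assuming the usual PEFT layout (an adapter_config.json alongside the weights, which this diff does not show); the base-model id is a placeholder, since the commit does not name it:

```python
from transformers import AutoModelForCausalLM
from peft import PeftModel

# "BASE_MODEL_ID" is a placeholder: the base model is not named in this commit.
base = AutoModelForCausalLM.from_pretrained("BASE_MODEL_ID")

# Attaches the adapter weights stored in last-checkpoint/adapter_model.safetensors.
model = PeftModel.from_pretrained(base, "last-checkpoint")
```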
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4edd677ae4d208be5f46218328c4991990a368711161a703bef7784efc85af0f
 size 43123028
last-checkpoint/rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e4b27c8e8c4f338a52568ccda7e850541aa858528be200e9d505032813f2b115
 size 14244
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:64bd2cb08658ad4a8a2a98da2006414bdebd7bc0f90d4970d57c9a4293d69495
 size 1064
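Each of the four files above is stored through Git LFS, so the repository itself tracks only a three-line pointer (spec version, sha256 oid, byte size); this commit swaps the oid while every size stays constant. After downloading a checkpoint it can be worth checking the blob against its pointer; a minimal sketch using only the standard library (both paths are hypothetical placeholders, not files named in this commit):

```python
import hashlib
import os

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against its Git LFS pointer.

    A pointer file has exactly the shape shown in the diffs above:
        version https://git-lfs.github.com/spec/v1
        oid sha256:<hex digest>
        size <bytes>
    """
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    expected_oid = fields["oid"].split(":", 1)[1]  # drop the "sha256:" prefix
    expected_size = int(fields["size"])

    # Size first: cheap, and catches truncated downloads immediately.
    if os.path.getsize(blob_path) != expected_size:
        return False

    # Stream in 1 MiB chunks so an ~84 MB adapter blob never has to
    # sit in memory at once.
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid
```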
last-checkpoint/trainer_state.json
CHANGED
@@ -1,9 +1,9 @@
|
|
1 |
{
|
2 |
"best_metric": null,
|
3 |
"best_model_checkpoint": null,
|
4 |
-
"epoch": 0
|
5 |
"eval_steps": 288,
|
6 |
-
"global_step":
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
@@ -6079,6 +6079,2030 @@
|
|
6079 |
"eval_samples_per_second": 17.706,
|
6080 |
"eval_steps_per_second": 8.871,
|
6081 |
"step": 864
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
6082 |
}
|
6083 |
],
|
6084 |
"logging_steps": 1,
|
@@ -6093,12 +8117,12 @@
|
|
6093 |
"should_evaluate": false,
|
6094 |
"should_log": false,
|
6095 |
"should_save": true,
|
6096 |
-
"should_training_stop":
|
6097 |
},
|
6098 |
"attributes": {}
|
6099 |
}
|
6100 |
},
|
6101 |
-
"total_flos":
|
6102 |
"train_batch_size": 2,
|
6103 |
"trial_name": null,
|
6104 |
"trial_params": null
|
|
|
1 |
{
|
2 |
"best_metric": null,
|
3 |
"best_model_checkpoint": null,
|
4 |
+
"epoch": 1.0,
|
5 |
"eval_steps": 288,
|
6 |
+
"global_step": 1152,
|
7 |
"is_hyper_param_search": false,
|
8 |
"is_local_process_zero": true,
|
9 |
"is_world_process_zero": true,
|
|
|
6079 |
"eval_samples_per_second": 17.706,
|
6080 |
"eval_steps_per_second": 8.871,
|
6081 |
"step": 864
|
6082 |
+
},
|
6083 |
+
{
|
6084 |
+
"epoch": 0.7508680555555556,
|
6085 |
+
"grad_norm": 13.497057914733887,
|
6086 |
+
"learning_rate": 2.9581706094407346e-05,
|
6087 |
+
"loss": 1.9589,
|
6088 |
+
"step": 865
|
6089 |
+
},
|
6090 |
+
{
|
6091 |
+
"epoch": 0.7517361111111112,
|
6092 |
+
"grad_norm": 13.387433052062988,
|
6093 |
+
"learning_rate": 2.9386649750904738e-05,
|
6094 |
+
"loss": 1.9046,
|
6095 |
+
"step": 866
|
6096 |
+
},
|
6097 |
+
{
|
6098 |
+
"epoch": 0.7526041666666666,
|
6099 |
+
"grad_norm": 11.690869331359863,
|
6100 |
+
"learning_rate": 2.9192127792182633e-05,
|
6101 |
+
"loss": 1.8332,
|
6102 |
+
"step": 867
|
6103 |
+
},
|
6104 |
+
{
|
6105 |
+
"epoch": 0.7534722222222222,
|
6106 |
+
"grad_norm": 17.66724967956543,
|
6107 |
+
"learning_rate": 2.899814169033622e-05,
|
6108 |
+
"loss": 1.4837,
|
6109 |
+
"step": 868
|
6110 |
+
},
|
6111 |
+
{
|
6112 |
+
"epoch": 0.7543402777777778,
|
6113 |
+
"grad_norm": 14.348726272583008,
|
6114 |
+
"learning_rate": 2.8804692913405474e-05,
|
6115 |
+
"loss": 1.9775,
|
6116 |
+
"step": 869
|
6117 |
+
},
|
6118 |
+
{
|
6119 |
+
"epoch": 0.7552083333333334,
|
6120 |
+
"grad_norm": 21.707870483398438,
|
6121 |
+
"learning_rate": 2.8611782925364006e-05,
|
6122 |
+
"loss": 1.5591,
|
6123 |
+
"step": 870
|
6124 |
+
},
|
6125 |
+
{
|
6126 |
+
"epoch": 0.7560763888888888,
|
6127 |
+
"grad_norm": 15.366388320922852,
|
6128 |
+
"learning_rate": 2.841941318610808e-05,
|
6129 |
+
"loss": 1.7812,
|
6130 |
+
"step": 871
|
6131 |
+
},
|
6132 |
+
{
|
6133 |
+
"epoch": 0.7569444444444444,
|
6134 |
+
"grad_norm": 13.18627643585205,
|
6135 |
+
"learning_rate": 2.8227585151445367e-05,
|
6136 |
+
"loss": 2.004,
|
6137 |
+
"step": 872
|
6138 |
+
},
|
6139 |
+
{
|
6140 |
+
"epoch": 0.7578125,
|
6141 |
+
"grad_norm": 12.81862735748291,
|
6142 |
+
"learning_rate": 2.8036300273084125e-05,
|
6143 |
+
"loss": 1.8925,
|
6144 |
+
"step": 873
|
6145 |
+
},
|
6146 |
+
{
|
6147 |
+
"epoch": 0.7586805555555556,
|
6148 |
+
"grad_norm": 13.406712532043457,
|
6149 |
+
"learning_rate": 2.784555999862206e-05,
|
6150 |
+
"loss": 2.0415,
|
6151 |
+
"step": 874
|
6152 |
+
},
|
6153 |
+
{
|
6154 |
+
"epoch": 0.7595486111111112,
|
6155 |
+
"grad_norm": 14.577287673950195,
|
6156 |
+
"learning_rate": 2.7655365771535556e-05,
|
6157 |
+
"loss": 1.9892,
|
6158 |
+
"step": 875
|
6159 |
+
},
|
6160 |
+
{
|
6161 |
+
"epoch": 0.7604166666666666,
|
6162 |
+
"grad_norm": 13.368107795715332,
|
6163 |
+
"learning_rate": 2.7465719031168612e-05,
|
6164 |
+
"loss": 1.8459,
|
6165 |
+
"step": 876
|
6166 |
+
},
|
6167 |
+
{
|
6168 |
+
"epoch": 0.7612847222222222,
|
6169 |
+
"grad_norm": 12.317877769470215,
|
6170 |
+
"learning_rate": 2.727662121272192e-05,
|
6171 |
+
"loss": 1.8278,
|
6172 |
+
"step": 877
|
6173 |
+
},
|
6174 |
+
{
|
6175 |
+
"epoch": 0.7621527777777778,
|
6176 |
+
"grad_norm": 14.853096008300781,
|
6177 |
+
"learning_rate": 2.7088073747242104e-05,
|
6178 |
+
"loss": 2.0533,
|
6179 |
+
"step": 878
|
6180 |
+
},
|
6181 |
+
{
|
6182 |
+
"epoch": 0.7630208333333334,
|
6183 |
+
"grad_norm": 13.712597846984863,
|
6184 |
+
"learning_rate": 2.6900078061610822e-05,
|
6185 |
+
"loss": 1.8704,
|
6186 |
+
"step": 879
|
6187 |
+
},
|
6188 |
+
{
|
6189 |
+
"epoch": 0.7638888888888888,
|
6190 |
+
"grad_norm": 12.881746292114258,
|
6191 |
+
"learning_rate": 2.671263557853404e-05,
|
6192 |
+
"loss": 1.9443,
|
6193 |
+
"step": 880
|
6194 |
+
},
|
6195 |
+
{
|
6196 |
+
"epoch": 0.7647569444444444,
|
6197 |
+
"grad_norm": 13.358099937438965,
|
6198 |
+
"learning_rate": 2.6525747716531236e-05,
|
6199 |
+
"loss": 1.6258,
|
6200 |
+
"step": 881
|
6201 |
+
},
|
6202 |
+
{
|
6203 |
+
"epoch": 0.765625,
|
6204 |
+
"grad_norm": 16.698448181152344,
|
6205 |
+
"learning_rate": 2.633941588992458e-05,
|
6206 |
+
"loss": 1.8157,
|
6207 |
+
"step": 882
|
6208 |
+
},
|
6209 |
+
{
|
6210 |
+
"epoch": 0.7664930555555556,
|
6211 |
+
"grad_norm": 12.458288192749023,
|
6212 |
+
"learning_rate": 2.615364150882833e-05,
|
6213 |
+
"loss": 1.8839,
|
6214 |
+
"step": 883
|
6215 |
+
},
|
6216 |
+
{
|
6217 |
+
"epoch": 0.7673611111111112,
|
6218 |
+
"grad_norm": 14.559062004089355,
|
6219 |
+
"learning_rate": 2.5968425979138112e-05,
|
6220 |
+
"loss": 1.8366,
|
6221 |
+
"step": 884
|
6222 |
+
},
|
6223 |
+
{
|
6224 |
+
"epoch": 0.7682291666666666,
|
6225 |
+
"grad_norm": 16.640411376953125,
|
6226 |
+
"learning_rate": 2.578377070252038e-05,
|
6227 |
+
"loss": 1.8429,
|
6228 |
+
"step": 885
|
6229 |
+
},
|
6230 |
+
{
|
6231 |
+
"epoch": 0.7690972222222222,
|
6232 |
+
"grad_norm": 13.513100624084473,
|
6233 |
+
"learning_rate": 2.5599677076401608e-05,
|
6234 |
+
"loss": 1.9461,
|
6235 |
+
"step": 886
|
6236 |
+
},
|
6237 |
+
{
|
6238 |
+
"epoch": 0.7699652777777778,
|
6239 |
+
"grad_norm": 13.054133415222168,
|
6240 |
+
"learning_rate": 2.541614649395796e-05,
|
6241 |
+
"loss": 1.7364,
|
6242 |
+
"step": 887
|
6243 |
+
},
|
6244 |
+
{
|
6245 |
+
"epoch": 0.7708333333333334,
|
6246 |
+
"grad_norm": 16.152196884155273,
|
6247 |
+
"learning_rate": 2.5233180344104536e-05,
|
6248 |
+
"loss": 2.0531,
|
6249 |
+
"step": 888
|
6250 |
+
},
|
6251 |
+
{
|
6252 |
+
"epoch": 0.7717013888888888,
|
6253 |
+
"grad_norm": 15.05471420288086,
|
6254 |
+
"learning_rate": 2.505078001148492e-05,
|
6255 |
+
"loss": 1.894,
|
6256 |
+
"step": 889
|
6257 |
+
},
|
6258 |
+
{
|
6259 |
+
"epoch": 0.7725694444444444,
|
6260 |
+
"grad_norm": 16.116544723510742,
|
6261 |
+
"learning_rate": 2.486894687646085e-05,
|
6262 |
+
"loss": 2.1467,
|
6263 |
+
"step": 890
|
6264 |
+
},
|
6265 |
+
{
|
6266 |
+
"epoch": 0.7734375,
|
6267 |
+
"grad_norm": 16.958139419555664,
|
6268 |
+
"learning_rate": 2.4687682315101513e-05,
|
6269 |
+
"loss": 2.1954,
|
6270 |
+
"step": 891
|
6271 |
+
},
|
6272 |
+
{
|
6273 |
+
"epoch": 0.7743055555555556,
|
6274 |
+
"grad_norm": 15.613293647766113,
|
6275 |
+
"learning_rate": 2.450698769917339e-05,
|
6276 |
+
"loss": 2.093,
|
6277 |
+
"step": 892
|
6278 |
+
},
|
6279 |
+
{
|
6280 |
+
"epoch": 0.7751736111111112,
|
6281 |
+
"grad_norm": 19.14984130859375,
|
6282 |
+
"learning_rate": 2.4326864396129666e-05,
|
6283 |
+
"loss": 2.0221,
|
6284 |
+
"step": 893
|
6285 |
+
},
|
6286 |
+
{
|
6287 |
+
"epoch": 0.7760416666666666,
|
6288 |
+
"grad_norm": 15.305646896362305,
|
6289 |
+
"learning_rate": 2.4147313769099977e-05,
|
6290 |
+
"loss": 2.039,
|
6291 |
+
"step": 894
|
6292 |
+
},
|
6293 |
+
{
|
6294 |
+
"epoch": 0.7769097222222222,
|
6295 |
+
"grad_norm": 16.29265022277832,
|
6296 |
+
"learning_rate": 2.396833717688015e-05,
|
6297 |
+
"loss": 1.9908,
|
6298 |
+
"step": 895
|
6299 |
+
},
|
6300 |
+
{
|
6301 |
+
"epoch": 0.7777777777777778,
|
6302 |
+
"grad_norm": 17.18115997314453,
|
6303 |
+
"learning_rate": 2.3789935973921752e-05,
|
6304 |
+
"loss": 2.2076,
|
6305 |
+
"step": 896
|
6306 |
+
},
|
6307 |
+
{
|
6308 |
+
"epoch": 0.7786458333333334,
|
6309 |
+
"grad_norm": 17.150726318359375,
|
6310 |
+
"learning_rate": 2.3612111510322054e-05,
|
6311 |
+
"loss": 2.0696,
|
6312 |
+
"step": 897
|
6313 |
+
},
|
6314 |
+
{
|
6315 |
+
"epoch": 0.7795138888888888,
|
6316 |
+
"grad_norm": 23.77643394470215,
|
6317 |
+
"learning_rate": 2.3434865131813633e-05,
|
6318 |
+
"loss": 1.8396,
|
6319 |
+
"step": 898
|
6320 |
+
},
|
6321 |
+
{
|
6322 |
+
"epoch": 0.7803819444444444,
|
6323 |
+
"grad_norm": 27.85177993774414,
|
6324 |
+
"learning_rate": 2.325819817975423e-05,
|
6325 |
+
"loss": 2.1839,
|
6326 |
+
"step": 899
|
6327 |
+
},
|
6328 |
+
{
|
6329 |
+
"epoch": 0.78125,
|
6330 |
+
"grad_norm": 23.00263214111328,
|
6331 |
+
"learning_rate": 2.308211199111674e-05,
|
6332 |
+
"loss": 1.9888,
|
6333 |
+
"step": 900
|
6334 |
+
},
|
6335 |
+
{
|
6336 |
+
"epoch": 0.7821180555555556,
|
6337 |
+
"grad_norm": 8.42990779876709,
|
6338 |
+
"learning_rate": 2.290660789847886e-05,
|
6339 |
+
"loss": 1.9037,
|
6340 |
+
"step": 901
|
6341 |
+
},
|
6342 |
+
{
|
6343 |
+
"epoch": 0.7829861111111112,
|
6344 |
+
"grad_norm": 8.972105979919434,
|
6345 |
+
"learning_rate": 2.273168723001321e-05,
|
6346 |
+
"loss": 1.7786,
|
6347 |
+
"step": 902
|
6348 |
+
},
|
6349 |
+
{
|
6350 |
+
"epoch": 0.7838541666666666,
|
6351 |
+
"grad_norm": 14.119694709777832,
|
6352 |
+
"learning_rate": 2.255735130947716e-05,
|
6353 |
+
"loss": 1.7296,
|
6354 |
+
"step": 903
|
6355 |
+
},
|
6356 |
+
{
|
6357 |
+
"epoch": 0.7847222222222222,
|
6358 |
+
"grad_norm": 9.153631210327148,
|
6359 |
+
"learning_rate": 2.2383601456202797e-05,
|
6360 |
+
"loss": 1.9097,
|
6361 |
+
"step": 904
|
6362 |
+
},
|
6363 |
+
{
|
6364 |
+
"epoch": 0.7855902777777778,
|
6365 |
+
"grad_norm": 9.159906387329102,
|
6366 |
+
"learning_rate": 2.2210438985087135e-05,
|
6367 |
+
"loss": 1.817,
|
6368 |
+
"step": 905
|
6369 |
+
},
|
6370 |
+
{
|
6371 |
+
"epoch": 0.7864583333333334,
|
6372 |
+
"grad_norm": 11.576175689697266,
|
6373 |
+
"learning_rate": 2.2037865206581855e-05,
|
6374 |
+
"loss": 1.9782,
|
6375 |
+
"step": 906
|
6376 |
+
},
|
6377 |
+
{
|
6378 |
+
"epoch": 0.7873263888888888,
|
6379 |
+
"grad_norm": 11.687173843383789,
|
6380 |
+
"learning_rate": 2.1865881426683687e-05,
|
6381 |
+
"loss": 1.7461,
|
6382 |
+
"step": 907
|
6383 |
+
},
|
6384 |
+
{
|
6385 |
+
"epoch": 0.7881944444444444,
|
6386 |
+
"grad_norm": 9.847061157226562,
|
6387 |
+
"learning_rate": 2.169448894692433e-05,
|
6388 |
+
"loss": 1.8658,
|
6389 |
+
"step": 908
|
6390 |
+
},
|
6391 |
+
{
|
6392 |
+
"epoch": 0.7890625,
|
6393 |
+
"grad_norm": 10.564614295959473,
|
6394 |
+
"learning_rate": 2.152368906436064e-05,
|
6395 |
+
"loss": 1.8111,
|
6396 |
+
"step": 909
|
6397 |
+
},
|
6398 |
+
{
|
6399 |
+
"epoch": 0.7899305555555556,
|
6400 |
+
"grad_norm": 10.352989196777344,
|
6401 |
+
"learning_rate": 2.1353483071564938e-05,
|
6402 |
+
"loss": 1.8505,
|
6403 |
+
"step": 910
|
6404 |
+
},
|
6405 |
+
{
|
6406 |
+
"epoch": 0.7907986111111112,
|
6407 |
+
"grad_norm": 14.823884963989258,
|
6408 |
+
"learning_rate": 2.1183872256615e-05,
|
6409 |
+
"loss": 1.7093,
|
6410 |
+
"step": 911
|
6411 |
+
},
|
6412 |
+
{
|
6413 |
+
"epoch": 0.7916666666666666,
|
6414 |
+
"grad_norm": 13.754022598266602,
|
6415 |
+
"learning_rate": 2.1014857903084583e-05,
|
6416 |
+
"loss": 1.6578,
|
6417 |
+
"step": 912
|
6418 |
+
},
|
6419 |
+
{
|
6420 |
+
"epoch": 0.7925347222222222,
|
6421 |
+
"grad_norm": 10.550676345825195,
|
6422 |
+
"learning_rate": 2.084644129003346e-05,
|
6423 |
+
"loss": 1.8193,
|
6424 |
+
"step": 913
|
6425 |
+
},
|
6426 |
+
{
|
6427 |
+
"epoch": 0.7934027777777778,
|
6428 |
+
"grad_norm": 11.5505952835083,
|
6429 |
+
"learning_rate": 2.0678623691997856e-05,
|
6430 |
+
"loss": 1.9247,
|
6431 |
+
"step": 914
|
6432 |
+
},
|
6433 |
+
{
|
6434 |
+
"epoch": 0.7942708333333334,
|
6435 |
+
"grad_norm": 11.248336791992188,
|
6436 |
+
"learning_rate": 2.051140637898088e-05,
|
6437 |
+
"loss": 1.9936,
|
6438 |
+
"step": 915
|
6439 |
+
},
|
6440 |
+
{
|
6441 |
+
"epoch": 0.7951388888888888,
|
6442 |
+
"grad_norm": 11.271432876586914,
|
6443 |
+
"learning_rate": 2.034479061644271e-05,
|
6444 |
+
"loss": 2.0647,
|
6445 |
+
"step": 916
|
6446 |
+
},
|
6447 |
+
{
|
6448 |
+
"epoch": 0.7960069444444444,
|
6449 |
+
"grad_norm": 11.465668678283691,
|
6450 |
+
"learning_rate": 2.017877766529126e-05,
|
6451 |
+
"loss": 2.0862,
|
6452 |
+
"step": 917
|
6453 |
+
},
|
6454 |
+
{
|
6455 |
+
"epoch": 0.796875,
|
6456 |
+
"grad_norm": 11.615191459655762,
|
6457 |
+
"learning_rate": 2.001336878187241e-05,
|
6458 |
+
"loss": 1.8142,
|
6459 |
+
"step": 918
|
6460 |
+
},
|
6461 |
+
{
|
6462 |
+
"epoch": 0.7977430555555556,
|
6463 |
+
"grad_norm": 15.20010757446289,
|
6464 |
+
"learning_rate": 1.9848565217960635e-05,
|
6465 |
+
"loss": 1.965,
|
6466 |
+
"step": 919
|
6467 |
+
},
|
6468 |
+
{
|
6469 |
+
"epoch": 0.7986111111111112,
|
6470 |
+
"grad_norm": 12.711029052734375,
|
6471 |
+
"learning_rate": 1.968436822074954e-05,
|
6472 |
+
"loss": 2.0629,
|
6473 |
+
"step": 920
|
6474 |
+
},
|
6475 |
+
{
|
6476 |
+
"epoch": 0.7994791666666666,
|
6477 |
+
"grad_norm": 12.506697654724121,
|
6478 |
+
"learning_rate": 1.9520779032842283e-05,
|
6479 |
+
"loss": 2.0334,
|
6480 |
+
"step": 921
|
6481 |
+
},
|
6482 |
+
{
|
6483 |
+
"epoch": 0.8003472222222222,
|
6484 |
+
"grad_norm": 12.275419235229492,
|
6485 |
+
"learning_rate": 1.9357798892242407e-05,
|
6486 |
+
"loss": 1.5386,
|
6487 |
+
"step": 922
|
6488 |
+
},
|
6489 |
+
{
|
6490 |
+
"epoch": 0.8012152777777778,
|
6491 |
+
"grad_norm": 11.594676971435547,
|
6492 |
+
"learning_rate": 1.9195429032344216e-05,
|
6493 |
+
"loss": 1.9125,
|
6494 |
+
"step": 923
|
6495 |
+
},
|
6496 |
+
{
|
6497 |
+
"epoch": 0.8020833333333334,
|
6498 |
+
"grad_norm": 13.308306694030762,
|
6499 |
+
"learning_rate": 1.9033670681923564e-05,
|
6500 |
+
"loss": 2.052,
|
6501 |
+
"step": 924
|
6502 |
+
},
|
6503 |
+
{
|
6504 |
+
"epoch": 0.8029513888888888,
|
6505 |
+
"grad_norm": 12.94735050201416,
|
6506 |
+
"learning_rate": 1.8872525065128632e-05,
|
6507 |
+
"loss": 1.9368,
|
6508 |
+
"step": 925
|
6509 |
+
},
|
6510 |
+
{
|
6511 |
+
"epoch": 0.8038194444444444,
|
6512 |
+
"grad_norm": 17.014511108398438,
|
6513 |
+
"learning_rate": 1.8711993401470483e-05,
|
6514 |
+
"loss": 1.7076,
|
6515 |
+
"step": 926
|
6516 |
+
},
|
6517 |
+
{
|
6518 |
+
"epoch": 0.8046875,
|
6519 |
+
"grad_norm": 13.584419250488281,
|
6520 |
+
"learning_rate": 1.855207690581403e-05,
|
6521 |
+
"loss": 1.8544,
|
6522 |
+
"step": 927
|
6523 |
+
},
|
6524 |
+
{
|
6525 |
+
"epoch": 0.8055555555555556,
|
6526 |
+
"grad_norm": 12.811192512512207,
|
6527 |
+
"learning_rate": 1.839277678836866e-05,
|
6528 |
+
"loss": 1.6873,
|
6529 |
+
"step": 928
|
6530 |
+
},
|
6531 |
+
{
|
6532 |
+
"epoch": 0.8064236111111112,
|
6533 |
+
"grad_norm": 15.083394050598145,
|
6534 |
+
"learning_rate": 1.8234094254679157e-05,
|
6535 |
+
"loss": 1.7113,
|
6536 |
+
"step": 929
|
6537 |
+
},
|
6538 |
+
{
|
6539 |
+
"epoch": 0.8072916666666666,
|
6540 |
+
"grad_norm": 13.675579071044922,
|
6541 |
+
"learning_rate": 1.807603050561667e-05,
|
6542 |
+
"loss": 2.0513,
|
6543 |
+
"step": 930
|
6544 |
+
},
|
6545 |
+
{
|
6546 |
+
"epoch": 0.8081597222222222,
|
6547 |
+
"grad_norm": 14.19676399230957,
|
6548 |
+
"learning_rate": 1.7918586737369424e-05,
|
6549 |
+
"loss": 1.5953,
|
6550 |
+
"step": 931
|
6551 |
+
},
|
6552 |
+
{
|
6553 |
+
"epoch": 0.8090277777777778,
|
6554 |
+
"grad_norm": 13.67191219329834,
|
6555 |
+
"learning_rate": 1.776176414143389e-05,
|
6556 |
+
"loss": 1.8388,
|
6557 |
+
"step": 932
|
6558 |
+
},
|
6559 |
+
{
|
6560 |
+
"epoch": 0.8098958333333334,
|
6561 |
+
"grad_norm": 13.31896686553955,
|
6562 |
+
"learning_rate": 1.7605563904605558e-05,
|
6563 |
+
"loss": 1.9421,
|
6564 |
+
"step": 933
|
6565 |
+
},
|
6566 |
+
{
|
6567 |
+
"epoch": 0.8107638888888888,
|
6568 |
+
"grad_norm": 13.741345405578613,
|
6569 |
+
"learning_rate": 1.7449987208970087e-05,
|
6570 |
+
"loss": 1.8479,
|
6571 |
+
"step": 934
|
6572 |
+
},
|
6573 |
+
{
|
6574 |
+
"epoch": 0.8116319444444444,
|
6575 |
+
"grad_norm": 15.538813591003418,
|
6576 |
+
"learning_rate": 1.7295035231894384e-05,
|
6577 |
+
"loss": 1.8963,
|
6578 |
+
"step": 935
|
6579 |
+
},
|
6580 |
+
{
|
6581 |
+
"epoch": 0.8125,
|
6582 |
+
"grad_norm": 14.026754379272461,
|
6583 |
+
"learning_rate": 1.714070914601752e-05,
|
6584 |
+
"loss": 2.031,
|
6585 |
+
"step": 936
|
6586 |
+
},
|
6587 |
+
{
|
6588 |
+
"epoch": 0.8133680555555556,
|
6589 |
+
"grad_norm": 17.442493438720703,
|
6590 |
+
"learning_rate": 1.698701011924212e-05,
|
6591 |
+
"loss": 1.9126,
|
6592 |
+
"step": 937
|
6593 |
+
},
|
6594 |
+
{
|
6595 |
+
"epoch": 0.8142361111111112,
|
6596 |
+
"grad_norm": 13.540362358093262,
|
6597 |
+
"learning_rate": 1.6833939314725244e-05,
|
6598 |
+
"loss": 1.8406,
|
6599 |
+
"step": 938
|
6600 |
+
},
|
6601 |
+
{
|
6602 |
+
"epoch": 0.8151041666666666,
|
6603 |
+
"grad_norm": 14.226949691772461,
|
6604 |
+
"learning_rate": 1.6681497890869756e-05,
|
6605 |
+
"loss": 1.7313,
|
6606 |
+
"step": 939
|
6607 |
+
},
|
6608 |
+
{
|
6609 |
+
"epoch": 0.8159722222222222,
|
6610 |
+
"grad_norm": 15.58519458770752,
|
6611 |
+
"learning_rate": 1.6529687001315598e-05,
|
6612 |
+
"loss": 1.9592,
|
6613 |
+
"step": 940
|
6614 |
+
},
|
6615 |
+
{
|
6616 |
+
"epoch": 0.8168402777777778,
|
6617 |
+
"grad_norm": 16.236202239990234,
|
6618 |
+
"learning_rate": 1.6378507794930865e-05,
|
6619 |
+
"loss": 2.142,
|
6620 |
+
"step": 941
|
6621 |
+
},
|
6622 |
+
{
|
6623 |
+
"epoch": 0.8177083333333334,
|
6624 |
+
"grad_norm": 15.077717781066895,
|
6625 |
+
"learning_rate": 1.6227961415803305e-05,
|
6626 |
+
"loss": 1.8444,
|
6627 |
+
"step": 942
|
6628 |
+
},
|
6629 |
+
{
|
6630 |
+
"epoch": 0.8185763888888888,
|
6631 |
+
"grad_norm": 15.909273147583008,
|
6632 |
+
"learning_rate": 1.6078049003231554e-05,
|
6633 |
+
"loss": 1.8266,
|
6634 |
+
"step": 943
|
6635 |
+
},
|
6636 |
+
{
|
6637 |
+
"epoch": 0.8194444444444444,
|
6638 |
+
"grad_norm": 17.78791046142578,
|
6639 |
+
"learning_rate": 1.5928771691716517e-05,
|
6640 |
+
"loss": 1.6309,
|
6641 |
+
"step": 944
|
6642 |
+
},
|
6643 |
+
{
|
6644 |
+
"epoch": 0.8203125,
|
6645 |
+
"grad_norm": 15.556278228759766,
|
6646 |
+
"learning_rate": 1.578013061095288e-05,
|
6647 |
+
"loss": 1.8906,
|
6648 |
+
"step": 945
|
6649 |
+
},
|
6650 |
+
{
|
6651 |
+
"epoch": 0.8211805555555556,
|
6652 |
+
"grad_norm": 15.386207580566406,
|
6653 |
+
"learning_rate": 1.5632126885820385e-05,
|
6654 |
+
"loss": 2.1058,
|
6655 |
+
"step": 946
|
6656 |
+
},
|
6657 |
+
{
|
6658 |
+
"epoch": 0.8220486111111112,
|
6659 |
+
"grad_norm": 16.071699142456055,
|
6660 |
+
"learning_rate": 1.5484761636375543e-05,
|
6661 |
+
"loss": 2.0292,
|
6662 |
+
"step": 947
|
6663 |
+
},
|
6664 |
+
{
|
6665 |
+
"epoch": 0.8229166666666666,
|
6666 |
+
"grad_norm": 19.116928100585938,
|
6667 |
+
"learning_rate": 1.5338035977842946e-05,
|
6668 |
+
"loss": 2.0804,
|
6669 |
+
"step": 948
|
6670 |
+
},
|
6671 |
+
{
|
6672 |
+
"epoch": 0.8237847222222222,
|
6673 |
+
"grad_norm": 18.66615104675293,
|
6674 |
+
"learning_rate": 1.5191951020606931e-05,
|
6675 |
+
"loss": 1.9572,
|
6676 |
+
"step": 949
|
6677 |
+
},
|
6678 |
+
{
|
6679 |
+
"epoch": 0.8246527777777778,
|
6680 |
+
"grad_norm": 30.38023567199707,
|
6681 |
+
"learning_rate": 1.5046507870203152e-05,
|
6682 |
+
"loss": 1.8707,
|
6683 |
+
"step": 950
|
6684 |
+
},
|
6685 |
+
{
|
6686 |
+
"epoch": 0.8255208333333334,
|
6687 |
+
"grad_norm": 8.646780014038086,
|
6688 |
+
"learning_rate": 1.4901707627310279e-05,
|
6689 |
+
"loss": 1.8069,
|
6690 |
+
"step": 951
|
6691 |
+
},
|
6692 |
+
{
|
6693 |
+
"epoch": 0.8263888888888888,
|
6694 |
+
"grad_norm": 9.259119987487793,
|
6695 |
+
"learning_rate": 1.4757551387741587e-05,
|
6696 |
+
"loss": 1.7852,
|
6697 |
+
"step": 952
|
6698 |
+
},
|
6699 |
+
{
|
6700 |
+
"epoch": 0.8272569444444444,
|
6701 |
+
"grad_norm": 10.570134162902832,
|
6702 |
+
"learning_rate": 1.4614040242436666e-05,
|
6703 |
+
"loss": 1.8528,
|
6704 |
+
"step": 953
|
6705 |
+
},
|
6706 |
+
{
|
6707 |
+
"epoch": 0.828125,
|
6708 |
+
"grad_norm": 13.254385948181152,
|
6709 |
+
"learning_rate": 1.4471175277453197e-05,
|
6710 |
+
"loss": 1.7671,
|
6711 |
+
"step": 954
|
6712 |
+
},
|
6713 |
+
{
|
6714 |
+
"epoch": 0.8289930555555556,
|
6715 |
+
"grad_norm": 10.687222480773926,
|
6716 |
+
"learning_rate": 1.4328957573958712e-05,
|
6717 |
+
"loss": 1.9012,
|
6718 |
+
"step": 955
|
6719 |
+
},
|
6720 |
+
{
|
6721 |
+
"epoch": 0.8298611111111112,
|
6722 |
+
"grad_norm": 11.228791236877441,
|
6723 |
+
"learning_rate": 1.4187388208222474e-05,
|
6724 |
+
"loss": 1.9358,
|
6725 |
+
"step": 956
|
6726 |
+
},
|
6727 |
+
{
|
6728 |
+
"epoch": 0.8307291666666666,
|
6729 |
+
"grad_norm": 16.7152042388916,
|
6730 |
+
"learning_rate": 1.404646825160727e-05,
|
6731 |
+
"loss": 1.6779,
|
6732 |
+
"step": 957
|
6733 |
+
},
|
6734 |
+
{
|
6735 |
+
"epoch": 0.8315972222222222,
|
6736 |
+
"grad_norm": 11.432924270629883,
|
6737 |
+
"learning_rate": 1.3906198770561263e-05,
|
6738 |
+
"loss": 1.8343,
|
6739 |
+
"step": 958
|
6740 |
+
},
|
6741 |
+
{
|
6742 |
+
"epoch": 0.8324652777777778,
|
6743 |
+
"grad_norm": 11.40870475769043,
|
6744 |
+
"learning_rate": 1.376658082661002e-05,
|
6745 |
+
"loss": 2.0898,
|
6746 |
+
"step": 959
|
6747 |
+
},
|
6748 |
+
{
|
6749 |
+
"epoch": 0.8333333333333334,
|
6750 |
+
"grad_norm": 11.310538291931152,
|
6751 |
+
"learning_rate": 1.36276154763484e-05,
|
6752 |
+
"loss": 1.766,
|
6753 |
+
"step": 960
|
6754 |
+
},
|
6755 |
+
{
|
6756 |
+
"epoch": 0.8342013888888888,
|
6757 |
+
"grad_norm": 11.705902099609375,
|
6758 |
+
"learning_rate": 1.3489303771432627e-05,
|
6759 |
+
"loss": 1.886,
|
6760 |
+
"step": 961
|
6761 |
+
},
|
6762 |
+
{
|
6763 |
+
"epoch": 0.8350694444444444,
|
6764 |
+
"grad_norm": 11.772234916687012,
|
6765 |
+
"learning_rate": 1.3351646758572301e-05,
|
6766 |
+
"loss": 1.8639,
|
6767 |
+
"step": 962
|
6768 |
+
},
|
6769 |
+
{
|
6770 |
+
"epoch": 0.8359375,
|
6771 |
+
"grad_norm": 11.186405181884766,
|
6772 |
+
"learning_rate": 1.3214645479522436e-05,
|
6773 |
+
"loss": 1.748,
|
6774 |
+
"step": 963
|
6775 |
+
},
|
6776 |
+
{
|
6777 |
+
"epoch": 0.8368055555555556,
|
6778 |
+
"grad_norm": 11.405023574829102,
|
6779 |
+
"learning_rate": 1.3078300971075618e-05,
|
6780 |
+
"loss": 1.6184,
|
6781 |
+
"step": 964
|
6782 |
+
},
|
6783 |
+
{
|
6784 |
+
"epoch": 0.8376736111111112,
|
6785 |
+
"grad_norm": 12.797281265258789,
|
6786 |
+
"learning_rate": 1.2942614265054143e-05,
|
6787 |
+
"loss": 1.9074,
|
6788 |
+
"step": 965
|
6789 |
+
},
|
6790 |
+
{
|
6791 |
+
"epoch": 0.8385416666666666,
|
6792 |
+
"grad_norm": 11.599189758300781,
|
6793 |
+
"learning_rate": 1.280758638830224e-05,
|
6794 |
+
"loss": 1.9691,
|
6795 |
+
"step": 966
|
6796 |
+
},
|
6797 |
+
{
|
6798 |
+
"epoch": 0.8394097222222222,
|
6799 |
+
"grad_norm": 11.033600807189941,
|
6800 |
+
"learning_rate": 1.2673218362678297e-05,
|
6801 |
+
"loss": 1.7378,
|
6802 |
+
"step": 967
|
6803 |
+
},
|
6804 |
+
{
|
6805 |
+
"epoch": 0.8402777777777778,
|
6806 |
+
"grad_norm": 12.463747024536133,
|
6807 |
+
"learning_rate": 1.253951120504706e-05,
|
6808 |
+
"loss": 1.9531,
|
6809 |
+
"step": 968
|
6810 |
+
},
|
6811 |
+
{
|
6812 |
+
"epoch": 0.8411458333333334,
|
6813 |
+
"grad_norm": 13.710183143615723,
|
6814 |
+
"learning_rate": 1.2406465927272004e-05,
|
6815 |
+
"loss": 1.7371,
|
6816 |
+
"step": 969
|
6817 |
+
},
|
6818 |
+
{
|
6819 |
+
"epoch": 0.8420138888888888,
|
6820 |
+
"grad_norm": 13.09089183807373,
|
6821 |
+
"learning_rate": 1.2274083536207636e-05,
|
6822 |
+
"loss": 1.7077,
|
6823 |
+
"step": 970
|
6824 |
+
},
|
6825 |
+
{
|
6826 |
+
"epoch": 0.8428819444444444,
|
6827 |
+
"grad_norm": 14.44745922088623,
|
6828 |
+
"learning_rate": 1.2142365033691949e-05,
|
6829 |
+
"loss": 1.852,
|
6830 |
+
"step": 971
|
6831 |
+
},
|
6832 |
+
{
|
6833 |
+
"epoch": 0.84375,
|
6834 |
+
"grad_norm": 15.711243629455566,
|
6835 |
+
"learning_rate": 1.2011311416538774e-05,
|
6836 |
+
"loss": 1.8276,
|
6837 |
+
"step": 972
|
6838 |
+
},
|
6839 |
+
{
|
6840 |
+
"epoch": 0.8446180555555556,
|
6841 |
+
"grad_norm": 12.988260269165039,
|
6842 |
+
"learning_rate": 1.1880923676530199e-05,
|
6843 |
+
"loss": 1.6246,
|
6844 |
+
"step": 973
|
6845 |
+
},
|
6846 |
+
{
|
6847 |
+
"epoch": 0.8454861111111112,
|
6848 |
+
"grad_norm": 14.229040145874023,
|
6849 |
+
"learning_rate": 1.1751202800409178e-05,
|
6850 |
+
"loss": 1.4541,
|
6851 |
+
"step": 974
|
6852 |
+
},
|
6853 |
+
{
|
6854 |
+
"epoch": 0.8463541666666666,
|
6855 |
+
"grad_norm": 13.075967788696289,
|
6856 |
+
"learning_rate": 1.1622149769871926e-05,
|
6857 |
+
"loss": 1.8491,
|
6858 |
+
"step": 975
|
6859 |
+
},
|
6860 |
+
{
|
6861 |
+
"epoch": 0.8472222222222222,
|
6862 |
+
"grad_norm": 13.813093185424805,
|
6863 |
+
"learning_rate": 1.1493765561560643e-05,
|
6864 |
+
"loss": 1.8382,
|
6865 |
+
"step": 976
|
6866 |
+
},
|
6867 |
+
{
|
6868 |
+
"epoch": 0.8480902777777778,
|
6869 |
+
"grad_norm": 19.68598747253418,
|
6870 |
+
"learning_rate": 1.1366051147056033e-05,
|
6871 |
+
"loss": 1.6911,
|
6872 |
+
"step": 977
|
6873 |
+
},
|
6874 |
+
{
|
6875 |
+
"epoch": 0.8489583333333334,
|
6876 |
+
"grad_norm": 13.526206970214844,
|
6877 |
+
"learning_rate": 1.1239007492869913e-05,
|
6878 |
+
"loss": 1.8204,
|
6879 |
+
"step": 978
|
6880 |
+
},
|
6881 |
+
{
|
6882 |
+
"epoch": 0.8498263888888888,
|
6883 |
+
"grad_norm": 14.392912864685059,
|
6884 |
+
"learning_rate": 1.1112635560437979e-05,
|
6885 |
+
"loss": 1.9543,
|
6886 |
+
"step": 979
|
6887 |
+
},
|
6888 |
+
{
|
6889 |
+
"epoch": 0.8506944444444444,
|
6890 |
+
"grad_norm": 15.086181640625,
|
6891 |
+
"learning_rate": 1.0986936306112472e-05,
|
6892 |
+
"loss": 1.6954,
|
6893 |
+
"step": 980
|
6894 |
+
},
|
6895 |
+
{
|
6896 |
+
"epoch": 0.8515625,
|
6897 |
+
"grad_norm": 13.736040115356445,
|
6898 |
+
"learning_rate": 1.086191068115503e-05,
|
6899 |
+
"loss": 1.9458,
|
6900 |
+
"step": 981
|
6901 |
+
},
|
6902 |
+
{
|
6903 |
+
"epoch": 0.8524305555555556,
|
6904 |
+
"grad_norm": 14.614473342895508,
|
6905 |
+
"learning_rate": 1.0737559631729354e-05,
|
6906 |
+
"loss": 1.9502,
|
6907 |
+
"step": 982
|
6908 |
+
},
|
6909 |
+
{
|
6910 |
+
"epoch": 0.8532986111111112,
|
6911 |
+
"grad_norm": 16.009204864501953,
|
6912 |
+
"learning_rate": 1.061388409889419e-05,
|
6913 |
+
"loss": 1.7764,
|
6914 |
+
"step": 983
|
6915 |
+
},
|
6916 |
+
{
|
6917 |
+
"epoch": 0.8541666666666666,
|
6918 |
+
"grad_norm": 13.77364730834961,
|
6919 |
+
"learning_rate": 1.0490885018596075e-05,
|
6920 |
+
"loss": 1.8786,
|
6921 |
+
"step": 984
|
6922 |
+
},
|
6923 |
+
{
|
6924 |
+
"epoch": 0.8550347222222222,
|
6925 |
+
"grad_norm": 13.435188293457031,
|
6926 |
+
"learning_rate": 1.0368563321662317e-05,
|
6927 |
+
"loss": 2.134,
|
6928 |
+
"step": 985
|
6929 |
+
},
|
6930 |
+
{
|
6931 |
+
"epoch": 0.8559027777777778,
|
6932 |
+
"grad_norm": 13.468422889709473,
|
6933 |
+
"learning_rate": 1.0246919933794041e-05,
|
6934 |
+
"loss": 1.9328,
|
6935 |
+
"step": 986
|
6936 |
+
},
|
6937 |
+
{
|
6938 |
+
"epoch": 0.8567708333333334,
|
6939 |
+
"grad_norm": 15.681264877319336,
|
6940 |
+
"learning_rate": 1.012595577555897e-05,
|
6941 |
+
"loss": 1.8238,
|
6942 |
+
"step": 987
|
6943 |
+
},
|
6944 |
+
{
|
6945 |
+
"epoch": 0.8576388888888888,
|
6946 |
+
"grad_norm": 13.976241111755371,
|
6947 |
+
"learning_rate": 1.0005671762384671e-05,
|
6948 |
+
"loss": 2.0445,
|
6949 |
+
"step": 988
|
6950 |
+
},
|
6951 |
+
{
|
6952 |
+
"epoch": 0.8585069444444444,
|
6953 |
+
"grad_norm": 18.10539436340332,
|
6954 |
+
"learning_rate": 9.886068804551507e-06,
|
6955 |
+
"loss": 1.6096,
|
6956 |
+
"step": 989
|
6957 |
+
},
|
6958 |
+
{
|
6959 |
+
"epoch": 0.859375,
|
6960 |
+
"grad_norm": 21.029102325439453,
|
6961 |
+
"learning_rate": 9.76714780718574e-06,
|
6962 |
+
"loss": 1.9935,
|
6963 |
+
"step": 990
|
6964 |
+
},
|
6965 |
+
{
|
6966 |
+
"epoch": 0.8602430555555556,
|
6967 |
+
"grad_norm": 16.24623680114746,
|
6968 |
+
"learning_rate": 9.648909670252804e-06,
|
6969 |
+
"loss": 1.9739,
|
6970 |
+
"step": 991
|
6971 |
+
},
|
6972 |
+
{
|
6973 |
+
"epoch": 0.8611111111111112,
|
6974 |
+
"grad_norm": 15.228158950805664,
|
6975 |
+
"learning_rate": 9.531355288550325e-06,
|
6976 |
+
"loss": 1.9713,
|
6977 |
+
"step": 992
|
6978 |
+
},
|
6979 |
+
{
|
6980 |
+
"epoch": 0.8619791666666666,
|
6981 |
+
"grad_norm": 16.473939895629883,
|
6982 |
+
"learning_rate": 9.414485551701502e-06,
|
6983 |
+
"loss": 2.0711,
|
6984 |
+
"step": 993
|
6985 |
+
},
|
6986 |
+
{
|
6987 |
+
"epoch": 0.8628472222222222,
|
6988 |
+
"grad_norm": 16.327442169189453,
|
6989 |
+
"learning_rate": 9.298301344148275e-06,
|
6990 |
+
"loss": 2.1539,
|
6991 |
+
"step": 994
|
6992 |
+
},
|
6993 |
+
{
|
6994 |
+
"epoch": 0.8637152777777778,
|
6995 |
+
"grad_norm": 21.785234451293945,
|
6996 |
+
"learning_rate": 9.182803545144624e-06,
|
6997 |
+
"loss": 1.9409,
|
6998 |
+
"step": 995
|
6999 |
+
},
|
7000 |
+
{
|
7001 |
+
"epoch": 0.8645833333333334,
|
7002 |
+
"grad_norm": 24.67173194885254,
|
7003 |
+
"learning_rate": 9.067993028750066e-06,
|
7004 |
+
"loss": 2.1132,
|
7005 |
+
"step": 996
|
7006 |
+
},
|
7007 |
+
{
|
7008 |
+
"epoch": 0.8654513888888888,
|
7009 |
+
"grad_norm": 21.997880935668945,
|
7010 |
+
"learning_rate": 8.953870663822794e-06,
|
7011 |
+
"loss": 1.8001,
|
7012 |
+
"step": 997
|
7013 |
+
},
|
7014 |
+
{
|
7015 |
+
"epoch": 0.8663194444444444,
|
7016 |
+
"grad_norm": 19.86835479736328,
|
7017 |
+
"learning_rate": 8.84043731401335e-06,
|
7018 |
+
"loss": 2.051,
|
7019 |
+
"step": 998
|
7020 |
+
},
|
7021 |
+
{
|
7022 |
+
"epoch": 0.8671875,
|
7023 |
+
"grad_norm": 23.3819637298584,
|
7024 |
+
"learning_rate": 8.727693837757922e-06,
|
7025 |
+
"loss": 2.1168,
|
7026 |
+
"step": 999
|
7027 |
+
},
|
7028 |
+
{
|
7029 |
+
"epoch": 0.8680555555555556,
|
7030 |
+
"grad_norm": 34.970333099365234,
|
7031 |
+
"learning_rate": 8.615641088271875e-06,
|
7032 |
+
"loss": 2.3178,
|
7033 |
+
"step": 1000
|
7034 |
+
},
|
7035 |
+
{
|
7036 |
+
"epoch": 0.8689236111111112,
|
7037 |
+
"grad_norm": 8.34579849243164,
|
7038 |
+
"learning_rate": 8.5042799135434e-06,
|
7039 |
+
"loss": 1.8757,
|
7040 |
+
"step": 1001
|
7041 |
+
},
|
7042 |
+
{
|
7043 |
+
"epoch": 0.8697916666666666,
|
7044 |
+
"grad_norm": 9.125555038452148,
|
7045 |
+
"learning_rate": 8.39361115632693e-06,
|
7046 |
+
"loss": 1.638,
|
7047 |
+
"step": 1002
|
7048 |
+
},
|
7049 |
+
{
|
7050 |
+
"epoch": 0.8706597222222222,
|
7051 |
+
"grad_norm": 9.663819313049316,
|
7052 |
+
"learning_rate": 8.283635654136945e-06,
|
7053 |
+
"loss": 1.7429,
|
7054 |
+
"step": 1003
|
7055 |
+
},
|
7056 |
+
{
|
7057 |
+
"epoch": 0.8715277777777778,
|
7058 |
+
"grad_norm": 12.29391860961914,
|
7059 |
+
"learning_rate": 8.174354239241444e-06,
|
7060 |
+
"loss": 1.7301,
|
7061 |
+
"step": 1004
|
7062 |
+
},
|
7063 |
+
{
|
7064 |
+
"epoch": 0.8723958333333334,
|
7065 |
+
"grad_norm": 11.39750862121582,
|
7066 |
+
"learning_rate": 8.065767738655772e-06,
|
7067 |
+
"loss": 1.7811,
|
7068 |
+
"step": 1005
|
7069 |
+
},
|
7070 |
+
{
|
7071 |
+
"epoch": 0.8732638888888888,
|
7072 |
+
"grad_norm": 11.529508590698242,
|
7073 |
+
"learning_rate": 7.95787697413638e-06,
|
7074 |
+
"loss": 1.6474,
|
7075 |
+
"step": 1006
|
7076 |
+
},
|
7077 |
+
{
|
7078 |
+
"epoch": 0.8741319444444444,
|
7079 |
+
"grad_norm": 12.057182312011719,
|
7080 |
+
"learning_rate": 7.850682762174478e-06,
|
7081 |
+
"loss": 1.7135,
|
7082 |
+
"step": 1007
|
7083 |
+
},
|
7084 |
+
{
|
7085 |
+
"epoch": 0.875,
|
7086 |
+
"grad_norm": 11.368781089782715,
|
7087 |
+
"learning_rate": 7.744185913990009e-06,
|
7088 |
+
"loss": 1.7238,
|
7089 |
+
"step": 1008
|
7090 |
+
},
|
7091 |
+
{
|
7092 |
+
"epoch": 0.8758680555555556,
|
7093 |
+
"grad_norm": 11.934446334838867,
|
7094 |
+
"learning_rate": 7.638387235525379e-06,
|
7095 |
+
"loss": 1.6828,
|
7096 |
+
"step": 1009
|
7097 |
+
},
|
7098 |
+
{
|
7099 |
+
"epoch": 0.8767361111111112,
|
7100 |
+
"grad_norm": 11.287178039550781,
|
7101 |
+
"learning_rate": 7.533287527439425e-06,
|
7102 |
+
"loss": 1.8623,
|
7103 |
+
"step": 1010
|
7104 |
+
},
|
7105 |
+
{
|
7106 |
+
"epoch": 0.8776041666666666,
|
7107 |
+
"grad_norm": 12.284099578857422,
|
7108 |
+
"learning_rate": 7.428887585101374e-06,
|
7109 |
+
"loss": 1.8997,
|
7110 |
+
"step": 1011
|
7111 |
+
},
|
7112 |
+
{
|
7113 |
+
"epoch": 0.8784722222222222,
|
7114 |
+
"grad_norm": 12.491585731506348,
|
7115 |
+
"learning_rate": 7.325188198584765e-06,
|
7116 |
+
"loss": 1.747,
|
7117 |
+
"step": 1012
|
7118 |
+
},
|
7119 |
+
{
|
7120 |
+
"epoch": 0.8793402777777778,
|
7121 |
+
"grad_norm": 15.202817916870117,
|
7122 |
+
"learning_rate": 7.222190152661523e-06,
|
7123 |
+
"loss": 1.6519,
|
7124 |
+
"step": 1013
|
7125 |
+
},
|
7126 |
+
{
|
7127 |
+
"epoch": 0.8802083333333334,
|
7128 |
+
"grad_norm": 10.36656665802002,
|
7129 |
+
"learning_rate": 7.119894226795976e-06,
|
7130 |
+
"loss": 1.7042,
|
7131 |
+
"step": 1014
|
7132 |
+
},
|
7133 |
+
{
|
7134 |
+
"epoch": 0.8810763888888888,
|
7135 |
+
"grad_norm": 12.499467849731445,
|
7136 |
+
"learning_rate": 7.0183011951389655e-06,
|
7137 |
+
"loss": 1.7734,
|
7138 |
+
"step": 1015
|
7139 |
+
},
|
7140 |
+
{
|
7141 |
+
"epoch": 0.8819444444444444,
|
7142 |
+
"grad_norm": 13.125415802001953,
|
7143 |
+
"learning_rate": 6.917411826522058e-06,
|
7144 |
+
"loss": 1.7441,
|
7145 |
+
"step": 1016
|
7146 |
+
},
|
7147 |
+
{
|
7148 |
+
"epoch": 0.8828125,
|
7149 |
+
"grad_norm": 11.724601745605469,
|
7150 |
+
"learning_rate": 6.817226884451588e-06,
|
7151 |
+
"loss": 1.7951,
|
7152 |
+
"step": 1017
|
7153 |
+
},
|
7154 |
+
{
|
7155 |
+
"epoch": 0.8836805555555556,
|
7156 |
+
"grad_norm": 14.876508712768555,
|
7157 |
+
"learning_rate": 6.717747127103036e-06,
|
7158 |
+
"loss": 2.0207,
|
7159 |
+
"step": 1018
|
7160 |
+
},
|
7161 |
+
{
|
7162 |
+
"epoch": 0.8845486111111112,
|
7163 |
+
"grad_norm": 12.72968578338623,
|
7164 |
+
"learning_rate": 6.618973307315169e-06,
|
7165 |
+
"loss": 1.9239,
|
7166 |
+
"step": 1019
|
7167 |
+
},
|
7168 |
+
{
|
7169 |
+
"epoch": 0.8854166666666666,
|
7170 |
+
"grad_norm": 12.328423500061035,
|
7171 |
+
"learning_rate": 6.520906172584374e-06,
|
7172 |
+
"loss": 2.0128,
|
7173 |
+
"step": 1020
|
7174 |
+
},
|
7175 |
+
{
|
7176 |
+
"epoch": 0.8862847222222222,
|
7177 |
+
"grad_norm": 12.540122032165527,
|
7178 |
+
"learning_rate": 6.423546465059083e-06,
|
7179 |
+
"loss": 1.5292,
|
7180 |
+
"step": 1021
|
7181 |
+
},
|
7182 |
+
{
|
7183 |
+
"epoch": 0.8871527777777778,
|
7184 |
+
"grad_norm": 11.738141059875488,
|
7185 |
+
"learning_rate": 6.32689492153401e-06,
|
7186 |
+
"loss": 1.9566,
|
7187 |
+
"step": 1022
|
7188 |
+
},
|
7189 |
+
{
|
7190 |
+
"epoch": 0.8880208333333334,
|
7191 |
+
"grad_norm": 13.872244834899902,
|
7192 |
+
"learning_rate": 6.230952273444724e-06,
|
7193 |
+
"loss": 1.845,
|
7194 |
+
"step": 1023
|
7195 |
+
},
|
7196 |
+
{
|
7197 |
+
"epoch": 0.8888888888888888,
|
7198 |
+
"grad_norm": 14.202521324157715,
|
7199 |
+
"learning_rate": 6.135719246862015e-06,
|
7200 |
+
"loss": 1.739,
|
7201 |
+
"step": 1024
|
7202 |
+
},
|
7203 |
+
{
|
7204 |
+
"epoch": 0.8897569444444444,
|
7205 |
+
"grad_norm": 14.746661186218262,
|
7206 |
+
"learning_rate": 6.041196562486384e-06,
|
7207 |
+
"loss": 1.7072,
|
7208 |
+
"step": 1025
|
7209 |
+
},
|
7210 |
+
{
|
7211 |
+
"epoch": 0.890625,
|
7212 |
+
"grad_norm": 11.926168441772461,
|
7213 |
+
"learning_rate": 5.947384935642741e-06,
|
7214 |
+
"loss": 1.6198,
|
7215 |
+
"step": 1026
|
7216 |
+
},
|
7217 |
+
{
|
7218 |
+
"epoch": 0.8914930555555556,
|
7219 |
+
"grad_norm": 17.295331954956055,
|
7220 |
+
"learning_rate": 5.854285076274768e-06,
|
7221 |
+
"loss": 2.088,
|
7222 |
+
"step": 1027
|
7223 |
+
},
|
7224 |
+
{
|
7225 |
+
"epoch": 0.8923611111111112,
|
7226 |
+
"grad_norm": 15.318548202514648,
|
7227 |
+
"learning_rate": 5.761897688939743e-06,
|
7228 |
+
"loss": 1.987,
|
7229 |
+
"step": 1028
|
7230 |
+
},
|
7231 |
+
{
|
7232 |
+
"epoch": 0.8932291666666666,
|
7233 |
+
"grad_norm": 12.704611778259277,
|
7234 |
+
"learning_rate": 5.670223472803105e-06,
|
7235 |
+
"loss": 2.0586,
|
7236 |
+
"step": 1029
|
7237 |
+
},
|
7238 |
+
{
|
7239 |
+
"epoch": 0.8940972222222222,
|
7240 |
+
"grad_norm": 17.130081176757812,
|
7241 |
+
"learning_rate": 5.57926312163316e-06,
|
7242 |
+
"loss": 1.6881,
|
7243 |
+
"step": 1030
|
7244 |
+
},
|
7245 |
+
{
|
7246 |
+
"epoch": 0.8949652777777778,
|
7247 |
+
"grad_norm": 18.214981079101562,
|
7248 |
+
"learning_rate": 5.489017323795897e-06,
|
7249 |
+
"loss": 1.7165,
|
7250 |
+
"step": 1031
|
7251 |
+
},
|
7252 |
+
{
|
7253 |
+
"epoch": 0.8958333333333334,
|
7254 |
+
"grad_norm": 14.554677963256836,
|
7255 |
+
"learning_rate": 5.399486762249695e-06,
|
7256 |
+
"loss": 1.8351,
|
7257 |
+
"step": 1032
|
7258 |
+
},
|
7259 |
+
{
|
7260 |
+
"epoch": 0.8967013888888888,
|
7261 |
+
"grad_norm": 14.150464057922363,
|
7262 |
+
"learning_rate": 5.3106721145402386e-06,
|
7263 |
+
"loss": 1.87,
|
7264 |
+
"step": 1033
|
7265 |
+
},
|
7266 |
+
{
|
7267 |
+
"epoch": 0.8975694444444444,
|
7268 |
+
"grad_norm": 14.381793022155762,
|
7269 |
+
"learning_rate": 5.222574052795337e-06,
|
7270 |
+
"loss": 1.6235,
|
7271 |
+
"step": 1034
|
7272 |
+
},
|
7273 |
+
{
|
7274 |
+
"epoch": 0.8984375,
|
7275 |
+
"grad_norm": 17.520017623901367,
|
7276 |
+
"learning_rate": 5.135193243719827e-06,
|
7277 |
+
"loss": 1.5983,
|
7278 |
+
"step": 1035
|
7279 |
+
},
|
7280 |
+
{
|
7281 |
+
"epoch": 0.8993055555555556,
|
7282 |
+
"grad_norm": 13.546918869018555,
|
7283 |
+
"learning_rate": 5.048530348590586e-06,
|
7284 |
+
"loss": 1.8277,
|
7285 |
+
"step": 1036
|
7286 |
+
},
|
7287 |
+
{
|
7288 |
+
"epoch": 0.9001736111111112,
|
7289 |
+
"grad_norm": 14.402194023132324,
|
7290 |
+
"learning_rate": 4.962586023251448e-06,
|
7291 |
+
"loss": 2.0176,
|
7292 |
+
"step": 1037
|
7293 |
+
},
|
7294 |
+
{
|
7295 |
+
"epoch": 0.9010416666666666,
|
7296 |
+
"grad_norm": 14.430099487304688,
|
7297 |
+
"learning_rate": 4.877360918108342e-06,
|
7298 |
+
"loss": 1.9925,
|
7299 |
+
"step": 1038
|
7300 |
+
},
|
7301 |
+
{
|
7302 |
+
"epoch": 0.9019097222222222,
|
7303 |
+
"grad_norm": 18.529205322265625,
|
7304 |
+
"learning_rate": 4.792855678124275e-06,
|
7305 |
+
"loss": 1.8463,
|
7306 |
+
"step": 1039
|
7307 |
+
},
|
7308 |
+
{
|
7309 |
+
"epoch": 0.9027777777777778,
|
7310 |
+
"grad_norm": 15.545275688171387,
|
7311 |
+
"learning_rate": 4.709070942814453e-06,
|
7312 |
+
"loss": 2.131,
|
7313 |
+
"step": 1040
|
7314 |
+
},
|
7315 |
+
{
|
7316 |
+
"epoch": 0.9036458333333334,
|
7317 |
+
"grad_norm": 14.902131080627441,
|
7318 |
+
"learning_rate": 4.6260073462415785e-06,
|
7319 |
+
"loss": 2.1101,
|
7320 |
+
"step": 1041
|
7321 |
+
},
|
7322 |
+
{
|
7323 |
+
"epoch": 0.9045138888888888,
|
7324 |
+
"grad_norm": 14.553145408630371,
|
7325 |
+
"learning_rate": 4.543665517010831e-06,
|
7326 |
+
"loss": 1.9121,
|
7327 |
+
"step": 1042
|
7328 |
+
},
|
7329 |
+
{
|
7330 |
+
"epoch": 0.9053819444444444,
|
7331 |
+
"grad_norm": 14.558218955993652,
|
7332 |
+
"learning_rate": 4.462046078265336e-06,
|
7333 |
+
"loss": 1.9187,
|
7334 |
+
"step": 1043
|
7335 |
+
},
|
7336 |
+
{
|
7337 |
+
"epoch": 0.90625,
|
7338 |
+
"grad_norm": 15.623437881469727,
|
7339 |
+
"learning_rate": 4.381149647681282e-06,
|
7340 |
+
"loss": 1.8382,
|
7341 |
+
"step": 1044
|
7342 |
+
},
|
7343 |
+
{
|
7344 |
+
"epoch": 0.9071180555555556,
|
7345 |
+
"grad_norm": 21.00379753112793,
|
7346 |
+
"learning_rate": 4.3009768374633e-06,
|
7347 |
+
"loss": 1.9282,
|
7348 |
+
"step": 1045
|
7349 |
+
},
|
7350 |
+
{
|
7351 |
+
"epoch": 0.9079861111111112,
|
7352 |
+
"grad_norm": 18.359010696411133,
|
7353 |
+
"learning_rate": 4.221528254339846e-06,
|
7354 |
+
"loss": 1.983,
|
7355 |
+
"step": 1046
|
7356 |
+
},
|
7357 |
+
{
|
7358 |
+
"epoch": 0.9088541666666666,
|
7359 |
+
"grad_norm": 20.251195907592773,
|
7360 |
+
"learning_rate": 4.142804499558639e-06,
|
7361 |
+
"loss": 1.6667,
|
7362 |
+
"step": 1047
|
7363 |
+
},
|
7364 |
+
{
|
7365 |
+
"epoch": 0.9097222222222222,
|
7366 |
+
"grad_norm": 20.23505210876465,
|
7367 |
+
"learning_rate": 4.064806168882041e-06,
|
7368 |
+
"loss": 1.8808,
|
7369 |
+
"step": 1048
|
7370 |
+
},
|
7371 |
+
{
|
7372 |
+
"epoch": 0.9105902777777778,
|
7373 |
+
"grad_norm": 23.457834243774414,
|
7374 |
+
"learning_rate": 3.98753385258257e-06,
|
7375 |
+
"loss": 2.1341,
|
7376 |
+
"step": 1049
|
7377 |
+
},
|
7378 |
+
{
|
7379 |
+
"epoch": 0.9114583333333334,
|
7380 |
+
"grad_norm": 26.815454483032227,
|
7381 |
+
"learning_rate": 3.910988135438442e-06,
|
7382 |
+
"loss": 2.2132,
|
7383 |
+
"step": 1050
|
7384 |
+
},
|
7385 |
+
{
|
7386 |
+
"epoch": 0.9123263888888888,
|
7387 |
+
"grad_norm": 8.473207473754883,
|
7388 |
+
"learning_rate": 3.835169596729171e-06,
|
7389 |
+
"loss": 1.9366,
|
7390 |
+
"step": 1051
|
7391 |
+
},
|
7392 |
+
{
|
7393 |
+
"epoch": 0.9131944444444444,
|
7394 |
+
"grad_norm": 8.994145393371582,
|
7395 |
+
"learning_rate": 3.760078810231138e-06,
|
7396 |
+
"loss": 1.9322,
|
7397 |
+
"step": 1052
|
7398 |
+
},
|
7399 |
+
{
|
7400 |
+
"epoch": 0.9140625,
|
7401 |
+
"grad_norm": 8.751708030700684,
|
7402 |
+
"learning_rate": 3.685716344213297e-06,
|
7403 |
+
"loss": 1.9166,
|
7404 |
+
"step": 1053
|
7405 |
+
},
|
7406 |
+
{
|
7407 |
+
"epoch": 0.9149305555555556,
|
7408 |
+
"grad_norm": 11.358210563659668,
|
7409 |
+
"learning_rate": 3.612082761432822e-06,
|
7410 |
+
"loss": 1.9726,
|
7411 |
+
"step": 1054
|
7412 |
+
},
|
7413 |
+
{
|
7414 |
+
"epoch": 0.9157986111111112,
|
7415 |
+
"grad_norm": 10.732216835021973,
|
7416 |
+
"learning_rate": 3.5391786191308894e-06,
|
7417 |
+
"loss": 1.8501,
|
7418 |
+
"step": 1055
|
7419 |
+
},
|
7420 |
+
{
|
7421 |
+
"epoch": 0.9166666666666666,
|
7422 |
+
"grad_norm": 10.60909652709961,
|
7423 |
+
"learning_rate": 3.467004469028412e-06,
|
7424 |
+
"loss": 1.9029,
|
7425 |
+
"step": 1056
|
7426 |
+
},
|
7427 |
+
{
|
7428 |
+
"epoch": 0.9175347222222222,
|
7429 |
+
"grad_norm": 10.67011547088623,
|
7430 |
+
"learning_rate": 3.3955608573219555e-06,
|
7431 |
+
"loss": 1.9642,
|
7432 |
+
"step": 1057
|
7433 |
+
},
|
7434 |
+
{
|
7435 |
+
"epoch": 0.9184027777777778,
|
7436 |
+
"grad_norm": 11.154162406921387,
|
7437 |
+
"learning_rate": 3.3248483246795193e-06,
|
7438 |
+
"loss": 1.8521,
|
7439 |
+
"step": 1058
|
7440 |
+
},
|
7441 |
+
{
|
7442 |
+
"epoch": 0.9192708333333334,
|
7443 |
+
"grad_norm": 12.020743370056152,
|
7444 |
+
"learning_rate": 3.2548674062364726e-06,
|
7445 |
+
"loss": 1.9838,
|
7446 |
+
"step": 1059
|
7447 |
+
},
|
7448 |
+
{
|
7449 |
+
"epoch": 0.9201388888888888,
|
7450 |
+
"grad_norm": 11.383040428161621,
|
7451 |
+
"learning_rate": 3.1856186315914805e-06,
|
7452 |
+
"loss": 2.0,
|
7453 |
+
"step": 1060
|
7454 |
+
},
|
7455 |
+
{
|
7456 |
+
"epoch": 0.9210069444444444,
|
7457 |
+
"grad_norm": 12.290900230407715,
|
7458 |
+
"learning_rate": 3.117102524802562e-06,
|
7459 |
+
"loss": 1.7312,
|
7460 |
+
"step": 1061
|
7461 |
+
},
|
7462 |
+
{
|
7463 |
+
"epoch": 0.921875,
|
7464 |
+
"grad_norm": 10.455514907836914,
|
7465 |
+
"learning_rate": 3.0493196043830606e-06,
|
7466 |
+
"loss": 1.7082,
|
7467 |
+
"step": 1062
|
7468 |
+
},
|
7469 |
+
{
|
7470 |
+
"epoch": 0.9227430555555556,
|
7471 |
+
"grad_norm": 15.14477825164795,
|
7472 |
+
"learning_rate": 2.9822703832977694e-06,
|
7473 |
+
"loss": 1.7378,
|
7474 |
+
"step": 1063
|
7475 |
+
},
|
7476 |
+
{
|
7477 |
+
"epoch": 0.9236111111111112,
|
7478 |
+
"grad_norm": 15.008790016174316,
|
7479 |
+
"learning_rate": 2.915955368958967e-06,
|
7480 |
+
"loss": 1.4908,
|
7481 |
+
"step": 1064
|
7482 |
+
},
|
7483 |
+
{
|
7484 |
+
"epoch": 0.9244791666666666,
|
7485 |
+
"grad_norm": 11.197698593139648,
|
7486 |
+
"learning_rate": 2.850375063222699e-06,
|
7487 |
+
"loss": 1.7582,
|
7488 |
+
"step": 1065
|
7489 |
+
},
|
7490 |
+
{
|
7491 |
+
"epoch": 0.9253472222222222,
|
7492 |
+
"grad_norm": 11.746088981628418,
|
7493 |
+
"learning_rate": 2.785529962384836e-06,
|
7494 |
+
"loss": 1.97,
|
7495 |
+
"step": 1066
|
7496 |
+
},
|
7497 |
+
{
|
7498 |
+
"epoch": 0.9262152777777778,
|
7499 |
+
"grad_norm": 12.114376068115234,
|
7500 |
+
"learning_rate": 2.7214205571774786e-06,
|
7501 |
+
"loss": 1.798,
|
7502 |
+
"step": 1067
|
7503 |
+
},
|
7504 | + {
7505 | + "epoch": 0.9270833333333334,
7506 | + "grad_norm": 13.894766807556152,
7507 | + "learning_rate": 2.658047332765112e-06,
7508 | + "loss": 1.9275,
7509 | + "step": 1068
7510 | + },
7511 | + {
7512 | + "epoch": 0.9279513888888888,
7513 | + "grad_norm": 10.888907432556152,
7514 | + "learning_rate": 2.595410768740991e-06,
7515 | + "loss": 1.7554,
7516 | + "step": 1069
7517 | + },
7518 | + {
7519 | + "epoch": 0.9288194444444444,
7520 | + "grad_norm": 16.657119750976562,
7521 | + "learning_rate": 2.5335113391235064e-06,
7522 | + "loss": 2.0056,
7523 | + "step": 1070
7524 | + },
7525 | + {
7526 | + "epoch": 0.9296875,
7527 | + "grad_norm": 15.70227336883545,
7528 | + "learning_rate": 2.47234951235259e-06,
7529 | + "loss": 1.7914,
7530 | + "step": 1071
7531 | + },
7532 | + {
7533 | + "epoch": 0.9305555555555556,
7534 | + "grad_norm": 14.840006828308105,
7535 | + "learning_rate": 2.411925751286159e-06,
7536 | + "loss": 1.8983,
7537 | + "step": 1072
7538 | + },
7539 | + {
7540 | + "epoch": 0.9314236111111112,
7541 | + "grad_norm": 13.980358123779297,
7542 | + "learning_rate": 2.3522405131966798e-06,
7543 | + "loss": 1.7292,
7544 | + "step": 1073
7545 | + },
7546 | + {
7547 | + "epoch": 0.9322916666666666,
7548 | + "grad_norm": 11.940156936645508,
7549 | + "learning_rate": 2.293294249767597e-06,
7550 | + "loss": 1.7353,
7551 | + "step": 1074
7552 | + },
7553 | + {
7554 | + "epoch": 0.9331597222222222,
7555 | + "grad_norm": 14.269763946533203,
7556 | + "learning_rate": 2.2350874070900084e-06,
7557 | + "loss": 1.958,
7558 | + "step": 1075
7559 | + },
7560 | + {
7561 | + "epoch": 0.9340277777777778,
7562 | + "grad_norm": 13.120196342468262,
7563 | + "learning_rate": 2.17762042565921e-06,
7564 | + "loss": 1.9363,
7565 | + "step": 1076
7566 | + },
7567 | + {
7568 | + "epoch": 0.9348958333333334,
7569 | + "grad_norm": 12.493894577026367,
7570 | + "learning_rate": 2.120893740371477e-06,
7571 | + "loss": 1.8201,
7572 | + "step": 1077
7573 | + },
7574 | + {
7575 | + "epoch": 0.9357638888888888,
7576 | + "grad_norm": 12.92516803741455,
7577 | + "learning_rate": 2.0649077805206307e-06,
7578 | + "loss": 1.866,
7579 | + "step": 1078
7580 | + },
7581 | + {
7582 | + "epoch": 0.9366319444444444,
7583 | + "grad_norm": 12.748099327087402,
7584 | + "learning_rate": 2.0096629697949233e-06,
7585 | + "loss": 2.008,
7586 | + "step": 1079
7587 | + },
7588 | + {
7589 | + "epoch": 0.9375,
7590 | + "grad_norm": 16.6556396484375,
7591 | + "learning_rate": 1.9551597262737252e-06,
7592 | + "loss": 1.9844,
7593 | + "step": 1080
7594 | + },
7595 | + {
7596 | + "epoch": 0.9383680555555556,
7597 | + "grad_norm": 15.163797378540039,
7598 | + "learning_rate": 1.9013984624244085e-06,
7599 | + "loss": 1.7668,
7600 | + "step": 1081
7601 | + },
7602 | + {
7603 | + "epoch": 0.9392361111111112,
7604 | + "grad_norm": 17.656597137451172,
7605 | + "learning_rate": 1.848379585099258e-06,
7606 | + "loss": 1.7489,
7607 | + "step": 1082
7608 | + },
7609 | + {
7610 | + "epoch": 0.9401041666666666,
7611 | + "grad_norm": 13.84435749053955,
7612 | + "learning_rate": 1.7961034955323087e-06,
7613 | + "loss": 2.0097,
7614 | + "step": 1083
7615 | + },
7616 | + {
7617 | + "epoch": 0.9409722222222222,
7618 | + "grad_norm": 14.789044380187988,
7619 | + "learning_rate": 1.7445705893364029e-06,
7620 | + "loss": 1.8604,
7621 | + "step": 1084
7622 | + },
7623 | + {
7624 | + "epoch": 0.9418402777777778,
7625 | + "grad_norm": 15.406126976013184,
7626 | + "learning_rate": 1.6937812565001155e-06,
7627 | + "loss": 1.945,
7628 | + "step": 1085
7629 | + },
7630 | + {
7631 | + "epoch": 0.9427083333333334,
7632 | + "grad_norm": 14.652530670166016,
7633 | + "learning_rate": 1.6437358813848558e-06,
7634 | + "loss": 1.9336,
7635 | + "step": 1086
7636 | + },
7637 | + {
7638 | + "epoch": 0.9435763888888888,
7639 | + "grad_norm": 17.914424896240234,
7640 | + "learning_rate": 1.5944348427219369e-06,
7641 | + "loss": 1.8055,
7642 | + "step": 1087
7643 | + },
7644 | + {
7645 | + "epoch": 0.9444444444444444,
7646 | + "grad_norm": 14.453635215759277,
7647 | + "learning_rate": 1.5458785136096998e-06,
7648 | + "loss": 1.9833,
7649 | + "step": 1088
7650 | + },
7651 | + {
7652 | + "epoch": 0.9453125,
7653 | + "grad_norm": 14.691089630126953,
7654 | + "learning_rate": 1.4980672615107382e-06,
7655 | + "loss": 1.9429,
7656 | + "step": 1089
7657 | + },
7658 | + {
7659 | + "epoch": 0.9461805555555556,
7660 | + "grad_norm": 14.366144180297852,
7661 | + "learning_rate": 1.4510014482490452e-06,
7662 | + "loss": 1.9237,
7663 | + "step": 1090
7664 | + },
7665 | + {
7666 | + "epoch": 0.9470486111111112,
7667 | + "grad_norm": 16.21904754638672,
7668 | + "learning_rate": 1.4046814300073263e-06,
7669 | + "loss": 1.951,
7670 | + "step": 1091
7671 | + },
7672 | + {
7673 | + "epoch": 0.9479166666666666,
7674 | + "grad_norm": 16.610876083374023,
7675 | + "learning_rate": 1.3591075573243129e-06,
7676 | + "loss": 2.0993,
7677 | + "step": 1092
7678 | + },
7679 | + {
7680 | + "epoch": 0.9487847222222222,
7681 | + "grad_norm": 15.262898445129395,
7682 | + "learning_rate": 1.3142801750920308e-06,
7683 | + "loss": 2.0727,
7684 | + "step": 1093
7685 | + },
7686 | + {
7687 | + "epoch": 0.9496527777777778,
7688 | + "grad_norm": 15.337409019470215,
7689 | + "learning_rate": 1.2701996225533252e-06,
7690 | + "loss": 2.04,
7691 | + "step": 1094
7692 | + },
7693 | + {
7694 | + "epoch": 0.9505208333333334,
7695 | + "grad_norm": 18.810794830322266,
7696 | + "learning_rate": 1.22686623329914e-06,
7697 | + "loss": 1.8913,
7698 | + "step": 1095
7699 | + },
7700 | + {
7701 | + "epoch": 0.9513888888888888,
7702 | + "grad_norm": 15.501535415649414,
7703 | + "learning_rate": 1.184280335266108e-06,
7704 | + "loss": 2.178,
7705 | + "step": 1096
7706 | + },
7707 | + {
7708 | + "epoch": 0.9522569444444444,
7709 | + "grad_norm": 23.515823364257812,
7710 | + "learning_rate": 1.1424422507340216e-06,
7711 | + "loss": 2.0881,
7712 | + "step": 1097
7713 | + },
7714 | + {
7715 | + "epoch": 0.953125,
7716 | + "grad_norm": 25.078577041625977,
7717 | + "learning_rate": 1.101352296323399e-06,
7718 | + "loss": 1.8935,
7719 | + "step": 1098
7720 | + },
7721 | + {
7722 | + "epoch": 0.9539930555555556,
7723 | + "grad_norm": 21.68729019165039,
7724 | + "learning_rate": 1.0610107829930994e-06,
7725 | + "loss": 1.9361,
7726 | + "step": 1099
7727 | + },
7728 | + {
7729 | + "epoch": 0.9548611111111112,
7730 | + "grad_norm": 28.165672302246094,
7731 | + "learning_rate": 1.021418016037945e-06,
7732 | + "loss": 2.2755,
7733 | + "step": 1100
7734 | + },
7735 | + {
7736 | + "epoch": 0.9557291666666666,
7737 | + "grad_norm": 7.87142276763916,
7738 | + "learning_rate": 9.825742950864359e-07,
7739 | + "loss": 1.8745,
7740 | + "step": 1101
7741 | + },
7742 | + {
7743 | + "epoch": 0.9565972222222222,
7744 | + "grad_norm": 9.18879508972168,
7745 | + "learning_rate": 9.444799140984617e-07,
7746 | + "loss": 1.7996,
7747 | + "step": 1102
7748 | + },
7749 | + {
7750 | + "epoch": 0.9574652777777778,
7751 | + "grad_norm": 7.753636360168457,
7752 | + "learning_rate": 9.071351613631151e-07,
7753 | + "loss": 1.8538,
7754 | + "step": 1103
7755 | + },
7756 | + {
7757 | + "epoch": 0.9583333333333334,
7758 | + "grad_norm": 14.192962646484375,
7759 | + "learning_rate": 8.70540319496449e-07,
7760 | + "loss": 1.9124,
7761 | + "step": 1104
7762 | + },
7763 | + {
7764 | + "epoch": 0.9592013888888888,
7765 | + "grad_norm": 9.842401504516602,
7766 | + "learning_rate": 8.346956654394e-07,
7767 | + "loss": 1.7753,
7768 | + "step": 1105
7769 | + },
7770 | + {
7771 | + "epoch": 0.9600694444444444,
7772 | + "grad_norm": 10.271657943725586,
7773 | + "learning_rate": 7.996014704556465e-07,
7774 | + "loss": 1.632,
7775 | + "step": 1106
7776 | + },
7777 | + {
7778 | + "epoch": 0.9609375,
7779 | + "grad_norm": 10.388742446899414,
7780 | + "learning_rate": 7.652580001295761e-07,
7781 | + "loss": 1.8832,
7782 | + "step": 1107
7783 | + },
7784 | + {
7785 | + "epoch": 0.9618055555555556,
7786 | + "grad_norm": 10.298879623413086,
7787 | + "learning_rate": 7.316655143642881e-07,
7788 | + "loss": 1.8801,
7789 | + "step": 1108
7790 | + },
7791 | + {
7792 | + "epoch": 0.9626736111111112,
7793 | + "grad_norm": 10.070116996765137,
7794 | + "learning_rate": 6.988242673796053e-07,
7795 | + "loss": 1.689,
7796 | + "step": 1109
7797 | + },
7798 | + {
7799 | + "epoch": 0.9635416666666666,
7800 | + "grad_norm": 12.104198455810547,
7801 | + "learning_rate": 6.667345077101428e-07,
7802 | + "loss": 1.5876,
7803 | + "step": 1110
7804 | + },
7805 | + {
7806 | + "epoch": 0.9644097222222222,
7807 | + "grad_norm": 11.707262992858887,
7808 | + "learning_rate": 6.353964782034427e-07,
7809 | + "loss": 1.9296,
7810 | + "step": 1111
7811 | + },
7812 | + {
7813 | + "epoch": 0.9652777777777778,
7814 | + "grad_norm": 13.419182777404785,
7815 | + "learning_rate": 6.048104160181644e-07,
7816 | + "loss": 1.7369,
7817 | + "step": 1112
7818 | + },
7819 | + {
7820 | + "epoch": 0.9661458333333334,
7821 | + "grad_norm": 11.767274856567383,
7822 | + "learning_rate": 5.749765526222306e-07,
7823 | + "loss": 1.9889,
7824 | + "step": 1113
7825 | + },
7826 | + {
7827 | + "epoch": 0.9670138888888888,
7828 | + "grad_norm": 10.876611709594727,
7829 | + "learning_rate": 5.458951137911061e-07,
7830 | + "loss": 1.8486,
7831 | + "step": 1114
7832 | + },
7833 | + {
7834 | + "epoch": 0.9678819444444444,
7835 | + "grad_norm": 11.94616413116455,
7836 | + "learning_rate": 5.175663196061109e-07,
7837 | + "loss": 1.8348,
7838 | + "step": 1115
7839 | + },
7840 | + {
7841 | + "epoch": 0.96875,
7842 | + "grad_norm": 13.167989730834961,
7843 | + "learning_rate": 4.899903844527098e-07,
7844 | + "loss": 1.6042,
7845 | + "step": 1116
7846 | + },
7847 | + {
7848 | + "epoch": 0.9696180555555556,
7849 | + "grad_norm": 11.482317924499512,
7850 | + "learning_rate": 4.6316751701892534e-07,
7851 | + "loss": 1.8771,
7852 | + "step": 1117
7853 | + },
7854 | + {
7855 | + "epoch": 0.9704861111111112,
7856 | + "grad_norm": 14.85400390625,
7857 | + "learning_rate": 4.3709792029372756e-07,
7858 | + "loss": 1.7911,
7859 | + "step": 1118
7860 | + },
7861 | + {
7862 | + "epoch": 0.9713541666666666,
7863 | + "grad_norm": 10.730084419250488,
7864 | + "learning_rate": 4.117817915655353e-07,
7865 | + "loss": 1.8488,
7866 | + "step": 1119
7867 | + },
7868 | + {
7869 | + "epoch": 0.9722222222222222,
7870 | + "grad_norm": 15.081836700439453,
7871 | + "learning_rate": 3.8721932242070657e-07,
7872 | + "loss": 1.8417,
7873 | + "step": 1120
7874 | + },
7875 | + {
7876 | + "epoch": 0.9730902777777778,
7877 | + "grad_norm": 14.770096778869629,
7878 | + "learning_rate": 3.6341069874206157e-07,
7879 | + "loss": 1.5839,
7880 | + "step": 1121
7881 | + },
7882 | + {
7883 | + "epoch": 0.9739583333333334,
7884 | + "grad_norm": 15.167415618896484,
7885 | + "learning_rate": 3.4035610070751736e-07,
7886 | + "loss": 2.1499,
7887 | + "step": 1122
7888 | + },
7889 | + {
7890 | + "epoch": 0.9748263888888888,
7891 | + "grad_norm": 11.67169189453125,
7892 | + "learning_rate": 3.18055702788711e-07,
7893 | + "loss": 1.9342,
7894 | + "step": 1123
7895 | + },
7896 | + {
7897 | + "epoch": 0.9756944444444444,
7898 | + "grad_norm": 12.89733600616455,
7899 | + "learning_rate": 2.9650967374966755e-07,
7900 | + "loss": 1.8843,
7901 | + "step": 1124
7902 | + },
7903 | + {
7904 | + "epoch": 0.9765625,
7905 | + "grad_norm": 16.48362922668457,
7906 | + "learning_rate": 2.7571817664551193e-07,
7907 | + "loss": 1.7655,
7908 | + "step": 1125
7909 | + },
7910 | + {
7911 | + "epoch": 0.9774305555555556,
7912 | + "grad_norm": 11.52205753326416,
7913 | + "learning_rate": 2.556813688212922e-07,
7914 | + "loss": 1.7175,
7915 | + "step": 1126
7916 | + },
7917 | + {
7918 | + "epoch": 0.9782986111111112,
7919 | + "grad_norm": 14.660330772399902,
7920 | + "learning_rate": 2.3639940191073628e-07,
7921 | + "loss": 1.7116,
7922 | + "step": 1127
7923 | + },
7924 | + {
7925 | + "epoch": 0.9791666666666666,
7926 | + "grad_norm": 17.55712127685547,
7927 | + "learning_rate": 2.1787242183509693e-07,
7928 | + "loss": 1.8758,
7929 | + "step": 1128
7930 | + },
7931 | + {
7932 | + "epoch": 0.9800347222222222,
7933 | + "grad_norm": 16.475013732910156,
7934 | + "learning_rate": 2.0010056880208628e-07,
7935 | + "loss": 1.8526,
7936 | + "step": 1129
7937 | + },
7938 | + {
7939 | + "epoch": 0.9809027777777778,
7940 | + "grad_norm": 13.74273681640625,
7941 | + "learning_rate": 1.8308397730479877e-07,
7942 | + "loss": 1.965,
7943 | + "step": 1130
7944 | + },
7945 | + {
7946 | + "epoch": 0.9817708333333334,
7947 | + "grad_norm": 15.196414947509766,
7948 | + "learning_rate": 1.6682277612070086e-07,
7949 | + "loss": 1.8297,
7950 | + "step": 1131
7951 | + },
7952 | + {
7953 | + "epoch": 0.9826388888888888,
7954 | + "grad_norm": 14.796883583068848,
7955 | + "learning_rate": 1.5131708831062075e-07,
7956 | + "loss": 2.1325,
7957 | + "step": 1132
7958 | + },
7959 | + {
7960 | + "epoch": 0.9835069444444444,
7961 | + "grad_norm": 12.838567733764648,
7962 | + "learning_rate": 1.3656703121784908e-07,
7963 | + "loss": 1.9729,
7964 | + "step": 1133
7965 | + },
7966 | + {
7967 | + "epoch": 0.984375,
7968 | + "grad_norm": 14.149857521057129,
7969 | + "learning_rate": 1.2257271646728407e-07,
7970 | + "loss": 1.7948,
7971 | + "step": 1134
7972 | + },
7973 | + {
7974 | + "epoch": 0.9852430555555556,
7975 | + "grad_norm": 15.99991226196289,
7976 | + "learning_rate": 1.0933424996451003e-07,
7977 | + "loss": 1.7998,
7978 | + "step": 1135
7979 | + },
7980 | + {
7981 | + "epoch": 0.9861111111111112,
7982 | + "grad_norm": 15.018757820129395,
7983 | + "learning_rate": 9.685173189503128e-08,
7984 | + "loss": 2.0206,
7985 | + "step": 1136
7986 | + },
7987 | + {
7988 | + "epoch": 0.9869791666666666,
7989 | + "grad_norm": 14.15756607055664,
7990 | + "learning_rate": 8.512525672353944e-08,
7991 | + "loss": 1.9028,
7992 | + "step": 1137
7993 | + },
7994 | + {
7995 | + "epoch": 0.9878472222222222,
7996 | + "grad_norm": 17.212684631347656,
7997 | + "learning_rate": 7.415491319316959e-08,
7998 | + "loss": 1.9987,
7999 | + "step": 1138
8000 | + },
8001 | + {
8002 | + "epoch": 0.9887152777777778,
8003 | + "grad_norm": 16.452335357666016,
8004 | + "learning_rate": 6.394078432482297e-08,
8005 | + "loss": 1.7762,
8006 | + "step": 1139
8007 | + },
8008 | + {
8009 | + "epoch": 0.9895833333333334,
8010 | + "grad_norm": 15.122843742370605,
8011 | + "learning_rate": 5.4482947416567566e-08,
8012 | + "loss": 1.7858,
8013 | + "step": 1140
8014 | + },
8015 | + {
8016 | + "epoch": 0.9904513888888888,
8017 | + "grad_norm": 16.55274200439453,
8018 | + "learning_rate": 4.578147404302735e-08,
8019 | + "loss": 1.9584,
8020 | + "step": 1141
8021 | + },
8022 | + {
8023 | + "epoch": 0.9913194444444444,
8024 | + "grad_norm": 20.493459701538086,
8025 | + "learning_rate": 3.783643005484949e-08,
8026 | + "loss": 1.974,
8027 | + "step": 1142
8028 | + },
8029 | + {
8030 | + "epoch": 0.9921875,
8031 | + "grad_norm": 16.82901954650879,
8032 | + "learning_rate": 3.06478755782047e-08,
8033 | + "loss": 1.9193,
8034 | + "step": 1143
8035 | + },
8036 | + {
8037 | + "epoch": 0.9930555555555556,
8038 | + "grad_norm": 22.513282775878906,
8039 | + "learning_rate": 2.4215865014354244e-08,
8040 | + "loss": 1.6038,
8041 | + "step": 1144
8042 | + },
8043 | + {
8044 | + "epoch": 0.9939236111111112,
8045 | + "grad_norm": 14.857792854309082,
8046 | + "learning_rate": 1.854044703917257e-08,
8047 | + "loss": 1.9603,
8048 | + "step": 1145
8049 | + },
8050 | + {
8051 | + "epoch": 0.9947916666666666,
8052 | + "grad_norm": 14.495781898498535,
8053 | + "learning_rate": 1.3621664602880835e-08,
8054 | + "loss": 1.8849,
8055 | + "step": 1146
8056 | + },
8057 | + {
8058 | + "epoch": 0.9956597222222222,
8059 | + "grad_norm": 16.096176147460938,
8060 | + "learning_rate": 9.459554929613923e-09,
8061 | + "loss": 2.1838,
8062 | + "step": 1147
8063 | + },
8064 | + {
8065 | + "epoch": 0.9965277777777778,
8066 | + "grad_norm": 16.341297149658203,
8067 | + "learning_rate": 6.054149517220609e-09,
8068 | + "loss": 2.0568,
8069 | + "step": 1148
8070 | + },
8071 | + {
8072 | + "epoch": 0.9973958333333334,
8073 | + "grad_norm": 17.340593338012695,
8074 | + "learning_rate": 3.4054741369859975e-09,
8075 | + "loss": 1.696,
8076 | + "step": 1149
8077 | + },
8078 | + {
8079 | + "epoch": 0.9982638888888888,
8080 | + "grad_norm": 27.64981460571289,
8081 | + "learning_rate": 1.5135488334538928e-09,
8082 | + "loss": 2.1148,
8083 | + "step": 1150
8084 | + },
8085 | + {
8086 | + "epoch": 0.9991319444444444,
8087 | + "grad_norm": 11.244271278381348,
8088 | + "learning_rate": 3.7838792424915723e-10,
8089 | + "loss": 1.9841,
8090 | + "step": 1151
8091 | + },
8092 | + {
8093 | + "epoch": 1.0,
8094 | + "grad_norm": 14.939323425292969,
8095 | + "learning_rate": 0.0,
8096 | + "loss": 1.7883,
8097 | + "step": 1152
8098 | + },
8099 | + {
8100 | + "epoch": 1.0,
8101 | + "eval_loss": 1.9270657300949097,
8102 | + "eval_runtime": 28.1202,
8103 | + "eval_samples_per_second": 17.247,
8104 | + "eval_steps_per_second": 8.641,
8105 | + "step": 1152
8106 | }
8107 | ],
8108 | "logging_steps": 1,
8117 | "should_evaluate": false,
8118 | "should_log": false,
8119 | "should_save": true,
8120 | + "should_training_stop": true
8121 | },
8122 | "attributes": {}
8123 | }
8124 | },
8125 | + "total_flos": 4.0377130193780736e+17,
8126 | "train_batch_size": 2,
8127 | "trial_name": null,
8128 | "trial_params": null