Training in progress, step 750, checkpoint
last-checkpoint/adapter_model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:99a77a83a96075b809ebf9cc00a63acbc7957a42a478e066b93e59296ebacdda
 size 83945296

last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:e05babd698fc1b664a2688fdd396bcd9fa52762a02688a6f2a49644fe2e508bc
 size 43123028

last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:66a278e168770b17c6200dfa59a55a1ce746c931ea0cd8f411aebbb607213cf7
 size 14244

last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ac9c79078a88e8cb4c64a0c9b0c0b44a2e00e7411388bd2b931c0d5e22790bf5
 size 1064

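The four files above are stored with Git LFS, so each diff only touches the pointer file: the "oid sha256:" line and the "size" line. Below is a minimal sketch of how the new pointer for adapter_model.safetensors could be checked against a locally downloaded copy; the expected hash and size are the values shown in the diff above, while the local path is an assumption about where the file was downloaded.

```python
# Hedged sketch: verify a downloaded file against its Git LFS pointer.
# EXPECTED_OID / EXPECTED_SIZE come from the adapter_model.safetensors diff above;
# LOCAL_PATH is hypothetical and depends on where the repo was cloned or downloaded.
import hashlib
from pathlib import Path

EXPECTED_OID = "99a77a83a96075b809ebf9cc00a63acbc7957a42a478e066b93e59296ebacdda"
EXPECTED_SIZE = 83945296
LOCAL_PATH = Path("last-checkpoint/adapter_model.safetensors")  # assumed location

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file through sha256 so large checkpoints don't load into memory."""
    h = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert LOCAL_PATH.stat().st_size == EXPECTED_SIZE, "size does not match the LFS pointer"
assert sha256_of(LOCAL_PATH) == EXPECTED_OID, "sha256 does not match the LFS pointer"
print("adapter_model.safetensors matches its LFS pointer")
```
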
last-checkpoint/trainer_state.json
CHANGED
Old version:
@@ -1,9 +1,9 @@
   1     {
   2     "best_metric": null,
   3     "best_model_checkpoint": null,
   4  -  "epoch": 0.
   5     "eval_steps": 375,
   6  -  "global_step":
   7     "is_hyper_param_search": false,
   8     "is_local_process_zero": true,
   9     "is_world_process_zero": true,
@@ -2640,6 +2640,2639 @@
2640     "eval_samples_per_second": 16.605,
2641     "eval_steps_per_second": 8.302,
2642     "step": 375
2643     }
2644     ],
2645     "logging_steps": 1,
@@ -2659,7 +5292,7 @@
2659     "attributes": {}
2660     }
2661     },
2662  -  "total_flos":
2663     "train_batch_size": 2,
2664     "trial_name": null,
2665     "trial_params": null

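Taken together, the files in this commit are the usual contents of a Hugging Face Trainer checkpoint directory: adapter weights (apparently a PEFT adapter, given the adapter_model.safetensors filename), optimizer.pt, scheduler.pt, rng_state.pth, and trainer_state.json now recording global_step 750. A minimal sketch of how such a directory is typically inspected and resumed is shown below; the local directory path and the already-configured `trainer` object are assumptions, not part of this commit.

```python
# Hedged sketch: inspect trainer_state.json from this checkpoint and resume training.
# "last-checkpoint" is the directory updated by this commit (assumed to exist locally);
# the `trainer` object (model, TrainingArguments, datasets) is assumed to be set up elsewhere.
import json
from pathlib import Path

ckpt_dir = Path("last-checkpoint")  # assumed local path to the checkpoint

state = json.loads((ckpt_dir / "trainer_state.json").read_text())
print(state["global_step"], state["epoch"])   # 750 and ~0.049 per the diff below

# log_history holds the per-step records (epoch, grad_norm, learning_rate, loss, step)
# that make up the bulk of this diff:
for entry in state["log_history"][-3:]:
    print(entry.get("step"), entry.get("loss"), entry.get("learning_rate"))

# Resuming with transformers' Trainer picks up optimizer.pt, scheduler.pt and
# rng_state.pth from the same directory automatically:
# trainer.train(resume_from_checkpoint=str(ckpt_dir))
```
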
New version:
@@ -1,9 +1,9 @@
   1     {
   2     "best_metric": null,
   3     "best_model_checkpoint": null,
   4  +  "epoch": 0.04901300069843526,
   5     "eval_steps": 375,
   6  +  "global_step": 750,
   7     "is_hyper_param_search": false,
   8     "is_local_process_zero": true,
   9     "is_world_process_zero": true,
@@ -2640,6 +2640,2639 @@
2640     "eval_samples_per_second": 16.605,
2641     "eval_steps_per_second": 8.302,
2642     "step": 375
2643 |
+
},
|
2644 |
+
{
|
2645 |
+
"epoch": 0.024571851016815544,
|
2646 |
+
"grad_norm": 2.339012384414673,
|
2647 |
+
"learning_rate": 0.00017167309246649297,
|
2648 |
+
"loss": 3.1042,
|
2649 |
+
"step": 376
|
2650 |
+
},
|
2651 |
+
{
|
2652 |
+
"epoch": 0.024637201684413457,
|
2653 |
+
"grad_norm": 2.4127211570739746,
|
2654 |
+
"learning_rate": 0.0001715259004365791,
|
2655 |
+
"loss": 3.1541,
|
2656 |
+
"step": 377
|
2657 |
+
},
|
2658 |
+
{
|
2659 |
+
"epoch": 0.02470255235201137,
|
2660 |
+
"grad_norm": 2.5378224849700928,
|
2661 |
+
"learning_rate": 0.00017137839043356484,
|
2662 |
+
"loss": 3.1696,
|
2663 |
+
"step": 378
|
2664 |
+
},
|
2665 |
+
{
|
2666 |
+
"epoch": 0.024767903019609284,
|
2667 |
+
"grad_norm": 2.3195438385009766,
|
2668 |
+
"learning_rate": 0.00017123056311321562,
|
2669 |
+
"loss": 3.0877,
|
2670 |
+
"step": 379
|
2671 |
+
},
|
2672 |
+
{
|
2673 |
+
"epoch": 0.024833253687207197,
|
2674 |
+
"grad_norm": 2.556420087814331,
|
2675 |
+
"learning_rate": 0.0001710824191327075,
|
2676 |
+
"loss": 3.0469,
|
2677 |
+
"step": 380
|
2678 |
+
},
|
2679 |
+
{
|
2680 |
+
"epoch": 0.024898604354805114,
|
2681 |
+
"grad_norm": 2.2406117916107178,
|
2682 |
+
"learning_rate": 0.00017093395915062428,
|
2683 |
+
"loss": 3.0364,
|
2684 |
+
"step": 381
|
2685 |
+
},
|
2686 |
+
{
|
2687 |
+
"epoch": 0.024963955022403027,
|
2688 |
+
"grad_norm": 2.573399782180786,
|
2689 |
+
"learning_rate": 0.00017078518382695465,
|
2690 |
+
"loss": 3.0017,
|
2691 |
+
"step": 382
|
2692 |
+
},
|
2693 |
+
{
|
2694 |
+
"epoch": 0.02502930569000094,
|
2695 |
+
"grad_norm": 2.368605375289917,
|
2696 |
+
"learning_rate": 0.00017063609382308908,
|
2697 |
+
"loss": 3.1007,
|
2698 |
+
"step": 383
|
2699 |
+
},
|
2700 |
+
{
|
2701 |
+
"epoch": 0.025094656357598853,
|
2702 |
+
"grad_norm": 2.4229259490966797,
|
2703 |
+
"learning_rate": 0.00017048668980181698,
|
2704 |
+
"loss": 2.9687,
|
2705 |
+
"step": 384
|
2706 |
+
},
|
2707 |
+
{
|
2708 |
+
"epoch": 0.025160007025196766,
|
2709 |
+
"grad_norm": 2.3166263103485107,
|
2710 |
+
"learning_rate": 0.00017033697242732377,
|
2711 |
+
"loss": 2.8533,
|
2712 |
+
"step": 385
|
2713 |
+
},
|
2714 |
+
{
|
2715 |
+
"epoch": 0.02522535769279468,
|
2716 |
+
"grad_norm": 3.2635836601257324,
|
2717 |
+
"learning_rate": 0.0001701869423651879,
|
2718 |
+
"loss": 3.0645,
|
2719 |
+
"step": 386
|
2720 |
+
},
|
2721 |
+
{
|
2722 |
+
"epoch": 0.025290708360392593,
|
2723 |
+
"grad_norm": 2.6406781673431396,
|
2724 |
+
"learning_rate": 0.00017003660028237793,
|
2725 |
+
"loss": 3.0866,
|
2726 |
+
"step": 387
|
2727 |
+
},
|
2728 |
+
{
|
2729 |
+
"epoch": 0.02535605902799051,
|
2730 |
+
"grad_norm": 3.3586738109588623,
|
2731 |
+
"learning_rate": 0.00016988594684724947,
|
2732 |
+
"loss": 2.9727,
|
2733 |
+
"step": 388
|
2734 |
+
},
|
2735 |
+
{
|
2736 |
+
"epoch": 0.025421409695588423,
|
2737 |
+
"grad_norm": 3.0996286869049072,
|
2738 |
+
"learning_rate": 0.00016973498272954222,
|
2739 |
+
"loss": 3.0378,
|
2740 |
+
"step": 389
|
2741 |
+
},
|
2742 |
+
{
|
2743 |
+
"epoch": 0.025486760363186336,
|
2744 |
+
"grad_norm": 2.8624045848846436,
|
2745 |
+
"learning_rate": 0.00016958370860037717,
|
2746 |
+
"loss": 3.3147,
|
2747 |
+
"step": 390
|
2748 |
+
},
|
2749 |
+
{
|
2750 |
+
"epoch": 0.02555211103078425,
|
2751 |
+
"grad_norm": 3.3544201850891113,
|
2752 |
+
"learning_rate": 0.00016943212513225345,
|
2753 |
+
"loss": 3.1472,
|
2754 |
+
"step": 391
|
2755 |
+
},
|
2756 |
+
{
|
2757 |
+
"epoch": 0.025617461698382162,
|
2758 |
+
"grad_norm": 4.079308986663818,
|
2759 |
+
"learning_rate": 0.00016928023299904533,
|
2760 |
+
"loss": 2.9491,
|
2761 |
+
"step": 392
|
2762 |
+
},
|
2763 |
+
{
|
2764 |
+
"epoch": 0.025682812365980075,
|
2765 |
+
"grad_norm": 3.312035083770752,
|
2766 |
+
"learning_rate": 0.0001691280328759992,
|
2767 |
+
"loss": 2.811,
|
2768 |
+
"step": 393
|
2769 |
+
},
|
2770 |
+
{
|
2771 |
+
"epoch": 0.02574816303357799,
|
2772 |
+
"grad_norm": 3.138267755508423,
|
2773 |
+
"learning_rate": 0.00016897552543973084,
|
2774 |
+
"loss": 2.6726,
|
2775 |
+
"step": 394
|
2776 |
+
},
|
2777 |
+
{
|
2778 |
+
"epoch": 0.025813513701175905,
|
2779 |
+
"grad_norm": 3.613313913345337,
|
2780 |
+
"learning_rate": 0.00016882271136822206,
|
2781 |
+
"loss": 2.8072,
|
2782 |
+
"step": 395
|
2783 |
+
},
|
2784 |
+
{
|
2785 |
+
"epoch": 0.02587886436877382,
|
2786 |
+
"grad_norm": 3.8998324871063232,
|
2787 |
+
"learning_rate": 0.0001686695913408179,
|
2788 |
+
"loss": 3.1871,
|
2789 |
+
"step": 396
|
2790 |
+
},
|
2791 |
+
{
|
2792 |
+
"epoch": 0.02594421503637173,
|
2793 |
+
"grad_norm": 6.394239902496338,
|
2794 |
+
"learning_rate": 0.0001685161660382235,
|
2795 |
+
"loss": 3.0556,
|
2796 |
+
"step": 397
|
2797 |
+
},
|
2798 |
+
{
|
2799 |
+
"epoch": 0.026009565703969645,
|
2800 |
+
"grad_norm": 5.196231842041016,
|
2801 |
+
"learning_rate": 0.00016836243614250113,
|
2802 |
+
"loss": 3.4508,
|
2803 |
+
"step": 398
|
2804 |
+
},
|
2805 |
+
{
|
2806 |
+
"epoch": 0.026074916371567558,
|
2807 |
+
"grad_norm": 5.16098165512085,
|
2808 |
+
"learning_rate": 0.00016820840233706719,
|
2809 |
+
"loss": 2.7991,
|
2810 |
+
"step": 399
|
2811 |
+
},
|
2812 |
+
{
|
2813 |
+
"epoch": 0.02614026703916547,
|
2814 |
+
"grad_norm": 9.842926979064941,
|
2815 |
+
"learning_rate": 0.0001680540653066891,
|
2816 |
+
"loss": 2.9014,
|
2817 |
+
"step": 400
|
2818 |
+
},
|
2819 |
+
{
|
2820 |
+
"epoch": 0.026205617706763384,
|
2821 |
+
"grad_norm": 18.941564559936523,
|
2822 |
+
"learning_rate": 0.00016789942573748232,
|
2823 |
+
"loss": 3.7021,
|
2824 |
+
"step": 401
|
2825 |
+
},
|
2826 |
+
{
|
2827 |
+
"epoch": 0.026270968374361298,
|
2828 |
+
"grad_norm": 5.617252826690674,
|
2829 |
+
"learning_rate": 0.0001677444843169072,
|
2830 |
+
"loss": 3.4405,
|
2831 |
+
"step": 402
|
2832 |
+
},
|
2833 |
+
{
|
2834 |
+
"epoch": 0.026336319041959214,
|
2835 |
+
"grad_norm": 6.610146522521973,
|
2836 |
+
"learning_rate": 0.00016758924173376603,
|
2837 |
+
"loss": 3.2698,
|
2838 |
+
"step": 403
|
2839 |
+
},
|
2840 |
+
{
|
2841 |
+
"epoch": 0.026401669709557127,
|
2842 |
+
"grad_norm": 3.9733502864837646,
|
2843 |
+
"learning_rate": 0.0001674336986781999,
|
2844 |
+
"loss": 3.2307,
|
2845 |
+
"step": 404
|
2846 |
+
},
|
2847 |
+
{
|
2848 |
+
"epoch": 0.02646702037715504,
|
2849 |
+
"grad_norm": 1.9799753427505493,
|
2850 |
+
"learning_rate": 0.00016727785584168581,
|
2851 |
+
"loss": 3.1771,
|
2852 |
+
"step": 405
|
2853 |
+
},
|
2854 |
+
{
|
2855 |
+
"epoch": 0.026532371044752954,
|
2856 |
+
"grad_norm": 2.117891311645508,
|
2857 |
+
"learning_rate": 0.0001671217139170333,
|
2858 |
+
"loss": 3.2574,
|
2859 |
+
"step": 406
|
2860 |
+
},
|
2861 |
+
{
|
2862 |
+
"epoch": 0.026597721712350867,
|
2863 |
+
"grad_norm": 2.028475522994995,
|
2864 |
+
"learning_rate": 0.00016696527359838154,
|
2865 |
+
"loss": 3.3172,
|
2866 |
+
"step": 407
|
2867 |
+
},
|
2868 |
+
{
|
2869 |
+
"epoch": 0.02666307237994878,
|
2870 |
+
"grad_norm": 1.6997264623641968,
|
2871 |
+
"learning_rate": 0.00016680853558119632,
|
2872 |
+
"loss": 3.119,
|
2873 |
+
"step": 408
|
2874 |
+
},
|
2875 |
+
{
|
2876 |
+
"epoch": 0.026728423047546693,
|
2877 |
+
"grad_norm": 1.6663233041763306,
|
2878 |
+
"learning_rate": 0.0001666515005622668,
|
2879 |
+
"loss": 3.0839,
|
2880 |
+
"step": 409
|
2881 |
+
},
|
2882 |
+
{
|
2883 |
+
"epoch": 0.02679377371514461,
|
2884 |
+
"grad_norm": 1.6285985708236694,
|
2885 |
+
"learning_rate": 0.0001664941692397025,
|
2886 |
+
"loss": 3.0017,
|
2887 |
+
"step": 410
|
2888 |
+
},
|
2889 |
+
{
|
2890 |
+
"epoch": 0.026859124382742523,
|
2891 |
+
"grad_norm": 1.765998363494873,
|
2892 |
+
"learning_rate": 0.00016633654231293013,
|
2893 |
+
"loss": 2.9814,
|
2894 |
+
"step": 411
|
2895 |
+
},
|
2896 |
+
{
|
2897 |
+
"epoch": 0.026924475050340436,
|
2898 |
+
"grad_norm": 1.6121286153793335,
|
2899 |
+
"learning_rate": 0.00016617862048269065,
|
2900 |
+
"loss": 2.8452,
|
2901 |
+
"step": 412
|
2902 |
+
},
|
2903 |
+
{
|
2904 |
+
"epoch": 0.02698982571793835,
|
2905 |
+
"grad_norm": 1.76937997341156,
|
2906 |
+
"learning_rate": 0.00016602040445103588,
|
2907 |
+
"loss": 3.1867,
|
2908 |
+
"step": 413
|
2909 |
+
},
|
2910 |
+
{
|
2911 |
+
"epoch": 0.027055176385536263,
|
2912 |
+
"grad_norm": 1.8129544258117676,
|
2913 |
+
"learning_rate": 0.00016586189492132566,
|
2914 |
+
"loss": 2.9669,
|
2915 |
+
"step": 414
|
2916 |
+
},
|
2917 |
+
{
|
2918 |
+
"epoch": 0.027120527053134176,
|
2919 |
+
"grad_norm": 1.7040003538131714,
|
2920 |
+
"learning_rate": 0.00016570309259822453,
|
2921 |
+
"loss": 2.9403,
|
2922 |
+
"step": 415
|
2923 |
+
},
|
2924 |
+
{
|
2925 |
+
"epoch": 0.02718587772073209,
|
2926 |
+
"grad_norm": 1.8865150213241577,
|
2927 |
+
"learning_rate": 0.0001655439981876987,
|
2928 |
+
"loss": 3.1433,
|
2929 |
+
"step": 416
|
2930 |
+
},
|
2931 |
+
{
|
2932 |
+
"epoch": 0.027251228388330006,
|
2933 |
+
"grad_norm": 1.884498119354248,
|
2934 |
+
"learning_rate": 0.00016538461239701277,
|
2935 |
+
"loss": 3.0231,
|
2936 |
+
"step": 417
|
2937 |
+
},
|
2938 |
+
{
|
2939 |
+
"epoch": 0.02731657905592792,
|
2940 |
+
"grad_norm": 1.9052989482879639,
|
2941 |
+
"learning_rate": 0.00016522493593472683,
|
2942 |
+
"loss": 2.8977,
|
2943 |
+
"step": 418
|
2944 |
+
},
|
2945 |
+
{
|
2946 |
+
"epoch": 0.027381929723525832,
|
2947 |
+
"grad_norm": 2.26375150680542,
|
2948 |
+
"learning_rate": 0.0001650649695106931,
|
2949 |
+
"loss": 3.1765,
|
2950 |
+
"step": 419
|
2951 |
+
},
|
2952 |
+
{
|
2953 |
+
"epoch": 0.027447280391123745,
|
2954 |
+
"grad_norm": 2.234987258911133,
|
2955 |
+
"learning_rate": 0.00016490471383605288,
|
2956 |
+
"loss": 3.2449,
|
2957 |
+
"step": 420
|
2958 |
+
},
|
2959 |
+
{
|
2960 |
+
"epoch": 0.02751263105872166,
|
2961 |
+
"grad_norm": 2.077850818634033,
|
2962 |
+
"learning_rate": 0.00016474416962323325,
|
2963 |
+
"loss": 2.838,
|
2964 |
+
"step": 421
|
2965 |
+
},
|
2966 |
+
{
|
2967 |
+
"epoch": 0.027577981726319572,
|
2968 |
+
"grad_norm": 2.2941253185272217,
|
2969 |
+
"learning_rate": 0.00016458333758594414,
|
2970 |
+
"loss": 2.985,
|
2971 |
+
"step": 422
|
2972 |
+
},
|
2973 |
+
{
|
2974 |
+
"epoch": 0.027643332393917485,
|
2975 |
+
"grad_norm": 2.149695873260498,
|
2976 |
+
"learning_rate": 0.00016442221843917496,
|
2977 |
+
"loss": 3.2526,
|
2978 |
+
"step": 423
|
2979 |
+
},
|
2980 |
+
{
|
2981 |
+
"epoch": 0.0277086830615154,
|
2982 |
+
"grad_norm": 2.061628580093384,
|
2983 |
+
"learning_rate": 0.00016426081289919143,
|
2984 |
+
"loss": 3.1972,
|
2985 |
+
"step": 424
|
2986 |
+
},
|
2987 |
+
{
|
2988 |
+
"epoch": 0.027774033729113315,
|
2989 |
+
"grad_norm": 2.2111194133758545,
|
2990 |
+
"learning_rate": 0.0001640991216835326,
|
2991 |
+
"loss": 3.1089,
|
2992 |
+
"step": 425
|
2993 |
+
},
|
2994 |
+
{
|
2995 |
+
"epoch": 0.027839384396711228,
|
2996 |
+
"grad_norm": 2.13586163520813,
|
2997 |
+
"learning_rate": 0.00016393714551100734,
|
2998 |
+
"loss": 3.099,
|
2999 |
+
"step": 426
|
3000 |
+
},
|
3001 |
+
{
|
3002 |
+
"epoch": 0.02790473506430914,
|
3003 |
+
"grad_norm": 2.1120922565460205,
|
3004 |
+
"learning_rate": 0.0001637748851016914,
|
3005 |
+
"loss": 3.1227,
|
3006 |
+
"step": 427
|
3007 |
+
},
|
3008 |
+
{
|
3009 |
+
"epoch": 0.027970085731907054,
|
3010 |
+
"grad_norm": 2.342458486557007,
|
3011 |
+
"learning_rate": 0.00016361234117692413,
|
3012 |
+
"loss": 3.3583,
|
3013 |
+
"step": 428
|
3014 |
+
},
|
3015 |
+
{
|
3016 |
+
"epoch": 0.028035436399504968,
|
3017 |
+
"grad_norm": 2.4300196170806885,
|
3018 |
+
"learning_rate": 0.00016344951445930526,
|
3019 |
+
"loss": 3.2421,
|
3020 |
+
"step": 429
|
3021 |
+
},
|
3022 |
+
{
|
3023 |
+
"epoch": 0.02810078706710288,
|
3024 |
+
"grad_norm": 2.273008108139038,
|
3025 |
+
"learning_rate": 0.0001632864056726917,
|
3026 |
+
"loss": 3.0715,
|
3027 |
+
"step": 430
|
3028 |
+
},
|
3029 |
+
{
|
3030 |
+
"epoch": 0.028166137734700798,
|
3031 |
+
"grad_norm": 2.4410061836242676,
|
3032 |
+
"learning_rate": 0.00016312301554219426,
|
3033 |
+
"loss": 3.1462,
|
3034 |
+
"step": 431
|
3035 |
+
},
|
3036 |
+
{
|
3037 |
+
"epoch": 0.02823148840229871,
|
3038 |
+
"grad_norm": 2.5503828525543213,
|
3039 |
+
"learning_rate": 0.00016295934479417453,
|
3040 |
+
"loss": 3.0751,
|
3041 |
+
"step": 432
|
3042 |
+
},
|
3043 |
+
{
|
3044 |
+
"epoch": 0.028296839069896624,
|
3045 |
+
"grad_norm": 2.3271334171295166,
|
3046 |
+
"learning_rate": 0.00016279539415624164,
|
3047 |
+
"loss": 3.0947,
|
3048 |
+
"step": 433
|
3049 |
+
},
|
3050 |
+
{
|
3051 |
+
"epoch": 0.028362189737494537,
|
3052 |
+
"grad_norm": 2.718640089035034,
|
3053 |
+
"learning_rate": 0.0001626311643572489,
|
3054 |
+
"loss": 2.9781,
|
3055 |
+
"step": 434
|
3056 |
+
},
|
3057 |
+
{
|
3058 |
+
"epoch": 0.02842754040509245,
|
3059 |
+
"grad_norm": 3.1213088035583496,
|
3060 |
+
"learning_rate": 0.00016246665612729074,
|
3061 |
+
"loss": 3.1426,
|
3062 |
+
"step": 435
|
3063 |
+
},
|
3064 |
+
{
|
3065 |
+
"epoch": 0.028492891072690364,
|
3066 |
+
"grad_norm": 2.5128207206726074,
|
3067 |
+
"learning_rate": 0.00016230187019769928,
|
3068 |
+
"loss": 2.8893,
|
3069 |
+
"step": 436
|
3070 |
+
},
|
3071 |
+
{
|
3072 |
+
"epoch": 0.028558241740288277,
|
3073 |
+
"grad_norm": 2.748035430908203,
|
3074 |
+
"learning_rate": 0.00016213680730104124,
|
3075 |
+
"loss": 2.9823,
|
3076 |
+
"step": 437
|
3077 |
+
},
|
3078 |
+
{
|
3079 |
+
"epoch": 0.028623592407886193,
|
3080 |
+
"grad_norm": 3.006138801574707,
|
3081 |
+
"learning_rate": 0.0001619714681711146,
|
3082 |
+
"loss": 2.9931,
|
3083 |
+
"step": 438
|
3084 |
+
},
|
3085 |
+
{
|
3086 |
+
"epoch": 0.028688943075484107,
|
3087 |
+
"grad_norm": 2.952946424484253,
|
3088 |
+
"learning_rate": 0.00016180585354294536,
|
3089 |
+
"loss": 2.8257,
|
3090 |
+
"step": 439
|
3091 |
+
},
|
3092 |
+
{
|
3093 |
+
"epoch": 0.02875429374308202,
|
3094 |
+
"grad_norm": 3.55165958404541,
|
3095 |
+
"learning_rate": 0.00016163996415278424,
|
3096 |
+
"loss": 3.0414,
|
3097 |
+
"step": 440
|
3098 |
+
},
|
3099 |
+
{
|
3100 |
+
"epoch": 0.028819644410679933,
|
3101 |
+
"grad_norm": 3.483384370803833,
|
3102 |
+
"learning_rate": 0.00016147380073810346,
|
3103 |
+
"loss": 3.0496,
|
3104 |
+
"step": 441
|
3105 |
+
},
|
3106 |
+
{
|
3107 |
+
"epoch": 0.028884995078277846,
|
3108 |
+
"grad_norm": 3.988264322280884,
|
3109 |
+
"learning_rate": 0.0001613073640375934,
|
3110 |
+
"loss": 3.0479,
|
3111 |
+
"step": 442
|
3112 |
+
},
|
3113 |
+
{
|
3114 |
+
"epoch": 0.02895034574587576,
|
3115 |
+
"grad_norm": 3.7168514728546143,
|
3116 |
+
"learning_rate": 0.00016114065479115946,
|
3117 |
+
"loss": 2.7044,
|
3118 |
+
"step": 443
|
3119 |
+
},
|
3120 |
+
{
|
3121 |
+
"epoch": 0.029015696413473673,
|
3122 |
+
"grad_norm": 3.7768290042877197,
|
3123 |
+
"learning_rate": 0.00016097367373991842,
|
3124 |
+
"loss": 3.2466,
|
3125 |
+
"step": 444
|
3126 |
+
},
|
3127 |
+
{
|
3128 |
+
"epoch": 0.02908104708107159,
|
3129 |
+
"grad_norm": 4.519565582275391,
|
3130 |
+
"learning_rate": 0.00016080642162619565,
|
3131 |
+
"loss": 2.6545,
|
3132 |
+
"step": 445
|
3133 |
+
},
|
3134 |
+
{
|
3135 |
+
"epoch": 0.029146397748669502,
|
3136 |
+
"grad_norm": 3.5160961151123047,
|
3137 |
+
"learning_rate": 0.0001606388991935214,
|
3138 |
+
"loss": 3.0352,
|
3139 |
+
"step": 446
|
3140 |
+
},
|
3141 |
+
{
|
3142 |
+
"epoch": 0.029211748416267416,
|
3143 |
+
"grad_norm": 3.72017502784729,
|
3144 |
+
"learning_rate": 0.0001604711071866277,
|
3145 |
+
"loss": 2.7479,
|
3146 |
+
"step": 447
|
3147 |
+
},
|
3148 |
+
{
|
3149 |
+
"epoch": 0.02927709908386533,
|
3150 |
+
"grad_norm": 4.312921047210693,
|
3151 |
+
"learning_rate": 0.00016030304635144494,
|
3152 |
+
"loss": 3.0772,
|
3153 |
+
"step": 448
|
3154 |
+
},
|
3155 |
+
{
|
3156 |
+
"epoch": 0.029342449751463242,
|
3157 |
+
"grad_norm": 5.077251434326172,
|
3158 |
+
"learning_rate": 0.00016013471743509862,
|
3159 |
+
"loss": 2.801,
|
3160 |
+
"step": 449
|
3161 |
+
},
|
3162 |
+
{
|
3163 |
+
"epoch": 0.029407800419061155,
|
3164 |
+
"grad_norm": 7.480358600616455,
|
3165 |
+
"learning_rate": 0.00015996612118590603,
|
3166 |
+
"loss": 3.4775,
|
3167 |
+
"step": 450
|
3168 |
+
},
|
3169 |
+
{
|
3170 |
+
"epoch": 0.02947315108665907,
|
3171 |
+
"grad_norm": 24.4322452545166,
|
3172 |
+
"learning_rate": 0.00015979725835337294,
|
3173 |
+
"loss": 4.0432,
|
3174 |
+
"step": 451
|
3175 |
+
},
|
3176 |
+
{
|
3177 |
+
"epoch": 0.029538501754256985,
|
3178 |
+
"grad_norm": 8.49026107788086,
|
3179 |
+
"learning_rate": 0.00015962812968819016,
|
3180 |
+
"loss": 3.4552,
|
3181 |
+
"step": 452
|
3182 |
+
},
|
3183 |
+
{
|
3184 |
+
"epoch": 0.029603852421854898,
|
3185 |
+
"grad_norm": 7.049969673156738,
|
3186 |
+
"learning_rate": 0.0001594587359422303,
|
3187 |
+
"loss": 3.3839,
|
3188 |
+
"step": 453
|
3189 |
+
},
|
3190 |
+
{
|
3191 |
+
"epoch": 0.02966920308945281,
|
3192 |
+
"grad_norm": 9.08895206451416,
|
3193 |
+
"learning_rate": 0.0001592890778685444,
|
3194 |
+
"loss": 3.4686,
|
3195 |
+
"step": 454
|
3196 |
+
},
|
3197 |
+
{
|
3198 |
+
"epoch": 0.029734553757050725,
|
3199 |
+
"grad_norm": 3.652604818344116,
|
3200 |
+
"learning_rate": 0.00015911915622135862,
|
3201 |
+
"loss": 3.1602,
|
3202 |
+
"step": 455
|
3203 |
+
},
|
3204 |
+
{
|
3205 |
+
"epoch": 0.029799904424648638,
|
3206 |
+
"grad_norm": 8.94747257232666,
|
3207 |
+
"learning_rate": 0.00015894897175607086,
|
3208 |
+
"loss": 3.283,
|
3209 |
+
"step": 456
|
3210 |
+
},
|
3211 |
+
{
|
3212 |
+
"epoch": 0.02986525509224655,
|
3213 |
+
"grad_norm": 3.347947597503662,
|
3214 |
+
"learning_rate": 0.00015877852522924732,
|
3215 |
+
"loss": 3.0169,
|
3216 |
+
"step": 457
|
3217 |
+
},
|
3218 |
+
{
|
3219 |
+
"epoch": 0.029930605759844464,
|
3220 |
+
"grad_norm": 2.9615442752838135,
|
3221 |
+
"learning_rate": 0.00015860781739861928,
|
3222 |
+
"loss": 3.105,
|
3223 |
+
"step": 458
|
3224 |
+
},
|
3225 |
+
{
|
3226 |
+
"epoch": 0.02999595642744238,
|
3227 |
+
"grad_norm": 1.7090089321136475,
|
3228 |
+
"learning_rate": 0.00015843684902307962,
|
3229 |
+
"loss": 2.9634,
|
3230 |
+
"step": 459
|
3231 |
+
},
|
3232 |
+
{
|
3233 |
+
"epoch": 0.030061307095040294,
|
3234 |
+
"grad_norm": 1.87732994556427,
|
3235 |
+
"learning_rate": 0.00015826562086267956,
|
3236 |
+
"loss": 3.0923,
|
3237 |
+
"step": 460
|
3238 |
+
},
|
3239 |
+
{
|
3240 |
+
"epoch": 0.030126657762638207,
|
3241 |
+
"grad_norm": 1.8423588275909424,
|
3242 |
+
"learning_rate": 0.00015809413367862512,
|
3243 |
+
"loss": 3.0317,
|
3244 |
+
"step": 461
|
3245 |
+
},
|
3246 |
+
{
|
3247 |
+
"epoch": 0.03019200843023612,
|
3248 |
+
"grad_norm": 1.9231833219528198,
|
3249 |
+
"learning_rate": 0.00015792238823327388,
|
3250 |
+
"loss": 3.0236,
|
3251 |
+
"step": 462
|
3252 |
+
},
|
3253 |
+
{
|
3254 |
+
"epoch": 0.030257359097834034,
|
3255 |
+
"grad_norm": 1.718591570854187,
|
3256 |
+
"learning_rate": 0.00015775038529013152,
|
3257 |
+
"loss": 3.0055,
|
3258 |
+
"step": 463
|
3259 |
+
},
|
3260 |
+
{
|
3261 |
+
"epoch": 0.030322709765431947,
|
3262 |
+
"grad_norm": 1.6554956436157227,
|
3263 |
+
"learning_rate": 0.0001575781256138485,
|
3264 |
+
"loss": 2.9719,
|
3265 |
+
"step": 464
|
3266 |
+
},
|
3267 |
+
{
|
3268 |
+
"epoch": 0.03038806043302986,
|
3269 |
+
"grad_norm": 1.7320899963378906,
|
3270 |
+
"learning_rate": 0.00015740560997021648,
|
3271 |
+
"loss": 3.048,
|
3272 |
+
"step": 465
|
3273 |
+
},
|
3274 |
+
{
|
3275 |
+
"epoch": 0.030453411100627773,
|
3276 |
+
"grad_norm": 2.028738260269165,
|
3277 |
+
"learning_rate": 0.00015723283912616513,
|
3278 |
+
"loss": 3.1429,
|
3279 |
+
"step": 466
|
3280 |
+
},
|
3281 |
+
{
|
3282 |
+
"epoch": 0.03051876176822569,
|
3283 |
+
"grad_norm": 1.883371114730835,
|
3284 |
+
"learning_rate": 0.00015705981384975866,
|
3285 |
+
"loss": 3.0832,
|
3286 |
+
"step": 467
|
3287 |
+
},
|
3288 |
+
{
|
3289 |
+
"epoch": 0.030584112435823603,
|
3290 |
+
"grad_norm": 1.8488166332244873,
|
3291 |
+
"learning_rate": 0.0001568865349101923,
|
3292 |
+
"loss": 2.9372,
|
3293 |
+
"step": 468
|
3294 |
+
},
|
3295 |
+
{
|
3296 |
+
"epoch": 0.030649463103421516,
|
3297 |
+
"grad_norm": 2.0402255058288574,
|
3298 |
+
"learning_rate": 0.00015671300307778898,
|
3299 |
+
"loss": 3.2902,
|
3300 |
+
"step": 469
|
3301 |
+
},
|
3302 |
+
{
|
3303 |
+
"epoch": 0.03071481377101943,
|
3304 |
+
"grad_norm": 1.9097765684127808,
|
3305 |
+
"learning_rate": 0.00015653921912399589,
|
3306 |
+
"loss": 3.0758,
|
3307 |
+
"step": 470
|
3308 |
+
},
|
3309 |
+
{
|
3310 |
+
"epoch": 0.030780164438617343,
|
3311 |
+
"grad_norm": 1.973686695098877,
|
3312 |
+
"learning_rate": 0.00015636518382138107,
|
3313 |
+
"loss": 2.8372,
|
3314 |
+
"step": 471
|
3315 |
+
},
|
3316 |
+
{
|
3317 |
+
"epoch": 0.030845515106215256,
|
3318 |
+
"grad_norm": 1.9181742668151855,
|
3319 |
+
"learning_rate": 0.0001561908979436299,
|
3320 |
+
"loss": 2.8792,
|
3321 |
+
"step": 472
|
3322 |
+
},
|
3323 |
+
{
|
3324 |
+
"epoch": 0.03091086577381317,
|
3325 |
+
"grad_norm": 1.8591138124465942,
|
3326 |
+
"learning_rate": 0.00015601636226554168,
|
3327 |
+
"loss": 3.0248,
|
3328 |
+
"step": 473
|
3329 |
+
},
|
3330 |
+
{
|
3331 |
+
"epoch": 0.030976216441411086,
|
3332 |
+
"grad_norm": 1.9952205419540405,
|
3333 |
+
"learning_rate": 0.00015584157756302634,
|
3334 |
+
"loss": 2.8397,
|
3335 |
+
"step": 474
|
3336 |
+
},
|
3337 |
+
{
|
3338 |
+
"epoch": 0.031041567109009,
|
3339 |
+
"grad_norm": 2.088942289352417,
|
3340 |
+
"learning_rate": 0.0001556665446131007,
|
3341 |
+
"loss": 2.9455,
|
3342 |
+
"step": 475
|
3343 |
+
},
|
3344 |
+
{
|
3345 |
+
"epoch": 0.031106917776606912,
|
3346 |
+
"grad_norm": 2.370609998703003,
|
3347 |
+
"learning_rate": 0.00015549126419388536,
|
3348 |
+
"loss": 3.0924,
|
3349 |
+
"step": 476
|
3350 |
+
},
|
3351 |
+
{
|
3352 |
+
"epoch": 0.031172268444204825,
|
3353 |
+
"grad_norm": 2.1024417877197266,
|
3354 |
+
"learning_rate": 0.0001553157370846009,
|
3355 |
+
"loss": 2.857,
|
3356 |
+
"step": 477
|
3357 |
+
},
|
3358 |
+
{
|
3359 |
+
"epoch": 0.03123761911180274,
|
3360 |
+
"grad_norm": 2.367189407348633,
|
3361 |
+
"learning_rate": 0.00015513996406556465,
|
3362 |
+
"loss": 2.9313,
|
3363 |
+
"step": 478
|
3364 |
+
},
|
3365 |
+
{
|
3366 |
+
"epoch": 0.031302969779400655,
|
3367 |
+
"grad_norm": 2.203998327255249,
|
3368 |
+
"learning_rate": 0.00015496394591818716,
|
3369 |
+
"loss": 3.0986,
|
3370 |
+
"step": 479
|
3371 |
+
},
|
3372 |
+
{
|
3373 |
+
"epoch": 0.03136832044699857,
|
3374 |
+
"grad_norm": 2.1466145515441895,
|
3375 |
+
"learning_rate": 0.0001547876834249687,
|
3376 |
+
"loss": 2.7535,
|
3377 |
+
"step": 480
|
3378 |
+
},
|
3379 |
+
{
|
3380 |
+
"epoch": 0.03143367111459648,
|
3381 |
+
"grad_norm": 2.2550671100616455,
|
3382 |
+
"learning_rate": 0.00015461117736949577,
|
3383 |
+
"loss": 3.0209,
|
3384 |
+
"step": 481
|
3385 |
+
},
|
3386 |
+
{
|
3387 |
+
"epoch": 0.031499021782194395,
|
3388 |
+
"grad_norm": 2.7715814113616943,
|
3389 |
+
"learning_rate": 0.00015443442853643762,
|
3390 |
+
"loss": 3.1244,
|
3391 |
+
"step": 482
|
3392 |
+
},
|
3393 |
+
{
|
3394 |
+
"epoch": 0.03156437244979231,
|
3395 |
+
"grad_norm": 2.6216328144073486,
|
3396 |
+
"learning_rate": 0.00015425743771154294,
|
3397 |
+
"loss": 3.0967,
|
3398 |
+
"step": 483
|
3399 |
+
},
|
3400 |
+
{
|
3401 |
+
"epoch": 0.03162972311739022,
|
3402 |
+
"grad_norm": 2.9239511489868164,
|
3403 |
+
"learning_rate": 0.00015408020568163602,
|
3404 |
+
"loss": 2.9292,
|
3405 |
+
"step": 484
|
3406 |
+
},
|
3407 |
+
{
|
3408 |
+
"epoch": 0.031695073784988134,
|
3409 |
+
"grad_norm": 2.6827921867370605,
|
3410 |
+
"learning_rate": 0.00015390273323461352,
|
3411 |
+
"loss": 3.0856,
|
3412 |
+
"step": 485
|
3413 |
+
},
|
3414 |
+
{
|
3415 |
+
"epoch": 0.03176042445258605,
|
3416 |
+
"grad_norm": 2.7837822437286377,
|
3417 |
+
"learning_rate": 0.0001537250211594409,
|
3418 |
+
"loss": 2.9937,
|
3419 |
+
"step": 486
|
3420 |
+
},
|
3421 |
+
{
|
3422 |
+
"epoch": 0.03182577512018396,
|
3423 |
+
"grad_norm": 2.8517332077026367,
|
3424 |
+
"learning_rate": 0.0001535470702461489,
|
3425 |
+
"loss": 3.18,
|
3426 |
+
"step": 487
|
3427 |
+
},
|
3428 |
+
{
|
3429 |
+
"epoch": 0.031891125787781874,
|
3430 |
+
"grad_norm": 3.131091594696045,
|
3431 |
+
"learning_rate": 0.00015336888128583,
|
3432 |
+
"loss": 3.0751,
|
3433 |
+
"step": 488
|
3434 |
+
},
|
3435 |
+
{
|
3436 |
+
"epoch": 0.03195647645537979,
|
3437 |
+
"grad_norm": 3.4372997283935547,
|
3438 |
+
"learning_rate": 0.000153190455070635,
|
3439 |
+
"loss": 3.1115,
|
3440 |
+
"step": 489
|
3441 |
+
},
|
3442 |
+
{
|
3443 |
+
"epoch": 0.0320218271229777,
|
3444 |
+
"grad_norm": 3.2346112728118896,
|
3445 |
+
"learning_rate": 0.00015301179239376938,
|
3446 |
+
"loss": 3.1384,
|
3447 |
+
"step": 490
|
3448 |
+
},
|
3449 |
+
{
|
3450 |
+
"epoch": 0.03208717779057562,
|
3451 |
+
"grad_norm": 3.102898120880127,
|
3452 |
+
"learning_rate": 0.00015283289404948976,
|
3453 |
+
"loss": 3.0725,
|
3454 |
+
"step": 491
|
3455 |
+
},
|
3456 |
+
{
|
3457 |
+
"epoch": 0.032152528458173533,
|
3458 |
+
"grad_norm": 3.6215391159057617,
|
3459 |
+
"learning_rate": 0.0001526537608331006,
|
3460 |
+
"loss": 2.9572,
|
3461 |
+
"step": 492
|
3462 |
+
},
|
3463 |
+
{
|
3464 |
+
"epoch": 0.03221787912577145,
|
3465 |
+
"grad_norm": 3.090087652206421,
|
3466 |
+
"learning_rate": 0.00015247439354095041,
|
3467 |
+
"loss": 2.9867,
|
3468 |
+
"step": 493
|
3469 |
+
},
|
3470 |
+
{
|
3471 |
+
"epoch": 0.03228322979336936,
|
3472 |
+
"grad_norm": 3.4262967109680176,
|
3473 |
+
"learning_rate": 0.00015229479297042823,
|
3474 |
+
"loss": 2.8331,
|
3475 |
+
"step": 494
|
3476 |
+
},
|
3477 |
+
{
|
3478 |
+
"epoch": 0.03234858046096727,
|
3479 |
+
"grad_norm": 3.6537046432495117,
|
3480 |
+
"learning_rate": 0.00015211495991996027,
|
3481 |
+
"loss": 3.0767,
|
3482 |
+
"step": 495
|
3483 |
+
},
|
3484 |
+
{
|
3485 |
+
"epoch": 0.032413931128565186,
|
3486 |
+
"grad_norm": 3.5232338905334473,
|
3487 |
+
"learning_rate": 0.0001519348951890062,
|
3488 |
+
"loss": 2.8771,
|
3489 |
+
"step": 496
|
3490 |
+
},
|
3491 |
+
{
|
3492 |
+
"epoch": 0.0324792817961631,
|
3493 |
+
"grad_norm": 4.434952259063721,
|
3494 |
+
"learning_rate": 0.0001517545995780556,
|
3495 |
+
"loss": 2.8009,
|
3496 |
+
"step": 497
|
3497 |
+
},
|
3498 |
+
{
|
3499 |
+
"epoch": 0.03254463246376101,
|
3500 |
+
"grad_norm": 6.826545238494873,
|
3501 |
+
"learning_rate": 0.00015157407388862452,
|
3502 |
+
"loss": 2.8267,
|
3503 |
+
"step": 498
|
3504 |
+
},
|
3505 |
+
{
|
3506 |
+
"epoch": 0.032609983131358926,
|
3507 |
+
"grad_norm": 4.22343111038208,
|
3508 |
+
"learning_rate": 0.00015139331892325179,
|
3509 |
+
"loss": 2.3913,
|
3510 |
+
"step": 499
|
3511 |
+
},
|
3512 |
+
{
|
3513 |
+
"epoch": 0.03267533379895684,
|
3514 |
+
"grad_norm": 6.091843128204346,
|
3515 |
+
"learning_rate": 0.0001512123354854955,
|
3516 |
+
"loss": 2.8988,
|
3517 |
+
"step": 500
|
3518 |
+
},
|
3519 |
+
{
|
3520 |
+
"epoch": 0.03274068446655475,
|
3521 |
+
"grad_norm": 11.71518611907959,
|
3522 |
+
"learning_rate": 0.0001510311243799295,
|
3523 |
+
"loss": 3.6692,
|
3524 |
+
"step": 501
|
3525 |
+
},
|
3526 |
+
{
|
3527 |
+
"epoch": 0.032806035134152665,
|
3528 |
+
"grad_norm": 7.051513671875,
|
3529 |
+
"learning_rate": 0.00015084968641213958,
|
3530 |
+
"loss": 3.5298,
|
3531 |
+
"step": 502
|
3532 |
+
},
|
3533 |
+
{
|
3534 |
+
"epoch": 0.03287138580175058,
|
3535 |
+
"grad_norm": 4.335301399230957,
|
3536 |
+
"learning_rate": 0.00015066802238872023,
|
3537 |
+
"loss": 3.234,
|
3538 |
+
"step": 503
|
3539 |
+
},
|
3540 |
+
{
|
3541 |
+
"epoch": 0.03293673646934849,
|
3542 |
+
"grad_norm": 3.7799558639526367,
|
3543 |
+
"learning_rate": 0.0001504861331172709,
|
3544 |
+
"loss": 3.0844,
|
3545 |
+
"step": 504
|
3546 |
+
},
|
3547 |
+
{
|
3548 |
+
"epoch": 0.033002087136946405,
|
3549 |
+
"grad_norm": 2.185335397720337,
|
3550 |
+
"learning_rate": 0.0001503040194063922,
|
3551 |
+
"loss": 3.1615,
|
3552 |
+
"step": 505
|
3553 |
+
},
|
3554 |
+
{
|
3555 |
+
"epoch": 0.033067437804544325,
|
3556 |
+
"grad_norm": 1.9385499954223633,
|
3557 |
+
"learning_rate": 0.00015012168206568268,
|
3558 |
+
"loss": 3.1164,
|
3559 |
+
"step": 506
|
3560 |
+
},
|
3561 |
+
{
|
3562 |
+
"epoch": 0.03313278847214224,
|
3563 |
+
"grad_norm": 1.9973067045211792,
|
3564 |
+
"learning_rate": 0.00014993912190573505,
|
3565 |
+
"loss": 3.0556,
|
3566 |
+
"step": 507
|
3567 |
+
},
|
3568 |
+
{
|
3569 |
+
"epoch": 0.03319813913974015,
|
3570 |
+
"grad_norm": 1.8199883699417114,
|
3571 |
+
"learning_rate": 0.00014975633973813242,
|
3572 |
+
"loss": 2.9567,
|
3573 |
+
"step": 508
|
3574 |
+
},
|
3575 |
+
{
|
3576 |
+
"epoch": 0.033263489807338065,
|
3577 |
+
"grad_norm": 1.5722929239273071,
|
3578 |
+
"learning_rate": 0.00014957333637544503,
|
3579 |
+
"loss": 2.9897,
|
3580 |
+
"step": 509
|
3581 |
+
},
|
3582 |
+
{
|
3583 |
+
"epoch": 0.03332884047493598,
|
3584 |
+
"grad_norm": 1.9580084085464478,
|
3585 |
+
"learning_rate": 0.00014939011263122634,
|
3586 |
+
"loss": 3.0393,
|
3587 |
+
"step": 510
|
3588 |
+
},
|
3589 |
+
{
|
3590 |
+
"epoch": 0.03339419114253389,
|
3591 |
+
"grad_norm": 1.7566349506378174,
|
3592 |
+
"learning_rate": 0.0001492066693200096,
|
3593 |
+
"loss": 3.1124,
|
3594 |
+
"step": 511
|
3595 |
+
},
|
3596 |
+
{
|
3597 |
+
"epoch": 0.033459541810131804,
|
3598 |
+
"grad_norm": 1.995408058166504,
|
3599 |
+
"learning_rate": 0.00014902300725730413,
|
3600 |
+
"loss": 3.0233,
|
3601 |
+
"step": 512
|
3602 |
+
},
|
3603 |
+
{
|
3604 |
+
"epoch": 0.03352489247772972,
|
3605 |
+
"grad_norm": 1.8557460308074951,
|
3606 |
+
"learning_rate": 0.00014883912725959167,
|
3607 |
+
"loss": 3.0566,
|
3608 |
+
"step": 513
|
3609 |
+
},
|
3610 |
+
{
|
3611 |
+
"epoch": 0.03359024314532763,
|
3612 |
+
"grad_norm": 1.7373842000961304,
|
3613 |
+
"learning_rate": 0.00014865503014432292,
|
3614 |
+
"loss": 2.9989,
|
3615 |
+
"step": 514
|
3616 |
+
},
|
3617 |
+
{
|
3618 |
+
"epoch": 0.033655593812925544,
|
3619 |
+
"grad_norm": 2.1281046867370605,
|
3620 |
+
"learning_rate": 0.00014847071672991367,
|
3621 |
+
"loss": 3.1251,
|
3622 |
+
"step": 515
|
3623 |
+
},
|
3624 |
+
{
|
3625 |
+
"epoch": 0.03372094448052346,
|
3626 |
+
"grad_norm": 1.8922642469406128,
|
3627 |
+
"learning_rate": 0.0001482861878357414,
|
3628 |
+
"loss": 3.0562,
|
3629 |
+
"step": 516
|
3630 |
+
},
|
3631 |
+
{
|
3632 |
+
"epoch": 0.03378629514812137,
|
3633 |
+
"grad_norm": 2.2692742347717285,
|
3634 |
+
"learning_rate": 0.00014810144428214144,
|
3635 |
+
"loss": 3.1552,
|
3636 |
+
"step": 517
|
3637 |
+
},
|
3638 |
+
{
|
3639 |
+
"epoch": 0.03385164581571928,
|
3640 |
+
"grad_norm": 1.7412115335464478,
|
3641 |
+
"learning_rate": 0.0001479164868904034,
|
3642 |
+
"loss": 2.8901,
|
3643 |
+
"step": 518
|
3644 |
+
},
|
3645 |
+
{
|
3646 |
+
"epoch": 0.0339169964833172,
|
3647 |
+
"grad_norm": 2.91524338722229,
|
3648 |
+
"learning_rate": 0.00014773131648276758,
|
3649 |
+
"loss": 3.2572,
|
3650 |
+
"step": 519
|
3651 |
+
},
|
3652 |
+
{
|
3653 |
+
"epoch": 0.03398234715091512,
|
3654 |
+
"grad_norm": 2.537008047103882,
|
3655 |
+
"learning_rate": 0.00014754593388242117,
|
3656 |
+
"loss": 3.1831,
|
3657 |
+
"step": 520
|
3658 |
+
},
|
3659 |
+
{
|
3660 |
+
"epoch": 0.03404769781851303,
|
3661 |
+
"grad_norm": 2.0767822265625,
|
3662 |
+
"learning_rate": 0.0001473603399134948,
|
3663 |
+
"loss": 2.9038,
|
3664 |
+
"step": 521
|
3665 |
+
},
|
3666 |
+
{
|
3667 |
+
"epoch": 0.03411304848611094,
|
3668 |
+
"grad_norm": 2.3586618900299072,
|
3669 |
+
"learning_rate": 0.0001471745354010586,
|
3670 |
+
"loss": 3.1186,
|
3671 |
+
"step": 522
|
3672 |
+
},
|
3673 |
+
{
|
3674 |
+
"epoch": 0.034178399153708856,
|
3675 |
+
"grad_norm": 2.0097970962524414,
|
3676 |
+
"learning_rate": 0.00014698852117111884,
|
3677 |
+
"loss": 3.0701,
|
3678 |
+
"step": 523
|
3679 |
+
},
|
3680 |
+
{
|
3681 |
+
"epoch": 0.03424374982130677,
|
3682 |
+
"grad_norm": 2.0309152603149414,
|
3683 |
+
"learning_rate": 0.000146802298050614,
|
3684 |
+
"loss": 3.044,
|
3685 |
+
"step": 524
|
3686 |
+
},
|
3687 |
+
{
|
3688 |
+
"epoch": 0.03430910048890468,
|
3689 |
+
"grad_norm": 2.0452139377593994,
|
3690 |
+
"learning_rate": 0.0001466158668674112,
|
3691 |
+
"loss": 2.9056,
|
3692 |
+
"step": 525
|
3693 |
+
},
|
3694 |
+
{
|
3695 |
+
"epoch": 0.034374451156502596,
|
3696 |
+
"grad_norm": 2.242048740386963,
|
3697 |
+
"learning_rate": 0.00014642922845030257,
|
3698 |
+
"loss": 3.3416,
|
3699 |
+
"step": 526
|
3700 |
+
},
|
3701 |
+
{
|
3702 |
+
"epoch": 0.03443980182410051,
|
3703 |
+
"grad_norm": 2.0582847595214844,
|
3704 |
+
"learning_rate": 0.0001462423836290015,
|
3705 |
+
"loss": 2.9922,
|
3706 |
+
"step": 527
|
3707 |
+
},
|
3708 |
+
{
|
3709 |
+
"epoch": 0.03450515249169842,
|
3710 |
+
"grad_norm": 1.8688050508499146,
|
3711 |
+
"learning_rate": 0.00014605533323413887,
|
3712 |
+
"loss": 2.9809,
|
3713 |
+
"step": 528
|
3714 |
+
},
|
3715 |
+
{
|
3716 |
+
"epoch": 0.034570503159296335,
|
3717 |
+
"grad_norm": 2.1673102378845215,
|
3718 |
+
"learning_rate": 0.00014586807809725962,
|
3719 |
+
"loss": 2.9412,
|
3720 |
+
"step": 529
|
3721 |
+
},
|
3722 |
+
{
|
3723 |
+
"epoch": 0.03463585382689425,
|
3724 |
+
"grad_norm": 2.2136361598968506,
|
3725 |
+
"learning_rate": 0.00014568061905081875,
|
3726 |
+
"loss": 2.9397,
|
3727 |
+
"step": 530
|
3728 |
+
},
|
3729 |
+
{
|
3730 |
+
"epoch": 0.03470120449449216,
|
3731 |
+
"grad_norm": 2.265554428100586,
|
3732 |
+
"learning_rate": 0.00014549295692817778,
|
3733 |
+
"loss": 3.094,
|
3734 |
+
"step": 531
|
3735 |
+
},
|
3736 |
+
{
|
3737 |
+
"epoch": 0.034766555162090075,
|
3738 |
+
"grad_norm": 2.590942621231079,
|
3739 |
+
"learning_rate": 0.00014530509256360102,
|
3740 |
+
"loss": 3.0893,
|
3741 |
+
"step": 532
|
3742 |
+
},
|
3743 |
+
{
|
3744 |
+
"epoch": 0.03483190582968799,
|
3745 |
+
"grad_norm": 2.161839485168457,
|
3746 |
+
"learning_rate": 0.00014511702679225193,
|
3747 |
+
"loss": 2.8916,
|
3748 |
+
"step": 533
|
3749 |
+
},
|
3750 |
+
{
|
3751 |
+
"epoch": 0.03489725649728591,
|
3752 |
+
"grad_norm": 2.8425076007843018,
|
3753 |
+
"learning_rate": 0.0001449287604501893,
|
3754 |
+
"loss": 3.0664,
|
3755 |
+
"step": 534
|
3756 |
+
},
|
3757 |
+
{
|
3758 |
+
"epoch": 0.03496260716488382,
|
3759 |
+
"grad_norm": 2.508582592010498,
|
3760 |
+
"learning_rate": 0.00014474029437436348,
|
3761 |
+
"loss": 3.0965,
|
3762 |
+
"step": 535
|
3763 |
+
},
|
3764 |
+
{
|
3765 |
+
"epoch": 0.035027957832481735,
|
3766 |
+
"grad_norm": 2.5080628395080566,
|
3767 |
+
"learning_rate": 0.00014455162940261285,
|
3768 |
+
"loss": 2.913,
|
3769 |
+
"step": 536
|
3770 |
+
},
|
3771 |
+
{
|
3772 |
+
"epoch": 0.03509330850007965,
|
3773 |
+
"grad_norm": 2.7472963333129883,
|
3774 |
+
"learning_rate": 0.0001443627663736599,
|
3775 |
+
"loss": 3.034,
|
3776 |
+
"step": 537
|
3777 |
+
},
|
3778 |
+
{
|
3779 |
+
"epoch": 0.03515865916767756,
|
3780 |
+
"grad_norm": 3.1874053478240967,
|
3781 |
+
"learning_rate": 0.00014417370612710778,
|
3782 |
+
"loss": 3.1924,
|
3783 |
+
"step": 538
|
3784 |
+
},
|
3785 |
+
{
|
3786 |
+
"epoch": 0.035224009835275474,
|
3787 |
+
"grad_norm": 3.3476574420928955,
|
3788 |
+
"learning_rate": 0.00014398444950343623,
|
3789 |
+
"loss": 2.8451,
|
3790 |
+
"step": 539
|
3791 |
+
},
|
3792 |
+
{
|
3793 |
+
"epoch": 0.03528936050287339,
|
3794 |
+
"grad_norm": 3.191981315612793,
|
3795 |
+
"learning_rate": 0.00014379499734399798,
|
3796 |
+
"loss": 3.0234,
|
3797 |
+
"step": 540
|
3798 |
+
},
|
3799 |
+
{
|
3800 |
+
"epoch": 0.0353547111704713,
|
3801 |
+
"grad_norm": 3.4992892742156982,
|
3802 |
+
"learning_rate": 0.0001436053504910151,
|
3803 |
+
"loss": 2.6897,
|
3804 |
+
"step": 541
|
3805 |
+
},
|
3806 |
+
{
|
3807 |
+
"epoch": 0.035420061838069214,
|
3808 |
+
"grad_norm": 3.7841637134552,
|
3809 |
+
"learning_rate": 0.0001434155097875752,
|
3810 |
+
"loss": 3.2661,
|
3811 |
+
"step": 542
|
3812 |
+
},
|
3813 |
+
{
|
3814 |
+
"epoch": 0.03548541250566713,
|
3815 |
+
"grad_norm": 4.506886005401611,
|
3816 |
+
"learning_rate": 0.00014322547607762762,
|
3817 |
+
"loss": 2.9592,
|
3818 |
+
"step": 543
|
3819 |
+
},
|
3820 |
+
{
|
3821 |
+
"epoch": 0.03555076317326504,
|
3822 |
+
"grad_norm": 4.825909614562988,
|
3823 |
+
"learning_rate": 0.0001430352502059797,
|
3824 |
+
"loss": 2.7436,
|
3825 |
+
"step": 544
|
3826 |
+
},
|
3827 |
+
{
|
3828 |
+
"epoch": 0.035616113840862954,
|
3829 |
+
"grad_norm": 3.0600483417510986,
|
3830 |
+
"learning_rate": 0.0001428448330182931,
|
3831 |
+
"loss": 2.778,
|
3832 |
+
"step": 545
|
3833 |
+
},
|
3834 |
+
{
|
3835 |
+
"epoch": 0.03568146450846087,
|
3836 |
+
"grad_norm": 9.061102867126465,
|
3837 |
+
"learning_rate": 0.00014265422536107993,
|
3838 |
+
"loss": 3.1647,
|
3839 |
+
"step": 546
|
3840 |
+
},
|
3841 |
+
{
|
3842 |
+
"epoch": 0.03574681517605878,
|
3843 |
+
"grad_norm": 7.136674880981445,
|
3844 |
+
"learning_rate": 0.00014246342808169914,
|
3845 |
+
"loss": 2.5426,
|
3846 |
+
"step": 547
|
3847 |
+
},
|
3848 |
+
{
|
3849 |
+
"epoch": 0.0358121658436567,
|
3850 |
+
"grad_norm": 5.525428295135498,
|
3851 |
+
"learning_rate": 0.00014227244202835257,
|
3852 |
+
"loss": 2.6582,
|
3853 |
+
"step": 548
|
3854 |
+
},
|
3855 |
+
{
|
3856 |
+
"epoch": 0.03587751651125461,
|
3857 |
+
"grad_norm": 5.315718650817871,
|
3858 |
+
"learning_rate": 0.0001420812680500813,
|
3859 |
+
"loss": 2.8048,
|
3860 |
+
"step": 549
|
3861 |
+
},
|
3862 |
+
{
|
3863 |
+
"epoch": 0.035942867178852526,
|
3864 |
+
"grad_norm": 8.281085014343262,
|
3865 |
+
"learning_rate": 0.00014188990699676184,
|
3866 |
+
"loss": 2.8449,
|
3867 |
+
"step": 550
|
3868 |
+
},
|
3869 |
+
{
|
3870 |
+
"epoch": 0.03600821784645044,
|
3871 |
+
"grad_norm": 8.362018585205078,
|
3872 |
+
"learning_rate": 0.00014169835971910238,
|
3873 |
+
"loss": 3.1894,
|
3874 |
+
"step": 551
|
3875 |
+
},
|
3876 |
+
{
|
3877 |
+
"epoch": 0.03607356851404835,
|
3878 |
+
"grad_norm": 11.041610717773438,
|
3879 |
+
"learning_rate": 0.0001415066270686389,
|
3880 |
+
"loss": 3.4059,
|
3881 |
+
"step": 552
|
3882 |
+
},
|
3883 |
+
{
|
3884 |
+
"epoch": 0.036138919181646266,
|
3885 |
+
"grad_norm": 3.9808542728424072,
|
3886 |
+
"learning_rate": 0.00014131470989773158,
|
3887 |
+
"loss": 3.2764,
|
3888 |
+
"step": 553
|
3889 |
+
},
|
3890 |
+
{
|
3891 |
+
"epoch": 0.03620426984924418,
|
3892 |
+
"grad_norm": 13.12489128112793,
|
3893 |
+
"learning_rate": 0.0001411226090595608,
|
3894 |
+
"loss": 3.2788,
|
3895 |
+
"step": 554
|
3896 |
+
},
|
3897 |
+
{
|
3898 |
+
"epoch": 0.03626962051684209,
|
3899 |
+
"grad_norm": 8.727335929870605,
|
3900 |
+
"learning_rate": 0.00014093032540812348,
|
3901 |
+
"loss": 3.2385,
|
3902 |
+
"step": 555
|
3903 |
+
},
|
3904 |
+
{
|
3905 |
+
"epoch": 0.036334971184440006,
|
3906 |
+
"grad_norm": 3.04437255859375,
|
3907 |
+
"learning_rate": 0.0001407378597982293,
|
3908 |
+
"loss": 3.2088,
|
3909 |
+
"step": 556
|
3910 |
+
},
|
3911 |
+
{
|
3912 |
+
"epoch": 0.03640032185203792,
|
3913 |
+
"grad_norm": 3.878338575363159,
|
3914 |
+
"learning_rate": 0.00014054521308549673,
|
3915 |
+
"loss": 3.1957,
|
3916 |
+
"step": 557
|
3917 |
+
},
|
3918 |
+
{
|
3919 |
+
"epoch": 0.03646567251963583,
|
3920 |
+
"grad_norm": 2.566128969192505,
|
3921 |
+
"learning_rate": 0.0001403523861263495,
|
3922 |
+
"loss": 3.0187,
|
3923 |
+
"step": 558
|
3924 |
+
},
|
3925 |
+
{
|
3926 |
+
"epoch": 0.036531023187233745,
|
3927 |
+
"grad_norm": 2.8429207801818848,
|
3928 |
+
"learning_rate": 0.00014015937977801256,
|
3929 |
+
"loss": 3.2802,
|
3930 |
+
"step": 559
|
3931 |
+
},
|
3932 |
+
{
|
3933 |
+
"epoch": 0.03659637385483166,
|
3934 |
+
"grad_norm": 2.0213189125061035,
|
3935 |
+
"learning_rate": 0.00013996619489850822,
|
3936 |
+
"loss": 3.1732,
|
3937 |
+
"step": 560
|
3938 |
+
},
|
3939 |
+
{
|
3940 |
+
"epoch": 0.03666172452242957,
|
3941 |
+
"grad_norm": 2.244999408721924,
|
3942 |
+
"learning_rate": 0.00013977283234665273,
|
3943 |
+
"loss": 3.1004,
|
3944 |
+
"step": 561
|
3945 |
+
},
|
3946 |
+
{
|
3947 |
+
"epoch": 0.036727075190027485,
|
3948 |
+
"grad_norm": 1.8090487718582153,
|
3949 |
+
"learning_rate": 0.00013957929298205195,
|
3950 |
+
"loss": 3.0267,
|
3951 |
+
"step": 562
|
3952 |
+
},
|
3953 |
+
{
|
3954 |
+
"epoch": 0.036792425857625405,
|
3955 |
+
"grad_norm": 1.7424795627593994,
|
3956 |
+
"learning_rate": 0.00013938557766509792,
|
3957 |
+
"loss": 2.8685,
|
3958 |
+
"step": 563
|
3959 |
+
},
|
3960 |
+
{
|
3961 |
+
"epoch": 0.03685777652522332,
|
3962 |
+
"grad_norm": 2.0039384365081787,
|
3963 |
+
"learning_rate": 0.0001391916872569648,
|
3964 |
+
"loss": 2.9659,
|
3965 |
+
"step": 564
|
3966 |
+
},
|
3967 |
+
{
|
3968 |
+
"epoch": 0.03692312719282123,
|
3969 |
+
"grad_norm": 1.816052794456482,
|
3970 |
+
"learning_rate": 0.00013899762261960518,
|
3971 |
+
"loss": 3.0037,
|
3972 |
+
"step": 565
|
3973 |
+
},
|
3974 |
+
{
|
3975 |
+
"epoch": 0.036988477860419144,
|
3976 |
+
"grad_norm": 1.7479966878890991,
|
3977 |
+
"learning_rate": 0.0001388033846157462,
|
3978 |
+
"loss": 2.8999,
|
3979 |
+
"step": 566
|
3980 |
+
},
|
3981 |
+
{
|
3982 |
+
"epoch": 0.03705382852801706,
|
3983 |
+
"grad_norm": 1.7347345352172852,
|
3984 |
+
"learning_rate": 0.0001386089741088857,
|
3985 |
+
"loss": 2.9378,
|
3986 |
+
"step": 567
|
3987 |
+
},
|
3988 |
+
{
|
3989 |
+
"epoch": 0.03711917919561497,
|
3990 |
+
"grad_norm": 1.8642627000808716,
|
3991 |
+
"learning_rate": 0.00013841439196328836,
|
3992 |
+
"loss": 3.0348,
|
3993 |
+
"step": 568
|
3994 |
+
},
|
3995 |
+
{
|
3996 |
+
"epoch": 0.037184529863212884,
|
3997 |
+
"grad_norm": 1.9142097234725952,
|
3998 |
+
"learning_rate": 0.00013821963904398193,
|
3999 |
+
"loss": 3.1449,
|
4000 |
+
"step": 569
|
4001 |
+
},
|
4002 |
+
{
|
4003 |
+
"epoch": 0.0372498805308108,
|
4004 |
+
"grad_norm": 1.9393805265426636,
|
4005 |
+
"learning_rate": 0.00013802471621675338,
|
4006 |
+
"loss": 3.0023,
|
4007 |
+
"step": 570
|
4008 |
+
},
|
4009 |
+
{
|
4010 |
+
"epoch": 0.03731523119840871,
|
4011 |
+
"grad_norm": 1.9792324304580688,
|
4012 |
+
"learning_rate": 0.00013782962434814492,
|
4013 |
+
"loss": 3.0849,
|
4014 |
+
"step": 571
|
4015 |
+
},
|
4016 |
+
{
|
4017 |
+
"epoch": 0.037380581866006624,
|
4018 |
+
"grad_norm": 2.1985344886779785,
|
4019 |
+
"learning_rate": 0.00013763436430545034,
|
4020 |
+
"loss": 2.9901,
|
4021 |
+
"step": 572
|
4022 |
+
},
|
4023 |
+
{
|
4024 |
+
"epoch": 0.03744593253360454,
|
4025 |
+
"grad_norm": 1.9945766925811768,
|
4026 |
+
"learning_rate": 0.00013743893695671096,
|
4027 |
+
"loss": 2.9633,
|
4028 |
+
"step": 573
|
4029 |
+
},
|
4030 |
+
{
|
4031 |
+
"epoch": 0.03751128320120245,
|
4032 |
+
"grad_norm": 1.930016279220581,
|
4033 |
+
"learning_rate": 0.00013724334317071198,
|
4034 |
+
"loss": 3.007,
|
4035 |
+
"step": 574
|
4036 |
+
},
|
4037 |
+
{
|
4038 |
+
"epoch": 0.03757663386880036,
|
4039 |
+
"grad_norm": 2.070556402206421,
|
4040 |
+
"learning_rate": 0.00013704758381697844,
|
4041 |
+
"loss": 3.1439,
|
4042 |
+
"step": 575
|
4043 |
+
},
|
4044 |
+
{
|
4045 |
+
"epoch": 0.037641984536398276,
|
4046 |
+
"grad_norm": 2.0270776748657227,
|
4047 |
+
"learning_rate": 0.00013685165976577146,
|
4048 |
+
"loss": 3.0146,
|
4049 |
+
"step": 576
|
4050 |
+
},
|
4051 |
+
{
|
4052 |
+
"epoch": 0.037707335203996196,
|
4053 |
+
"grad_norm": 2.047429323196411,
|
4054 |
+
"learning_rate": 0.0001366555718880843,
|
4055 |
+
"loss": 2.8983,
|
4056 |
+
"step": 577
|
4057 |
+
},
|
4058 |
+
{
|
4059 |
+
"epoch": 0.03777268587159411,
|
4060 |
+
"grad_norm": 2.2146401405334473,
|
4061 |
+
"learning_rate": 0.00013645932105563844,
|
4062 |
+
"loss": 2.9096,
|
4063 |
+
"step": 578
|
4064 |
+
},
|
4065 |
+
{
|
4066 |
+
"epoch": 0.03783803653919202,
|
4067 |
+
"grad_norm": 2.0988502502441406,
|
4068 |
+
"learning_rate": 0.00013626290814088005,
|
4069 |
+
"loss": 2.9559,
|
4070 |
+
"step": 579
|
4071 |
+
},
|
4072 |
+
{
|
4073 |
+
"epoch": 0.037903387206789936,
|
4074 |
+
"grad_norm": 2.1318140029907227,
|
4075 |
+
"learning_rate": 0.00013606633401697557,
|
4076 |
+
"loss": 2.8678,
|
4077 |
+
"step": 580
|
4078 |
+
},
|
4079 |
+
{
|
4080 |
+
"epoch": 0.03796873787438785,
|
4081 |
+
"grad_norm": 2.47351336479187,
|
4082 |
+
"learning_rate": 0.00013586959955780824,
|
4083 |
+
"loss": 3.1913,
|
4084 |
+
"step": 581
|
4085 |
+
},
|
4086 |
+
{
|
4087 |
+
"epoch": 0.03803408854198576,
|
4088 |
+
"grad_norm": 2.630089044570923,
|
4089 |
+
"learning_rate": 0.00013567270563797398,
|
4090 |
+
"loss": 3.4317,
|
4091 |
+
"step": 582
|
4092 |
+
},
|
4093 |
+
{
|
4094 |
+
"epoch": 0.038099439209583676,
|
4095 |
+
"grad_norm": 2.557398557662964,
|
4096 |
+
"learning_rate": 0.00013547565313277776,
|
4097 |
+
"loss": 2.9004,
|
4098 |
+
"step": 583
|
4099 |
+
},
|
4100 |
+
{
|
4101 |
+
"epoch": 0.03816478987718159,
|
4102 |
+
"grad_norm": 2.377253293991089,
|
4103 |
+
"learning_rate": 0.00013527844291822948,
|
4104 |
+
"loss": 2.8715,
|
4105 |
+
"step": 584
|
4106 |
+
},
|
4107 |
+
{
|
4108 |
+
"epoch": 0.0382301405447795,
|
4109 |
+
"grad_norm": 2.532958984375,
|
4110 |
+
"learning_rate": 0.0001350810758710401,
|
4111 |
+
"loss": 3.0972,
|
4112 |
+
"step": 585
|
4113 |
+
},
|
4114 |
+
{
|
4115 |
+
"epoch": 0.038295491212377415,
|
4116 |
+
"grad_norm": 2.6120903491973877,
|
4117 |
+
"learning_rate": 0.00013488355286861783,
|
4118 |
+
"loss": 2.9928,
|
4119 |
+
"step": 586
|
4120 |
+
},
|
4121 |
+
{
|
4122 |
+
"epoch": 0.03836084187997533,
|
4123 |
+
"grad_norm": 2.865553617477417,
|
4124 |
+
"learning_rate": 0.0001346858747890642,
|
4125 |
+
"loss": 2.9036,
|
4126 |
+
"step": 587
|
4127 |
+
},
|
4128 |
+
{
|
4129 |
+
"epoch": 0.03842619254757324,
|
4130 |
+
"grad_norm": 2.734539270401001,
|
4131 |
+
"learning_rate": 0.00013448804251117003,
|
4132 |
+
"loss": 3.1063,
|
4133 |
+
"step": 588
|
4134 |
+
},
|
4135 |
+
{
|
4136 |
+
"epoch": 0.038491543215171155,
|
4137 |
+
"grad_norm": 3.559826374053955,
|
4138 |
+
"learning_rate": 0.0001342900569144119,
|
4139 |
+
"loss": 3.0002,
|
4140 |
+
"step": 589
|
4141 |
+
},
|
4142 |
+
{
|
4143 |
+
"epoch": 0.03855689388276907,
|
4144 |
+
"grad_norm": 3.4512901306152344,
|
4145 |
+
"learning_rate": 0.0001340919188789477,
|
4146 |
+
"loss": 3.0635,
|
4147 |
+
"step": 590
|
4148 |
+
},
|
4149 |
+
{
|
4150 |
+
"epoch": 0.03862224455036699,
|
4151 |
+
"grad_norm": 4.525444030761719,
|
4152 |
+
"learning_rate": 0.00013389362928561317,
|
4153 |
+
"loss": 2.904,
|
4154 |
+
"step": 591
|
4155 |
+
},
|
4156 |
+
{
|
4157 |
+
"epoch": 0.0386875952179649,
|
4158 |
+
"grad_norm": 2.985776424407959,
|
4159 |
+
"learning_rate": 0.00013369518901591772,
|
4160 |
+
"loss": 2.8948,
|
4161 |
+
"step": 592
|
4162 |
+
},
|
4163 |
+
{
|
4164 |
+
"epoch": 0.038752945885562814,
|
4165 |
+
"grad_norm": 3.0237293243408203,
|
4166 |
+
"learning_rate": 0.00013349659895204067,
|
4167 |
+
"loss": 2.6273,
|
4168 |
+
"step": 593
|
4169 |
+
},
|
4170 |
+
{
|
4171 |
+
"epoch": 0.03881829655316073,
|
4172 |
+
"grad_norm": 3.293497323989868,
|
4173 |
+
"learning_rate": 0.0001332978599768272,
|
4174 |
+
"loss": 2.5571,
|
4175 |
+
"step": 594
|
4176 |
+
},
|
4177 |
+
{
|
4178 |
+
"epoch": 0.03888364722075864,
|
4179 |
+
"grad_norm": 4.987462997436523,
|
4180 |
+
"learning_rate": 0.00013309897297378455,
|
4181 |
+
"loss": 3.0358,
|
4182 |
+
"step": 595
|
4183 |
+
},
|
4184 |
+
{
|
4185 |
+
"epoch": 0.038948997888356554,
|
4186 |
+
"grad_norm": 4.07577657699585,
|
4187 |
+
"learning_rate": 0.00013289993882707797,
|
4188 |
+
"loss": 2.6679,
|
4189 |
+
"step": 596
|
4190 |
+
},
|
4191 |
+
{
|
4192 |
+
"epoch": 0.03901434855595447,
|
4193 |
+
"grad_norm": 4.8090901374816895,
|
4194 |
+
"learning_rate": 0.00013270075842152678,
|
4195 |
+
"loss": 2.8967,
|
4196 |
+
"step": 597
|
4197 |
+
},
|
4198 |
+
{
|
4199 |
+
"epoch": 0.03907969922355238,
|
4200 |
+
"grad_norm": 6.000687599182129,
|
4201 |
+
"learning_rate": 0.00013250143264260074,
|
4202 |
+
"loss": 2.8839,
|
4203 |
+
"step": 598
|
4204 |
+
},
|
4205 |
+
{
|
4206 |
+
"epoch": 0.039145049891150294,
|
4207 |
+
"grad_norm": 9.676702499389648,
|
4208 |
+
"learning_rate": 0.0001323019623764156,
|
4209 |
+
"loss": 3.0498,
|
4210 |
+
"step": 599
|
4211 |
+
},
|
4212 |
+
{
|
4213 |
+
"epoch": 0.03921040055874821,
|
4214 |
+
"grad_norm": 15.539387702941895,
|
4215 |
+
"learning_rate": 0.00013210234850972964,
|
4216 |
+
"loss": 2.8019,
|
4217 |
+
"step": 600
|
4218 |
+
},
|
4219 |
+
{
|
4220 |
+
"epoch": 0.03927575122634612,
|
4221 |
+
"grad_norm": 32.17361068725586,
|
4222 |
+
"learning_rate": 0.0001319025919299394,
|
4223 |
+
"loss": 3.9383,
|
4224 |
+
"step": 601
|
4225 |
+
},
|
4226 |
+
{
|
4227 |
+
"epoch": 0.03934110189394403,
|
4228 |
+
"grad_norm": 17.129863739013672,
|
4229 |
+
"learning_rate": 0.00013170269352507597,
|
4230 |
+
"loss": 3.4502,
|
4231 |
+
"step": 602
|
4232 |
+
},
|
4233 |
+
{
|
4234 |
+
"epoch": 0.039406452561541946,
|
4235 |
+
"grad_norm": 5.781350135803223,
|
4236 |
+
"learning_rate": 0.0001315026541838008,
|
4237 |
+
"loss": 3.1775,
|
4238 |
+
"step": 603
|
4239 |
+
},
|
4240 |
+
{
|
4241 |
+
"epoch": 0.03947180322913986,
|
4242 |
+
"grad_norm": 20.643762588500977,
|
4243 |
+
"learning_rate": 0.00013130247479540202,
|
4244 |
+
"loss": 3.394,
|
4245 |
+
"step": 604
|
4246 |
+
},
|
4247 |
+
{
|
4248 |
+
"epoch": 0.03953715389673778,
|
4249 |
+
"grad_norm": 15.9223051071167,
|
4250 |
+
"learning_rate": 0.00013110215624979025,
|
4251 |
+
"loss": 3.3603,
|
4252 |
+
"step": 605
|
4253 |
+
},
|
4254 |
+
{
|
4255 |
+
"epoch": 0.03960250456433569,
|
4256 |
+
"grad_norm": 3.7204947471618652,
|
4257 |
+
"learning_rate": 0.00013090169943749476,
|
4258 |
+
"loss": 3.1395,
|
4259 |
+
"step": 606
|
4260 |
+
},
|
4261 |
+
{
|
4262 |
+
"epoch": 0.039667855231933606,
|
4263 |
+
"grad_norm": 2.6271538734436035,
|
4264 |
+
"learning_rate": 0.00013070110524965954,
|
4265 |
+
"loss": 3.0419,
|
4266 |
+
"step": 607
|
4267 |
+
},
|
4268 |
+
{
|
4269 |
+
"epoch": 0.03973320589953152,
|
4270 |
+
"grad_norm": 2.098278284072876,
|
4271 |
+
"learning_rate": 0.00013050037457803924,
|
4272 |
+
"loss": 3.0074,
|
4273 |
+
"step": 608
|
4274 |
+
},
|
4275 |
+
{
|
4276 |
+
"epoch": 0.03979855656712943,
|
4277 |
+
"grad_norm": 2.050483226776123,
|
4278 |
+
"learning_rate": 0.0001302995083149953,
|
4279 |
+
"loss": 3.1576,
|
4280 |
+
"step": 609
|
4281 |
+
},
|
4282 |
+
{
|
4283 |
+
"epoch": 0.039863907234727346,
|
4284 |
+
"grad_norm": 3.23539662361145,
|
4285 |
+
"learning_rate": 0.0001300985073534919,
|
4286 |
+
"loss": 3.1858,
|
4287 |
+
"step": 610
|
4288 |
+
},
|
4289 |
+
{
|
4290 |
+
"epoch": 0.03992925790232526,
|
4291 |
+
"grad_norm": 2.2334954738616943,
|
4292 |
+
"learning_rate": 0.00012989737258709203,
|
4293 |
+
"loss": 3.1913,
|
4294 |
+
"step": 611
|
4295 |
+
},
|
4296 |
+
{
|
4297 |
+
"epoch": 0.03999460856992317,
|
4298 |
+
"grad_norm": 1.9432390928268433,
|
4299 |
+
"learning_rate": 0.00012969610490995358,
|
4300 |
+
"loss": 3.0337,
|
4301 |
+
"step": 612
|
4302 |
+
},
|
4303 |
+
{
|
4304 |
+
"epoch": 0.040059959237521085,
|
4305 |
+
"grad_norm": 1.6395848989486694,
|
4306 |
+
"learning_rate": 0.00012949470521682528,
|
4307 |
+
"loss": 2.802,
|
4308 |
+
"step": 613
|
4309 |
+
},
|
4310 |
+
{
|
4311 |
+
"epoch": 0.040125309905119,
|
4312 |
+
"grad_norm": 1.7931872606277466,
|
4313 |
+
"learning_rate": 0.0001292931744030427,
|
4314 |
+
"loss": 3.0174,
|
4315 |
+
"step": 614
|
4316 |
+
},
|
4317 |
+
{
|
4318 |
+
"epoch": 0.04019066057271691,
|
4319 |
+
"grad_norm": 1.8623560667037964,
|
4320 |
+
"learning_rate": 0.0001290915133645243,
|
4321 |
+
"loss": 3.0511,
|
4322 |
+
"step": 615
|
4323 |
+
},
|
4324 |
+
{
|
4325 |
+
"epoch": 0.040256011240314825,
|
4326 |
+
"grad_norm": 1.8954159021377563,
|
4327 |
+
"learning_rate": 0.00012888972299776754,
|
4328 |
+
"loss": 3.0324,
|
4329 |
+
"step": 616
|
4330 |
+
},
|
4331 |
+
{
|
4332 |
+
"epoch": 0.04032136190791274,
|
4333 |
+
"grad_norm": 1.8624866008758545,
|
4334 |
+
"learning_rate": 0.00012868780419984482,
|
4335 |
+
"loss": 2.8377,
|
4336 |
+
"step": 617
|
4337 |
+
},
|
4338 |
+
{
|
4339 |
+
"epoch": 0.04038671257551065,
|
4340 |
+
"grad_norm": 1.9576878547668457,
|
4341 |
+
"learning_rate": 0.00012848575786839943,
|
4342 |
+
"loss": 3.0353,
|
4343 |
+
"step": 618
|
4344 |
+
},
|
4345 |
+
{
|
4346 |
+
"epoch": 0.040452063243108564,
|
4347 |
+
"grad_norm": 2.503295660018921,
|
4348 |
+
"learning_rate": 0.0001282835849016416,
|
4349 |
+
"loss": 3.1967,
|
4350 |
+
"step": 619
|
4351 |
+
},
|
4352 |
+
{
|
4353 |
+
"epoch": 0.040517413910706485,
|
4354 |
+
"grad_norm": 2.1141245365142822,
|
4355 |
+
"learning_rate": 0.00012808128619834461,
|
4356 |
+
"loss": 3.1058,
|
4357 |
+
"step": 620
|
4358 |
+
},
|
4359 |
+
{
|
4360 |
+
"epoch": 0.0405827645783044,
|
4361 |
+
"grad_norm": 1.9845447540283203,
|
4362 |
+
"learning_rate": 0.0001278788626578407,
|
4363 |
+
"loss": 2.9753,
|
4364 |
+
"step": 621
|
4365 |
+
},
|
4366 |
+
{
|
4367 |
+
"epoch": 0.04064811524590231,
|
4368 |
+
"grad_norm": 2.3292746543884277,
|
4369 |
+
"learning_rate": 0.00012767631518001698,
|
4370 |
+
"loss": 3.3823,
|
4371 |
+
"step": 622
|
4372 |
+
},
|
4373 |
+
{
|
4374 |
+
"epoch": 0.040713465913500224,
|
4375 |
+
"grad_norm": 1.9060697555541992,
|
4376 |
+
"learning_rate": 0.00012747364466531163,
|
4377 |
+
"loss": 2.8823,
|
4378 |
+
"step": 623
|
4379 |
+
},
|
4380 |
+
{
|
4381 |
+
"epoch": 0.04077881658109814,
|
4382 |
+
"grad_norm": 2.004152774810791,
|
4383 |
+
"learning_rate": 0.00012727085201470973,
|
4384 |
+
"loss": 3.0116,
|
4385 |
+
"step": 624
|
4386 |
+
},
|
4387 |
+
{
|
4388 |
+
"epoch": 0.04084416724869605,
|
4389 |
+
"grad_norm": 2.2095632553100586,
|
4390 |
+
"learning_rate": 0.00012706793812973941,
|
4391 |
+
"loss": 3.1009,
|
4392 |
+
"step": 625
|
4393 |
+
},
|
4394 |
+
{
|
4395 |
+
"epoch": 0.040909517916293964,
|
4396 |
+
"grad_norm": 2.1803863048553467,
|
4397 |
+
"learning_rate": 0.0001268649039124677,
|
4398 |
+
"loss": 2.8572,
|
4399 |
+
"step": 626
|
4400 |
+
},
|
4401 |
+
{
|
4402 |
+
"epoch": 0.04097486858389188,
|
4403 |
+
"grad_norm": 2.2665598392486572,
|
4404 |
+
"learning_rate": 0.00012666175026549662,
|
4405 |
+
"loss": 3.0518,
|
4406 |
+
"step": 627
|
4407 |
+
},
|
4408 |
+
{
|
4409 |
+
"epoch": 0.04104021925148979,
|
4410 |
+
"grad_norm": 2.5089712142944336,
|
4411 |
+
"learning_rate": 0.000126458478091959,
|
4412 |
+
"loss": 3.1027,
|
4413 |
+
"step": 628
|
4414 |
+
},
|
4415 |
+
{
|
4416 |
+
"epoch": 0.0411055699190877,
|
4417 |
+
"grad_norm": 2.315622329711914,
|
4418 |
+
"learning_rate": 0.00012625508829551473,
|
4419 |
+
"loss": 3.0324,
|
4420 |
+
"step": 629
|
4421 |
+
},
|
4422 |
+
{
|
4423 |
+
"epoch": 0.041170920586685616,
|
4424 |
+
"grad_norm": 2.4968509674072266,
|
4425 |
+
"learning_rate": 0.00012605158178034654,
|
4426 |
+
"loss": 2.9036,
|
4427 |
+
"step": 630
|
4428 |
+
},
|
4429 |
+
{
|
4430 |
+
"epoch": 0.04123627125428353,
|
4431 |
+
"grad_norm": 2.250495433807373,
|
4432 |
+
"learning_rate": 0.00012584795945115603,
|
4433 |
+
"loss": 2.816,
|
4434 |
+
"step": 631
|
4435 |
+
},
|
4436 |
+
{
|
4437 |
+
"epoch": 0.04130162192188144,
|
4438 |
+
"grad_norm": 2.2009661197662354,
|
4439 |
+
"learning_rate": 0.0001256442222131597,
|
4440 |
+
"loss": 2.8616,
|
4441 |
+
"step": 632
|
4442 |
+
},
|
4443 |
+
{
|
4444 |
+
"epoch": 0.041366972589479356,
|
4445 |
+
"grad_norm": 2.7355098724365234,
|
4446 |
+
"learning_rate": 0.0001254403709720848,
|
4447 |
+
"loss": 3.047,
|
4448 |
+
"step": 633
|
4449 |
+
},
|
4450 |
+
{
|
4451 |
+
"epoch": 0.041432323257077276,
|
4452 |
+
"grad_norm": 3.254558563232422,
|
4453 |
+
"learning_rate": 0.0001252364066341655,
|
4454 |
+
"loss": 3.284,
|
4455 |
+
"step": 634
|
4456 |
+
},
|
4457 |
+
{
|
4458 |
+
"epoch": 0.04149767392467519,
|
4459 |
+
"grad_norm": 3.5515785217285156,
|
4460 |
+
"learning_rate": 0.00012503233010613865,
|
4461 |
+
"loss": 3.206,
|
4462 |
+
"step": 635
|
4463 |
+
},
|
4464 |
+
{
|
4465 |
+
"epoch": 0.0415630245922731,
|
4466 |
+
"grad_norm": 2.704427480697632,
|
4467 |
+
"learning_rate": 0.00012482814229523997,
|
4468 |
+
"loss": 3.2234,
|
4469 |
+
"step": 636
|
4470 |
+
},
|
4471 |
+
{
|
4472 |
+
"epoch": 0.041628375259871016,
|
4473 |
+
"grad_norm": 3.272678852081299,
|
4474 |
+
"learning_rate": 0.00012462384410919975,
|
4475 |
+
"loss": 3.1285,
|
4476 |
+
"step": 637
|
4477 |
+
},
|
4478 |
+
{
|
4479 |
+
"epoch": 0.04169372592746893,
|
4480 |
+
"grad_norm": 3.133451461791992,
|
4481 |
+
"learning_rate": 0.00012441943645623903,
|
4482 |
+
"loss": 2.9501,
|
4483 |
+
"step": 638
|
4484 |
+
},
|
4485 |
+
{
|
4486 |
+
"epoch": 0.04175907659506684,
|
4487 |
+
"grad_norm": 3.1467037200927734,
|
4488 |
+
"learning_rate": 0.00012421492024506555,
|
4489 |
+
"loss": 3.1572,
|
4490 |
+
"step": 639
|
4491 |
+
},
|
4492 |
+
{
|
4493 |
+
"epoch": 0.041824427262664755,
|
4494 |
+
"grad_norm": 3.4635956287384033,
|
4495 |
+
"learning_rate": 0.00012401029638486953,
|
4496 |
+
"loss": 2.9697,
|
4497 |
+
"step": 640
|
4498 |
+
},
|
4499 |
+
{
|
4500 |
+
"epoch": 0.04188977793026267,
|
4501 |
+
"grad_norm": 4.2859320640563965,
|
4502 |
+
"learning_rate": 0.0001238055657853198,
|
4503 |
+
"loss": 3.2693,
|
4504 |
+
"step": 641
|
4505 |
+
},
|
4506 |
+
{
|
4507 |
+
"epoch": 0.04195512859786058,
|
4508 |
+
"grad_norm": 3.362356662750244,
|
4509 |
+
"learning_rate": 0.00012360072935655982,
|
4510 |
+
"loss": 2.9823,
|
4511 |
+
"step": 642
|
4512 |
+
},
|
4513 |
+
{
|
4514 |
+
"epoch": 0.042020479265458495,
|
4515 |
+
"grad_norm": 4.812988758087158,
|
4516 |
+
"learning_rate": 0.00012339578800920332,
|
4517 |
+
"loss": 2.679,
|
4518 |
+
"step": 643
|
4519 |
+
},
|
4520 |
+
{
|
4521 |
+
"epoch": 0.04208582993305641,
|
4522 |
+
"grad_norm": 3.6487369537353516,
|
4523 |
+
"learning_rate": 0.00012319074265433063,
|
4524 |
+
"loss": 2.6908,
|
4525 |
+
"step": 644
|
4526 |
+
},
|
4527 |
+
{
|
4528 |
+
"epoch": 0.04215118060065432,
|
4529 |
+
"grad_norm": 4.461695194244385,
|
4530 |
+
"learning_rate": 0.00012298559420348437,
|
4531 |
+
"loss": 3.1041,
|
4532 |
+
"step": 645
|
4533 |
+
},
|
4534 |
+
{
|
4535 |
+
"epoch": 0.042216531268252235,
|
4536 |
+
"grad_norm": 3.465256929397583,
|
4537 |
+
"learning_rate": 0.00012278034356866545,
|
4538 |
+
"loss": 2.6494,
|
4539 |
+
"step": 646
|
4540 |
+
},
|
4541 |
+
{
|
4542 |
+
"epoch": 0.04228188193585015,
|
4543 |
+
"grad_norm": 5.122613906860352,
|
4544 |
+
"learning_rate": 0.00012257499166232907,
|
4545 |
+
"loss": 3.0645,
|
4546 |
+
"step": 647
|
4547 |
+
},
|
4548 |
+
{
|
4549 |
+
"epoch": 0.04234723260344807,
|
4550 |
+
"grad_norm": 4.229803085327148,
|
4551 |
+
"learning_rate": 0.0001223695393973807,
|
4552 |
+
"loss": 2.926,
|
4553 |
+
"step": 648
|
4554 |
+
},
|
4555 |
+
{
|
4556 |
+
"epoch": 0.04241258327104598,
|
4557 |
+
"grad_norm": 5.684296607971191,
|
4558 |
+
"learning_rate": 0.0001221639876871719,
|
4559 |
+
"loss": 3.0356,
|
4560 |
+
"step": 649
|
4561 |
+
},
|
4562 |
+
{
|
4563 |
+
"epoch": 0.042477933938643894,
|
4564 |
+
"grad_norm": 5.228229999542236,
|
4565 |
+
"learning_rate": 0.0001219583374454963,
|
4566 |
+
"loss": 2.5974,
|
4567 |
+
"step": 650
|
4568 |
+
},
|
4569 |
+
{
|
4570 |
+
"epoch": 0.04254328460624181,
|
4571 |
+
"grad_norm": 16.97333526611328,
|
4572 |
+
"learning_rate": 0.00012175258958658564,
|
4573 |
+
"loss": 3.5355,
|
4574 |
+
"step": 651
|
4575 |
+
},
|
4576 |
+
{
|
4577 |
+
"epoch": 0.04260863527383972,
|
4578 |
+
"grad_norm": 6.152676105499268,
|
4579 |
+
"learning_rate": 0.00012154674502510555,
|
4580 |
+
"loss": 3.3318,
|
4581 |
+
"step": 652
|
4582 |
+
},
|
4583 |
+
{
|
4584 |
+
"epoch": 0.042673985941437634,
|
4585 |
+
"grad_norm": 12.877490043640137,
|
4586 |
+
"learning_rate": 0.00012134080467615159,
|
4587 |
+
"loss": 3.3578,
|
4588 |
+
"step": 653
|
4589 |
+
},
|
4590 |
+
{
|
4591 |
+
"epoch": 0.04273933660903555,
|
4592 |
+
"grad_norm": 15.447893142700195,
|
4593 |
+
"learning_rate": 0.00012113476945524513,
|
4594 |
+
"loss": 3.5556,
|
4595 |
+
"step": 654
|
4596 |
+
},
|
4597 |
+
{
|
4598 |
+
"epoch": 0.04280468727663346,
|
4599 |
+
"grad_norm": 7.900871276855469,
|
4600 |
+
"learning_rate": 0.00012092864027832933,
|
4601 |
+
"loss": 3.2906,
|
4602 |
+
"step": 655
|
4603 |
+
},
|
4604 |
+
{
|
4605 |
+
"epoch": 0.04287003794423137,
|
4606 |
+
"grad_norm": 3.3897945880889893,
|
4607 |
+
"learning_rate": 0.000120722418061765,
|
4608 |
+
"loss": 2.9694,
|
4609 |
+
"step": 656
|
4610 |
+
},
|
4611 |
+
{
|
4612 |
+
"epoch": 0.04293538861182929,
|
4613 |
+
"grad_norm": 3.3307113647460938,
|
4614 |
+
"learning_rate": 0.0001205161037223266,
|
4615 |
+
"loss": 3.2379,
|
4616 |
+
"step": 657
|
4617 |
+
},
|
4618 |
+
{
|
4619 |
+
"epoch": 0.0430007392794272,
|
4620 |
+
"grad_norm": 2.4997718334198,
|
4621 |
+
"learning_rate": 0.00012030969817719808,
|
4622 |
+
"loss": 3.101,
|
4623 |
+
"step": 658
|
4624 |
+
},
|
4625 |
+
{
|
4626 |
+
"epoch": 0.04306608994702511,
|
4627 |
+
"grad_norm": 2.665391445159912,
|
4628 |
+
"learning_rate": 0.00012010320234396894,
|
4629 |
+
"loss": 3.2717,
|
4630 |
+
"step": 659
|
4631 |
+
},
|
4632 |
+
{
|
4633 |
+
"epoch": 0.043131440614623026,
|
4634 |
+
"grad_norm": 3.035580635070801,
|
4635 |
+
"learning_rate": 0.00011989661714062999,
|
4636 |
+
"loss": 3.1335,
|
4637 |
+
"step": 660
|
4638 |
+
},
|
4639 |
+
{
|
4640 |
+
"epoch": 0.04319679128222094,
|
4641 |
+
"grad_norm": 1.964550495147705,
|
4642 |
+
"learning_rate": 0.0001196899434855693,
|
4643 |
+
"loss": 2.9821,
|
4644 |
+
"step": 661
|
4645 |
+
},
|
4646 |
+
{
|
4647 |
+
"epoch": 0.04326214194981886,
|
4648 |
+
"grad_norm": 2.2508432865142822,
|
4649 |
+
"learning_rate": 0.00011948318229756827,
|
4650 |
+
"loss": 2.8584,
|
4651 |
+
"step": 662
|
4652 |
+
},
|
4653 |
+
{
|
4654 |
+
"epoch": 0.04332749261741677,
|
4655 |
+
"grad_norm": 1.820594310760498,
|
4656 |
+
"learning_rate": 0.00011927633449579735,
|
4657 |
+
"loss": 2.8469,
|
4658 |
+
"step": 663
|
4659 |
+
},
|
4660 |
+
{
|
4661 |
+
"epoch": 0.043392843285014686,
|
4662 |
+
"grad_norm": 2.0927326679229736,
|
4663 |
+
"learning_rate": 0.0001190694009998121,
|
4664 |
+
"loss": 2.9665,
|
4665 |
+
"step": 664
|
4666 |
+
},
|
4667 |
+
{
|
4668 |
+
"epoch": 0.0434581939526126,
|
4669 |
+
"grad_norm": 1.9903268814086914,
|
4670 |
+
"learning_rate": 0.00011886238272954897,
|
4671 |
+
"loss": 3.0826,
|
4672 |
+
"step": 665
|
4673 |
+
},
|
4674 |
+
{
|
4675 |
+
"epoch": 0.04352354462021051,
|
4676 |
+
"grad_norm": 2.0736825466156006,
|
4677 |
+
"learning_rate": 0.00011865528060532127,
|
4678 |
+
"loss": 3.2923,
|
4679 |
+
"step": 666
|
4680 |
+
},
|
4681 |
+
{
|
4682 |
+
"epoch": 0.043588895287808425,
|
4683 |
+
"grad_norm": 1.9887921810150146,
|
4684 |
+
"learning_rate": 0.0001184480955478152,
|
4685 |
+
"loss": 3.1273,
|
4686 |
+
"step": 667
|
4687 |
+
},
|
4688 |
+
{
|
4689 |
+
"epoch": 0.04365424595540634,
|
4690 |
+
"grad_norm": 1.898668646812439,
|
4691 |
+
"learning_rate": 0.00011824082847808558,
|
4692 |
+
"loss": 3.0536,
|
4693 |
+
"step": 668
|
4694 |
+
},
|
4695 |
+
{
|
4696 |
+
"epoch": 0.04371959662300425,
|
4697 |
+
"grad_norm": 2.2333860397338867,
|
4698 |
+
"learning_rate": 0.00011803348031755179,
|
4699 |
+
"loss": 2.7913,
|
4700 |
+
"step": 669
|
4701 |
+
},
|
4702 |
+
{
|
4703 |
+
"epoch": 0.043784947290602165,
|
4704 |
+
"grad_norm": 2.0350661277770996,
|
4705 |
+
"learning_rate": 0.0001178260519879937,
|
4706 |
+
"loss": 2.8106,
|
4707 |
+
"step": 670
|
4708 |
+
},
|
4709 |
+
{
|
4710 |
+
"epoch": 0.04385029795820008,
|
4711 |
+
"grad_norm": 2.0229201316833496,
|
4712 |
+
"learning_rate": 0.00011761854441154767,
|
4713 |
+
"loss": 2.9744,
|
4714 |
+
"step": 671
|
4715 |
+
},
|
4716 |
+
{
|
4717 |
+
"epoch": 0.04391564862579799,
|
4718 |
+
"grad_norm": 2.2588329315185547,
|
4719 |
+
"learning_rate": 0.00011741095851070228,
|
4720 |
+
"loss": 3.1313,
|
4721 |
+
"step": 672
|
4722 |
+
},
|
4723 |
+
{
|
4724 |
+
"epoch": 0.043980999293395905,
|
4725 |
+
"grad_norm": 2.1912214756011963,
|
4726 |
+
"learning_rate": 0.00011720329520829429,
|
4727 |
+
"loss": 2.7155,
|
4728 |
+
"step": 673
|
4729 |
+
},
|
4730 |
+
{
|
4731 |
+
"epoch": 0.04404634996099382,
|
4732 |
+
"grad_norm": 2.086419105529785,
|
4733 |
+
"learning_rate": 0.0001169955554275046,
|
4734 |
+
"loss": 2.9657,
|
4735 |
+
"step": 674
|
4736 |
+
},
|
4737 |
+
{
|
4738 |
+
"epoch": 0.04411170062859173,
|
4739 |
+
"grad_norm": 2.2901723384857178,
|
4740 |
+
"learning_rate": 0.0001167877400918541,
|
4741 |
+
"loss": 3.1845,
|
4742 |
+
"step": 675
|
4743 |
+
},
|
4744 |
+
{
|
4745 |
+
"epoch": 0.04417705129618965,
|
4746 |
+
"grad_norm": 2.07425594329834,
|
4747 |
+
"learning_rate": 0.00011657985012519952,
|
4748 |
+
"loss": 2.9775,
|
4749 |
+
"step": 676
|
4750 |
+
},
|
4751 |
+
{
|
4752 |
+
"epoch": 0.044242401963787564,
|
4753 |
+
"grad_norm": 2.1768293380737305,
|
4754 |
+
"learning_rate": 0.00011637188645172944,
|
4755 |
+
"loss": 3.2035,
|
4756 |
+
"step": 677
|
4757 |
+
},
|
4758 |
+
{
|
4759 |
+
"epoch": 0.04430775263138548,
|
4760 |
+
"grad_norm": 2.202507734298706,
|
4761 |
+
"learning_rate": 0.00011616384999596006,
|
4762 |
+
"loss": 3.0454,
|
4763 |
+
"step": 678
|
4764 |
+
},
|
4765 |
+
{
|
4766 |
+
"epoch": 0.04437310329898339,
|
4767 |
+
"grad_norm": 2.209707736968994,
|
4768 |
+
"learning_rate": 0.00011595574168273111,
|
4769 |
+
"loss": 3.1953,
|
4770 |
+
"step": 679
|
4771 |
+
},
|
4772 |
+
{
|
4773 |
+
"epoch": 0.044438453966581304,
|
4774 |
+
"grad_norm": 2.144770383834839,
|
4775 |
+
"learning_rate": 0.0001157475624372018,
|
4776 |
+
"loss": 2.9541,
|
4777 |
+
"step": 680
|
4778 |
+
},
|
4779 |
+
{
|
4780 |
+
"epoch": 0.04450380463417922,
|
4781 |
+
"grad_norm": 2.4799458980560303,
|
4782 |
+
"learning_rate": 0.0001155393131848467,
|
4783 |
+
"loss": 3.2559,
|
4784 |
+
"step": 681
|
4785 |
+
},
|
4786 |
+
{
|
4787 |
+
"epoch": 0.04456915530177713,
|
4788 |
+
"grad_norm": 2.293464183807373,
|
4789 |
+
"learning_rate": 0.00011533099485145155,
|
4790 |
+
"loss": 3.1855,
|
4791 |
+
"step": 682
|
4792 |
+
},
|
4793 |
+
{
|
4794 |
+
"epoch": 0.04463450596937504,
|
4795 |
+
"grad_norm": 2.608764886856079,
|
4796 |
+
"learning_rate": 0.00011512260836310924,
|
4797 |
+
"loss": 2.9465,
|
4798 |
+
"step": 683
|
4799 |
+
},
|
4800 |
+
{
|
4801 |
+
"epoch": 0.04469985663697296,
|
4802 |
+
"grad_norm": 2.3550198078155518,
|
4803 |
+
"learning_rate": 0.00011491415464621562,
|
4804 |
+
"loss": 2.914,
|
4805 |
+
"step": 684
|
4806 |
+
},
|
4807 |
+
{
|
4808 |
+
"epoch": 0.04476520730457087,
|
4809 |
+
"grad_norm": 2.5664079189300537,
|
4810 |
+
"learning_rate": 0.00011470563462746541,
|
4811 |
+
"loss": 2.8957,
|
4812 |
+
"step": 685
|
4813 |
+
},
|
4814 |
+
{
|
4815 |
+
"epoch": 0.04483055797216878,
|
4816 |
+
"grad_norm": 2.570154905319214,
|
4817 |
+
"learning_rate": 0.00011449704923384812,
|
4818 |
+
"loss": 3.1221,
|
4819 |
+
"step": 686
|
4820 |
+
},
|
4821 |
+
{
|
4822 |
+
"epoch": 0.044895908639766696,
|
4823 |
+
"grad_norm": 6.49520206451416,
|
4824 |
+
"learning_rate": 0.00011428839939264382,
|
4825 |
+
"loss": 3.0106,
|
4826 |
+
"step": 687
|
4827 |
+
},
|
4828 |
+
{
|
4829 |
+
"epoch": 0.04496125930736461,
|
4830 |
+
"grad_norm": 3.2794463634490967,
|
4831 |
+
"learning_rate": 0.0001140796860314191,
|
4832 |
+
"loss": 3.0885,
|
4833 |
+
"step": 688
|
4834 |
+
},
|
4835 |
+
{
|
4836 |
+
"epoch": 0.04502660997496252,
|
4837 |
+
"grad_norm": 4.257885932922363,
|
4838 |
+
"learning_rate": 0.00011387091007802297,
|
4839 |
+
"loss": 3.0201,
|
4840 |
+
"step": 689
|
4841 |
+
},
|
4842 |
+
{
|
4843 |
+
"epoch": 0.045091960642560436,
|
4844 |
+
"grad_norm": 4.146145820617676,
|
4845 |
+
"learning_rate": 0.0001136620724605827,
|
4846 |
+
"loss": 3.2533,
|
4847 |
+
"step": 690
|
4848 |
+
},
|
4849 |
+
{
|
4850 |
+
"epoch": 0.045157311310158356,
|
4851 |
+
"grad_norm": 3.103212356567383,
|
4852 |
+
"learning_rate": 0.00011345317410749964,
|
4853 |
+
"loss": 2.7706,
|
4854 |
+
"step": 691
|
4855 |
+
},
|
4856 |
+
{
|
4857 |
+
"epoch": 0.04522266197775627,
|
4858 |
+
"grad_norm": 3.2333109378814697,
|
4859 |
+
"learning_rate": 0.00011324421594744516,
|
4860 |
+
"loss": 2.8265,
|
4861 |
+
"step": 692
|
4862 |
+
},
|
4863 |
+
{
|
4864 |
+
"epoch": 0.04528801264535418,
|
4865 |
+
"grad_norm": 5.440866947174072,
|
4866 |
+
"learning_rate": 0.00011303519890935656,
|
4867 |
+
"loss": 2.9213,
|
4868 |
+
"step": 693
|
4869 |
+
},
|
4870 |
+
{
|
4871 |
+
"epoch": 0.045353363312952095,
|
4872 |
+
"grad_norm": 4.879655838012695,
|
4873 |
+
"learning_rate": 0.00011282612392243286,
|
4874 |
+
"loss": 2.8601,
|
4875 |
+
"step": 694
|
4876 |
+
},
|
4877 |
+
{
|
4878 |
+
"epoch": 0.04541871398055001,
|
4879 |
+
"grad_norm": 3.385190486907959,
|
4880 |
+
"learning_rate": 0.00011261699191613066,
|
4881 |
+
"loss": 2.8724,
|
4882 |
+
"step": 695
|
4883 |
+
},
|
4884 |
+
{
|
4885 |
+
"epoch": 0.04548406464814792,
|
4886 |
+
"grad_norm": 5.638006687164307,
|
4887 |
+
"learning_rate": 0.00011240780382016005,
|
4888 |
+
"loss": 2.6965,
|
4889 |
+
"step": 696
|
4890 |
+
},
|
4891 |
+
{
|
4892 |
+
"epoch": 0.045549415315745835,
|
4893 |
+
"grad_norm": 6.110171318054199,
|
4894 |
+
"learning_rate": 0.00011219856056448051,
|
4895 |
+
"loss": 3.0768,
|
4896 |
+
"step": 697
|
4897 |
+
},
|
4898 |
+
{
|
4899 |
+
"epoch": 0.04561476598334375,
|
4900 |
+
"grad_norm": 3.941072463989258,
|
4901 |
+
"learning_rate": 0.00011198926307929664,
|
4902 |
+
"loss": 2.6718,
|
4903 |
+
"step": 698
|
4904 |
+
},
|
4905 |
+
{
|
4906 |
+
"epoch": 0.04568011665094166,
|
4907 |
+
"grad_norm": 3.8805644512176514,
|
4908 |
+
"learning_rate": 0.00011177991229505431,
|
4909 |
+
"loss": 2.6361,
|
4910 |
+
"step": 699
|
4911 |
+
},
|
4912 |
+
{
|
4913 |
+
"epoch": 0.045745467318539575,
|
4914 |
+
"grad_norm": 5.5727972984313965,
|
4915 |
+
"learning_rate": 0.00011157050914243614,
|
4916 |
+
"loss": 2.6005,
|
4917 |
+
"step": 700
|
4918 |
+
},
|
4919 |
+
{
|
4920 |
+
"epoch": 0.04581081798613749,
|
4921 |
+
"grad_norm": 14.964508056640625,
|
4922 |
+
"learning_rate": 0.00011136105455235766,
|
4923 |
+
"loss": 3.436,
|
4924 |
+
"step": 701
|
4925 |
+
},
|
4926 |
+
{
|
4927 |
+
"epoch": 0.0458761686537354,
|
4928 |
+
"grad_norm": 8.832298278808594,
|
4929 |
+
"learning_rate": 0.00011115154945596305,
|
4930 |
+
"loss": 3.2624,
|
4931 |
+
"step": 702
|
4932 |
+
},
|
4933 |
+
{
|
4934 |
+
"epoch": 0.045941519321333314,
|
4935 |
+
"grad_norm": 8.178778648376465,
|
4936 |
+
"learning_rate": 0.00011094199478462095,
|
4937 |
+
"loss": 3.0941,
|
4938 |
+
"step": 703
|
4939 |
+
},
|
4940 |
+
{
|
4941 |
+
"epoch": 0.04600686998893123,
|
4942 |
+
"grad_norm": 6.393551826477051,
|
4943 |
+
"learning_rate": 0.00011073239146992054,
|
4944 |
+
"loss": 3.2571,
|
4945 |
+
"step": 704
|
4946 |
+
},
|
4947 |
+
{
|
4948 |
+
"epoch": 0.04607222065652915,
|
4949 |
+
"grad_norm": 3.2871720790863037,
|
4950 |
+
"learning_rate": 0.00011052274044366711,
|
4951 |
+
"loss": 3.2479,
|
4952 |
+
"step": 705
|
4953 |
+
},
|
4954 |
+
{
|
4955 |
+
"epoch": 0.04613757132412706,
|
4956 |
+
"grad_norm": 4.299026966094971,
|
4957 |
+
"learning_rate": 0.00011031304263787812,
|
4958 |
+
"loss": 3.0419,
|
4959 |
+
"step": 706
|
4960 |
+
},
|
4961 |
+
{
|
4962 |
+
"epoch": 0.046202921991724974,
|
4963 |
+
"grad_norm": 2.4954004287719727,
|
4964 |
+
"learning_rate": 0.00011010329898477891,
|
4965 |
+
"loss": 3.1365,
|
4966 |
+
"step": 707
|
4967 |
+
},
|
4968 |
+
{
|
4969 |
+
"epoch": 0.04626827265932289,
|
4970 |
+
"grad_norm": 2.39926815032959,
|
4971 |
+
"learning_rate": 0.0001098935104167988,
|
4972 |
+
"loss": 3.1031,
|
4973 |
+
"step": 708
|
4974 |
+
},
|
4975 |
+
{
|
4976 |
+
"epoch": 0.0463336233269208,
|
4977 |
+
"grad_norm": 1.965259313583374,
|
4978 |
+
"learning_rate": 0.00010968367786656663,
|
4979 |
+
"loss": 3.0447,
|
4980 |
+
"step": 709
|
4981 |
+
},
|
4982 |
+
{
|
4983 |
+
"epoch": 0.046398973994518714,
|
4984 |
+
"grad_norm": 2.1634223461151123,
|
4985 |
+
"learning_rate": 0.00010947380226690684,
|
4986 |
+
"loss": 3.0273,
|
4987 |
+
"step": 710
|
4988 |
+
},
|
4989 |
+
{
|
4990 |
+
"epoch": 0.04646432466211663,
|
4991 |
+
"grad_norm": 1.858903408050537,
|
4992 |
+
"learning_rate": 0.00010926388455083522,
|
4993 |
+
"loss": 3.0245,
|
4994 |
+
"step": 711
|
4995 |
+
},
|
4996 |
+
{
|
4997 |
+
"epoch": 0.04652967532971454,
|
4998 |
+
"grad_norm": 2.939605474472046,
|
4999 |
+
"learning_rate": 0.00010905392565155477,
|
5000 |
+
"loss": 3.1052,
|
5001 |
+
"step": 712
|
5002 |
+
},
|
5003 |
+
{
|
5004 |
+
"epoch": 0.04659502599731245,
|
5005 |
+
"grad_norm": 1.939249873161316,
|
5006 |
+
"learning_rate": 0.00010884392650245165,
|
5007 |
+
"loss": 2.9436,
|
5008 |
+
"step": 713
|
5009 |
+
},
|
5010 |
+
{
|
5011 |
+
"epoch": 0.046660376664910366,
|
5012 |
+
"grad_norm": 1.8927584886550903,
|
5013 |
+
"learning_rate": 0.00010863388803709089,
|
5014 |
+
"loss": 3.0052,
|
5015 |
+
"step": 714
|
5016 |
+
},
|
5017 |
+
{
|
5018 |
+
"epoch": 0.04672572733250828,
|
5019 |
+
"grad_norm": 2.38651704788208,
|
5020 |
+
"learning_rate": 0.00010842381118921232,
|
5021 |
+
"loss": 2.8575,
|
5022 |
+
"step": 715
|
5023 |
+
},
|
5024 |
+
{
|
5025 |
+
"epoch": 0.04679107800010619,
|
5026 |
+
"grad_norm": 1.9476335048675537,
|
5027 |
+
"learning_rate": 0.00010821369689272638,
|
5028 |
+
"loss": 2.8868,
|
5029 |
+
"step": 716
|
5030 |
+
},
|
5031 |
+
{
|
5032 |
+
"epoch": 0.046856428667704106,
|
5033 |
+
"grad_norm": 2.5600156784057617,
|
5034 |
+
"learning_rate": 0.00010800354608171003,
|
5035 |
+
"loss": 3.0852,
|
5036 |
+
"step": 717
|
5037 |
+
},
|
5038 |
+
{
|
5039 |
+
"epoch": 0.04692177933530202,
|
5040 |
+
"grad_norm": 1.9691524505615234,
|
5041 |
+
"learning_rate": 0.00010779335969040252,
|
5042 |
+
"loss": 3.1693,
|
5043 |
+
"step": 718
|
5044 |
+
},
|
5045 |
+
{
|
5046 |
+
"epoch": 0.04698713000289994,
|
5047 |
+
"grad_norm": 2.0690503120422363,
|
5048 |
+
"learning_rate": 0.00010758313865320134,
|
5049 |
+
"loss": 3.0951,
|
5050 |
+
"step": 719
|
5051 |
+
},
|
5052 |
+
{
|
5053 |
+
"epoch": 0.04705248067049785,
|
5054 |
+
"grad_norm": 1.8928483724594116,
|
5055 |
+
"learning_rate": 0.00010737288390465792,
|
5056 |
+
"loss": 2.949,
|
5057 |
+
"step": 720
|
5058 |
+
},
|
5059 |
+
{
|
5060 |
+
"epoch": 0.047117831338095766,
|
5061 |
+
"grad_norm": 1.9570032358169556,
|
5062 |
+
"learning_rate": 0.00010716259637947357,
|
5063 |
+
"loss": 3.0925,
|
5064 |
+
"step": 721
|
5065 |
+
},
|
5066 |
+
{
|
5067 |
+
"epoch": 0.04718318200569368,
|
5068 |
+
"grad_norm": 2.15844988822937,
|
5069 |
+
"learning_rate": 0.00010695227701249537,
|
5070 |
+
"loss": 3.1285,
|
5071 |
+
"step": 722
|
5072 |
+
},
|
5073 |
+
{
|
5074 |
+
"epoch": 0.04724853267329159,
|
5075 |
+
"grad_norm": 2.1201603412628174,
|
5076 |
+
"learning_rate": 0.00010674192673871191,
|
5077 |
+
"loss": 2.9518,
|
5078 |
+
"step": 723
|
5079 |
+
},
|
5080 |
+
{
|
5081 |
+
"epoch": 0.047313883340889505,
|
5082 |
+
"grad_norm": 2.0877747535705566,
|
5083 |
+
"learning_rate": 0.00010653154649324917,
|
5084 |
+
"loss": 2.8219,
|
5085 |
+
"step": 724
|
5086 |
+
},
|
5087 |
+
{
|
5088 |
+
"epoch": 0.04737923400848742,
|
5089 |
+
"grad_norm": 2.1260170936584473,
|
5090 |
+
"learning_rate": 0.00010632113721136636,
|
5091 |
+
"loss": 2.989,
|
5092 |
+
"step": 725
|
5093 |
+
},
|
5094 |
+
{
|
5095 |
+
"epoch": 0.04744458467608533,
|
5096 |
+
"grad_norm": 2.3242621421813965,
|
5097 |
+
"learning_rate": 0.00010611069982845183,
|
5098 |
+
"loss": 3.1765,
|
5099 |
+
"step": 726
|
5100 |
+
},
|
5101 |
+
{
|
5102 |
+
"epoch": 0.047509935343683245,
|
5103 |
+
"grad_norm": 2.1452298164367676,
|
5104 |
+
"learning_rate": 0.00010590023528001884,
|
5105 |
+
"loss": 2.9238,
|
5106 |
+
"step": 727
|
5107 |
+
},
|
5108 |
+
{
|
5109 |
+
"epoch": 0.04757528601128116,
|
5110 |
+
"grad_norm": 2.3453991413116455,
|
5111 |
+
"learning_rate": 0.00010568974450170139,
|
5112 |
+
"loss": 2.9934,
|
5113 |
+
"step": 728
|
5114 |
+
},
|
5115 |
+
{
|
5116 |
+
"epoch": 0.04764063667887907,
|
5117 |
+
"grad_norm": 2.5484986305236816,
|
5118 |
+
"learning_rate": 0.00010547922842925008,
|
5119 |
+
"loss": 2.8265,
|
5120 |
+
"step": 729
|
5121 |
+
},
|
5122 |
+
{
|
5123 |
+
"epoch": 0.047705987346476984,
|
5124 |
+
"grad_norm": 2.7285537719726562,
|
5125 |
+
"learning_rate": 0.00010526868799852796,
|
5126 |
+
"loss": 3.4688,
|
5127 |
+
"step": 730
|
5128 |
+
},
|
5129 |
+
{
|
5130 |
+
"epoch": 0.0477713380140749,
|
5131 |
+
"grad_norm": 2.3990156650543213,
|
5132 |
+
"learning_rate": 0.0001050581241455064,
|
5133 |
+
"loss": 3.0999,
|
5134 |
+
"step": 731
|
5135 |
+
},
|
5136 |
+
{
|
5137 |
+
"epoch": 0.04783668868167281,
|
5138 |
+
"grad_norm": 3.584718704223633,
|
5139 |
+
"learning_rate": 0.00010484753780626089,
|
5140 |
+
"loss": 2.789,
|
5141 |
+
"step": 732
|
5142 |
+
},
|
5143 |
+
{
|
5144 |
+
"epoch": 0.04790203934927073,
|
5145 |
+
"grad_norm": 2.8623907566070557,
|
5146 |
+
"learning_rate": 0.00010463692991696685,
|
5147 |
+
"loss": 3.1882,
|
5148 |
+
"step": 733
|
5149 |
+
},
|
5150 |
+
{
|
5151 |
+
"epoch": 0.047967390016868644,
|
5152 |
+
"grad_norm": 4.1186909675598145,
|
5153 |
+
"learning_rate": 0.00010442630141389549,
|
5154 |
+
"loss": 2.9204,
|
5155 |
+
"step": 734
|
5156 |
+
},
|
5157 |
+
{
|
5158 |
+
"epoch": 0.04803274068446656,
|
5159 |
+
"grad_norm": 3.359563112258911,
|
5160 |
+
"learning_rate": 0.00010421565323340971,
|
5161 |
+
"loss": 3.1909,
|
5162 |
+
"step": 735
|
5163 |
+
},
|
5164 |
+
{
|
5165 |
+
"epoch": 0.04809809135206447,
|
5166 |
+
"grad_norm": 2.727571725845337,
|
5167 |
+
"learning_rate": 0.00010400498631195992,
|
5168 |
+
"loss": 3.3027,
|
5169 |
+
"step": 736
|
5170 |
+
},
|
5171 |
+
{
|
5172 |
+
"epoch": 0.048163442019662384,
|
5173 |
+
"grad_norm": 3.851203680038452,
|
5174 |
+
"learning_rate": 0.00010379430158607975,
|
5175 |
+
"loss": 3.3401,
|
5176 |
+
"step": 737
|
5177 |
+
},
|
5178 |
+
{
|
5179 |
+
"epoch": 0.0482287926872603,
|
5180 |
+
"grad_norm": 2.8851330280303955,
|
5181 |
+
"learning_rate": 0.000103583599992382,
|
5182 |
+
"loss": 3.091,
|
5183 |
+
"step": 738
|
5184 |
+
},
|
5185 |
+
{
|
5186 |
+
"epoch": 0.04829414335485821,
|
5187 |
+
"grad_norm": 2.6836740970611572,
|
5188 |
+
"learning_rate": 0.0001033728824675545,
|
5189 |
+
"loss": 2.9343,
|
5190 |
+
"step": 739
|
5191 |
+
},
|
5192 |
+
{
|
5193 |
+
"epoch": 0.04835949402245612,
|
5194 |
+
"grad_norm": 3.1017136573791504,
|
5195 |
+
"learning_rate": 0.0001031621499483559,
|
5196 |
+
"loss": 2.9053,
|
5197 |
+
"step": 740
|
5198 |
+
},
|
5199 |
+
{
|
5200 |
+
"epoch": 0.048424844690054036,
|
5201 |
+
"grad_norm": 3.8753886222839355,
|
5202 |
+
"learning_rate": 0.00010295140337161146,
|
5203 |
+
"loss": 2.8989,
|
5204 |
+
"step": 741
|
5205 |
+
},
|
5206 |
+
{
|
5207 |
+
"epoch": 0.04849019535765195,
|
5208 |
+
"grad_norm": 3.4842278957366943,
|
5209 |
+
"learning_rate": 0.00010274064367420897,
|
5210 |
+
"loss": 2.7641,
|
5211 |
+
"step": 742
|
5212 |
+
},
|
5213 |
+
{
|
5214 |
+
"epoch": 0.04855554602524986,
|
5215 |
+
"grad_norm": 3.0435166358947754,
|
5216 |
+
"learning_rate": 0.00010252987179309459,
|
5217 |
+
"loss": 3.0448,
|
5218 |
+
"step": 743
|
5219 |
+
},
|
5220 |
+
{
|
5221 |
+
"epoch": 0.048620896692847776,
|
5222 |
+
"grad_norm": 3.1794517040252686,
|
5223 |
+
"learning_rate": 0.00010231908866526851,
|
5224 |
+
"loss": 2.8337,
|
5225 |
+
"step": 744
|
5226 |
+
},
|
5227 |
+
{
|
5228 |
+
"epoch": 0.04868624736044569,
|
5229 |
+
"grad_norm": 7.32763147354126,
|
5230 |
+
"learning_rate": 0.00010210829522778111,
|
5231 |
+
"loss": 3.0432,
|
5232 |
+
"step": 745
|
5233 |
+
},
|
5234 |
+
{
|
5235 |
+
"epoch": 0.0487515980280436,
|
5236 |
+
"grad_norm": 3.655820846557617,
|
5237 |
+
"learning_rate": 0.00010189749241772844,
|
5238 |
+
"loss": 2.8711,
|
5239 |
+
"step": 746
|
5240 |
+
},
|
5241 |
+
{
|
5242 |
+
"epoch": 0.048816948695641516,
|
5243 |
+
"grad_norm": 3.610841751098633,
|
5244 |
+
"learning_rate": 0.00010168668117224825,
|
5245 |
+
"loss": 2.6454,
|
5246 |
+
"step": 747
|
5247 |
+
},
|
5248 |
+
{
|
5249 |
+
"epoch": 0.048882299363239436,
|
5250 |
+
"grad_norm": 4.61665153503418,
|
5251 |
+
"learning_rate": 0.00010147586242851585,
|
5252 |
+
"loss": 2.8728,
|
5253 |
+
"step": 748
|
5254 |
+
},
|
5255 |
+
{
|
5256 |
+
"epoch": 0.04894765003083735,
|
5257 |
+
"grad_norm": 6.172382354736328,
|
5258 |
+
"learning_rate": 0.00010126503712373982,
|
5259 |
+
"loss": 2.882,
|
5260 |
+
"step": 749
|
5261 |
+
},
|
5262 |
+
{
|
5263 |
+
"epoch": 0.04901300069843526,
|
5264 |
+
"grad_norm": 12.269244194030762,
|
5265 |
+
"learning_rate": 0.00010105420619515798,
|
5266 |
+
"loss": 3.1222,
|
5267 |
+
"step": 750
|
5268 |
+
},
|
5269 |
+
{
|
5270 |
+
"epoch": 0.04901300069843526,
|
5271 |
+
"eval_loss": 4.020401954650879,
|
5272 |
+
"eval_runtime": 1547.5208,
|
5273 |
+
"eval_samples_per_second": 16.654,
|
5274 |
+
"eval_steps_per_second": 8.327,
|
5275 |
+
"step": 750
|
5276 |
}
|
5277 |
],
|
5278 |
"logging_steps": 1,
|
5292 |
"attributes": {}
|
5293 |
}
|
5294 |
},
|
5295 |
+
"total_flos": 1.109738548887552e+18,
|
5296 |
"train_batch_size": 2,
|
5297 |
"trial_name": null,
|
5298 |
"trial_params": null
|