Training in progress, step 23000
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/pytorch_model.bin +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scaler.pt +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +22 -3
- pytorch_model.bin +1 -1
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:59cf842aa62a04e6a64174347236dcfa2cb8f057894aee22af49777aefd11630
 size 484085807
last-checkpoint/pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b86f00b47dfc770eb9d65f03844b7c31c8bfb39bb680ff8fc43b325fea097fee
 size 242031355
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:edb8a46ae691d815a589cfaf7942e6d1777507ff2e6817727b44ad9cb963543e
 size 14503
last-checkpoint/scaler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:691923a4a87c660df20f265bf1dae078d92003f0028775818e1a0abb49477388
 size 559
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:740731e0038abdda415682cdde2b15a11dd6d693c7306742d58a3232dac5dc37
 size 623
last-checkpoint/trainer_state.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "best_metric": 0.665252685546875,
   "best_model_checkpoint": "/home2/s5431786/jb-internship/results/t5-small-CodeXGLUE-CONCODE-faster/checkpoint-22500",
-  "epoch": 7.
-  "global_step":
+  "epoch": 7.36,
+  "global_step": 23000,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -861,11 +861,30 @@
       "eval_samples_per_second": 76.863,
       "eval_steps_per_second": 2.421,
       "step": 22500
+    },
+    {
+      "epoch": 7.36,
+      "learning_rate": 0.00033269894736842105,
+      "loss": 0.4374,
+      "step": 23000
+    },
+    {
+      "epoch": 7.36,
+      "eval_bleu": 0.06757017098690109,
+      "eval_exact_match": 0.1,
+      "eval_loss": 0.6658646464347839,
+      "eval_rouge1": 0.512652953906156,
+      "eval_rouge2": 0.3561654225240507,
+      "eval_rougeL": 0.49934790688943353,
+      "eval_runtime": 26.0622,
+      "eval_samples_per_second": 76.74,
+      "eval_steps_per_second": 2.417,
+      "step": 23000
     }
   ],
   "max_steps": 62500,
   "num_train_epochs": 20,
-  "total_flos": 9.
+  "total_flos": 9.614488714503782e+16,
   "trial_name": null,
   "trial_params": null
 }
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:b86f00b47dfc770eb9d65f03844b7c31c8bfb39bb680ff8fc43b325fea097fee
 size 242031355