Training in progress, step 25500
- last-checkpoint/optimizer.pt +1 -1
- last-checkpoint/pytorch_model.bin +1 -1
- last-checkpoint/rng_state.pth +1 -1
- last-checkpoint/scaler.pt +1 -1
- last-checkpoint/scheduler.pt +1 -1
- last-checkpoint/trainer_state.json +22 -3
- pytorch_model.bin +1 -1
last-checkpoint/optimizer.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a7527c57261fc6b84b25f10e84d7a47f548f88a62bd0fa0d957183e7145aacea
 size 484085807
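Every binary in this commit is tracked with Git LFS, so each diff only touches the three-line pointer file (spec version, sha256 oid, byte size); the weights themselves live in LFS storage. As an illustration only, a pointer such as last-checkpoint/optimizer.pt can be inspected with a small helper like the hypothetical parse_lfs_pointer below (not part of git-lfs or any library):

```python
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file into a dict of its key/value fields.

    A pointer is plain text of the form:
        version https://git-lfs.github.com/spec/v1
        oid sha256:<hex digest>
        size <bytes>
    """
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        if key:
            fields[key] = value
    return fields

# Hypothetical usage on a clone where the pointer text is checked out:
# info = parse_lfs_pointer("last-checkpoint/optimizer.pt")
# info["oid"]  -> "sha256:a7527c57..."    info["size"] -> "484085807"
```

The remaining .pt/.bin diffs below follow the same pattern; only the oid and size differ.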
last-checkpoint/pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:47d2731690ffc8fe7195e2b3a3833ddac7dc2f679cea81a498051eee90784aca
 size 242031355
last-checkpoint/rng_state.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:52787c2191e7252f249b04524a6abc0c25bed65d8c3ba82d59ef0245a669a19c
 size 14503
last-checkpoint/scaler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:be2e9c956f0b1d4ab3f65bab8ec85df43acca002bbad5b017e2783caff41fbf9
 size 559
last-checkpoint/scheduler.pt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:8041246d835ba95a9cd028a630a6f7c20f2f58aee863ef06a3ae7c7fd79315b2
 size 623
last-checkpoint/trainer_state.json
CHANGED
@@ -1,8 +1,8 @@
 {
   "best_metric": 0.656362771987915,
   "best_model_checkpoint": "/home2/s5431786/jb-internship/results/t5-small-CodeXGLUE-CONCODE-faster/checkpoint-24500",
-  "epoch": 8.
-  "global_step":
+  "epoch": 8.16,
+  "global_step": 25500,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -956,11 +956,30 @@
       "eval_samples_per_second": 72.539,
       "eval_steps_per_second": 2.285,
       "step": 25000
+    },
+    {
+      "epoch": 8.16,
+      "learning_rate": 0.00031165473684210526,
+      "loss": 0.4123,
+      "step": 25500
+    },
+    {
+      "epoch": 8.16,
+      "eval_bleu": 0.06810253515300806,
+      "eval_exact_match": 0.1015,
+      "eval_loss": 0.672680675983429,
+      "eval_rouge1": 0.517027248239716,
+      "eval_rouge2": 0.3614838699580787,
+      "eval_rougeL": 0.5045490816379123,
+      "eval_runtime": 25.969,
+      "eval_samples_per_second": 77.015,
+      "eval_steps_per_second": 2.426,
+      "step": 25500
     }
   ],
   "max_steps": 62500,
   "num_train_epochs": 20,
-  "total_flos": 1.
+  "total_flos": 1.0660624165502976e+17,
   "trial_name": null,
   "trial_params": null
 }
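The trainer_state.json change appends two records at step 25500: a training log (loss 0.4123, learning rate ≈ 3.12e-4) and an evaluation log (eval_loss ≈ 0.673, eval_bleu ≈ 0.068, eval_rougeL ≈ 0.505). Note that best_model_checkpoint still points at checkpoint-24500, so this evaluation did not improve on the stored best_metric of 0.656362771987915 (presumably eval_loss, judging by its magnitude). A minimal sketch of reading these records back, assuming the standard transformers Trainer layout in which the logged entries live under the log_history key:

```python
import json

# Load the Trainer state written at this checkpoint.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"], state["epoch"], state["best_metric"])
# -> 25500 8.16 0.656362771987915

# Separate training-loss logs from evaluation logs in the history.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

latest_eval = eval_logs[-1]  # the step-25500 entry added by this commit
print(latest_eval["step"], latest_eval["eval_bleu"], latest_eval["eval_rougeL"])
```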
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:47d2731690ffc8fe7195e2b3a3833ddac7dc2f679cea81a498051eee90784aca
 size 242031355
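The root-level pytorch_model.bin carries the same oid and size as last-checkpoint/pytorch_model.bin, i.e. the exported weights for step 25500. A hedged loading sketch, assuming the LFS objects have been fetched (e.g. git lfs pull) and that the weights match a stock t5-small configuration, as the output-dir name t5-small-CodeXGLUE-CONCODE-faster suggests (an assumption, not verified against this repo's config):

```python
import torch
from transformers import T5ForConditionalGeneration

# Load the raw state dict committed here (242 MB once LFS objects are pulled).
state_dict = torch.load("pytorch_model.bin", map_location="cpu")

# Instantiate a t5-small skeleton and inject the fine-tuned weights.
# (t5-small is assumed from the repo/output-dir name, not read from a config.)
model = T5ForConditionalGeneration.from_pretrained("t5-small", state_dict=state_dict)
model.eval()

print(sum(p.numel() for p in model.parameters()))  # ≈ 60M parameters for t5-small
```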