yoshitomo-matsubara committed
Commit 9ea3db7
Parent(s): e274f87

added files

- config.json +26 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +1 -0
- tokenizer.json +0 -0
- tokenizer_config.json +1 -0
- training.log +49 -0
- vocab.txt +0 -0
config.json
ADDED
@@ -0,0 +1,26 @@
+{
+  "_name_or_path": "bert-large-uncased",
+  "architectures": [
+    "BertForSequenceClassification"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "finetuning_task": "rte",
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
+  "initializer_range": 0.02,
+  "intermediate_size": 4096,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "transformers_version": "4.6.1",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 30522
+}
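The config above defines a 24-layer, 16-head BERT-large encoder (hidden size 1024) with a single-label classification head fine-tuned on RTE. As a minimal loading sketch, assuming these files are published under a Hub repo id (the id below is hypothetical; a local directory holding these files works the same way):

# Sketch: load the committed config and weights with transformers.
# The repo id is an assumption, not confirmed by this commit.
from transformers import AutoConfig, AutoModelForSequenceClassification

repo_id = "yoshitomo-matsubara/bert-large-uncased-rte"  # hypothetical

config = AutoConfig.from_pretrained(repo_id)  # parses config.json
assert config.num_hidden_layers == 24 and config.hidden_size == 1024

model = AutoModelForSequenceClassification.from_pretrained(repo_id)  # loads pytorch_model.bin
model.eval()  # inference mode; the 0.1 dropout above is disabled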
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1cc0cfbe790c20bbc00bf2cba58aa3d0afa500ad9c4e971953082e53de345df7
+size 1340746825
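Note that what is committed here is a Git LFS pointer, not the weights themselves: the ~1.34 GB binary lives in LFS storage and is addressed by the SHA-256 in the oid field. A minimal sketch of verifying a fetched copy against the pointer (the local path is an assumption):

# Sketch: check a downloaded weight file against the LFS pointer above.
import hashlib

EXPECTED_OID = "1cc0cfbe790c20bbc00bf2cba58aa3d0afa500ad9c4e971953082e53de345df7"
EXPECTED_SIZE = 1340746825  # bytes, from the pointer's "size" field

def matches_pointer(path: str) -> bool:
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so the 1.3 GB file never sits in memory whole.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == EXPECTED_OID and size == EXPECTED_SIZE

print(matches_pointer("pytorch_model.bin"))  # path is an assumption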
special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1 @@
+{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_lower": true, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-large-uncased"}
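special_tokens_map.json, tokenizer_config.json, tokenizer.json, and vocab.txt together define the lowercasing WordPiece tokenizer inherited from bert-large-uncased, capped at 512 positions. A minimal sketch of encoding an RTE-style premise/hypothesis pair with it (repo id hypothetical as above; the sentences are illustrative):

# Sketch: load the tokenizer shipped in this commit and encode a sentence pair.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("yoshitomo-matsubara/bert-large-uncased-rte")  # hypothetical id

enc = tokenizer(
    "The cat sat on the mat.",   # premise (illustrative)
    "A cat is on the mat.",      # hypothesis (illustrative)
    truncation=True,
    max_length=512,              # matches model_max_length above
    return_tensors="pt",
)
# Pairs are packed as [CLS] premise [SEP] hypothesis [SEP],
# lowercased per do_lower_case=true.
print(tokenizer.decode(enc["input_ids"][0]))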
training.log
ADDED
@@ -0,0 +1,49 @@
+2021-05-22 19:52:56,786 INFO __main__ Namespace(adjust_lr=False, config='torchdistill/configs/sample/glue/rte/ce/bert_large_uncased.yaml', log='log/glue/rte/ce/bert_large_uncased.txt', private_output='leaderboard/glue/standard/bert_large_uncased/', seed=None, student_only=False, task_name='rte', test_only=False, world_size=1)
+2021-05-22 19:52:56,832 INFO __main__ Distributed environment: NO
+Num processes: 1
+Process index: 0
+Local process index: 0
+Device: cuda
+Use FP16 precision: True
+
+2021-05-22 19:53:31,901 INFO __main__ Start training
+2021-05-22 19:53:31,901 INFO torchdistill.models.util [student model]
+2021-05-22 19:53:31,901 INFO torchdistill.models.util Using the original student model
+2021-05-22 19:53:31,901 INFO torchdistill.core.training Loss = 1.0 * OrgLoss
+2021-05-22 19:53:36,610 INFO torchdistill.misc.log Epoch: [0] [  0/312] eta: 0:01:00 lr: 4.99465811965812e-05 sample/s: 20.909627502592325 loss: 1.1803 (1.1803) time: 0.1948 data: 0.0035 max mem: 5355
+2021-05-22 19:53:43,676 INFO torchdistill.misc.log Epoch: [0] [ 50/312] eta: 0:00:37 lr: 4.7275641025641026e-05 sample/s: 28.0015087940663 loss: 0.6993 (0.7507) time: 0.1425 data: 0.0017 max mem: 7364
+2021-05-22 19:53:50,696 INFO torchdistill.misc.log Epoch: [0] [100/312] eta: 0:00:29 lr: 4.460470085470086e-05 sample/s: 28.603703072254238 loss: 0.6589 (0.7230) time: 0.1386 data: 0.0016 max mem: 7364
+2021-05-22 19:53:57,721 INFO torchdistill.misc.log Epoch: [0] [150/312] eta: 0:00:22 lr: 4.1933760683760684e-05 sample/s: 28.46394082318211 loss: 0.6511 (0.7048) time: 0.1420 data: 0.0017 max mem: 7364
+2021-05-22 19:54:04,797 INFO torchdistill.misc.log Epoch: [0] [200/312] eta: 0:00:15 lr: 3.9262820512820513e-05 sample/s: 30.642365442534203 loss: 0.6556 (0.6884) time: 0.1418 data: 0.0017 max mem: 7364
+2021-05-22 19:54:11,917 INFO torchdistill.misc.log Epoch: [0] [250/312] eta: 0:00:08 lr: 3.659188034188034e-05 sample/s: 28.549188306163426 loss: 0.5260 (0.6722) time: 0.1422 data: 0.0017 max mem: 7364
+2021-05-22 19:54:18,879 INFO torchdistill.misc.log Epoch: [0] [300/312] eta: 0:00:01 lr: 3.392094017094017e-05 sample/s: 32.85520747452711 loss: 0.5626 (0.6617) time: 0.1405 data: 0.0017 max mem: 7364
+2021-05-22 19:54:20,395 INFO torchdistill.misc.log Epoch: [0] Total time: 0:00:43
+2021-05-22 19:54:21,113 INFO /usr/local/lib/python3.7/dist-packages/datasets/metric.py Removing /root/.cache/huggingface/metrics/glue/rte/default_experiment-1-0.arrow
+2021-05-22 19:54:21,114 INFO __main__ Validation: accuracy = 0.7111913357400722
+2021-05-22 19:54:21,114 INFO __main__ Updating ckpt
+2021-05-22 19:54:26,919 INFO torchdistill.misc.log Epoch: [1] [  0/312] eta: 0:00:42 lr: 3.327991452991453e-05 sample/s: 30.052638461221544 loss: 0.9158 (0.9158) time: 0.1359 data: 0.0028 max mem: 7364
+2021-05-22 19:54:33,925 INFO torchdistill.misc.log Epoch: [1] [ 50/312] eta: 0:00:36 lr: 3.0608974358974366e-05 sample/s: 27.751990353011625 loss: 0.4395 (0.5118) time: 0.1401 data: 0.0017 max mem: 7364
+2021-05-22 19:54:41,004 INFO torchdistill.misc.log Epoch: [1] [100/312] eta: 0:00:29 lr: 2.793803418803419e-05 sample/s: 27.844522780532817 loss: 0.5357 (0.5354) time: 0.1406 data: 0.0017 max mem: 7364
+2021-05-22 19:54:48,026 INFO torchdistill.misc.log Epoch: [1] [150/312] eta: 0:00:22 lr: 2.5267094017094017e-05 sample/s: 28.286567888912117 loss: 0.4564 (0.5309) time: 0.1389 data: 0.0017 max mem: 7364
+2021-05-22 19:54:55,136 INFO torchdistill.misc.log Epoch: [1] [200/312] eta: 0:00:15 lr: 2.2596153846153846e-05 sample/s: 28.280893743626052 loss: 0.5141 (0.5348) time: 0.1406 data: 0.0017 max mem: 7364
+2021-05-22 19:55:02,247 INFO torchdistill.misc.log Epoch: [1] [250/312] eta: 0:00:08 lr: 1.992521367521368e-05 sample/s: 27.716275688891827 loss: 0.5522 (0.5412) time: 0.1416 data: 0.0017 max mem: 7364
+2021-05-22 19:55:09,289 INFO torchdistill.misc.log Epoch: [1] [300/312] eta: 0:00:01 lr: 1.7254273504273504e-05 sample/s: 32.32847169633171 loss: 0.5199 (0.5340) time: 0.1396 data: 0.0016 max mem: 7364
+2021-05-22 19:55:10,829 INFO torchdistill.misc.log Epoch: [1] Total time: 0:00:44
+2021-05-22 19:55:11,553 INFO /usr/local/lib/python3.7/dist-packages/datasets/metric.py Removing /root/.cache/huggingface/metrics/glue/rte/default_experiment-1-0.arrow
+2021-05-22 19:55:11,554 INFO __main__ Validation: accuracy = 0.7075812274368231
+2021-05-22 19:55:11,698 INFO torchdistill.misc.log Epoch: [2] [  0/312] eta: 0:00:44 lr: 1.6613247863247862e-05 sample/s: 28.265599202097192 loss: 0.3159 (0.3159) time: 0.1434 data: 0.0019 max mem: 7364
+2021-05-22 19:55:18,779 INFO torchdistill.misc.log Epoch: [2] [ 50/312] eta: 0:00:37 lr: 1.3942307692307693e-05 sample/s: 28.61760837860658 loss: 0.0392 (0.2136) time: 0.1403 data: 0.0017 max mem: 7364
+2021-05-22 19:55:25,865 INFO torchdistill.misc.log Epoch: [2] [100/312] eta: 0:00:30 lr: 1.1271367521367522e-05 sample/s: 27.91173901687127 loss: 0.1969 (0.2786) time: 0.1434 data: 0.0017 max mem: 7364
+2021-05-22 19:55:32,947 INFO torchdistill.misc.log Epoch: [2] [150/312] eta: 0:00:22 lr: 8.600427350427351e-06 sample/s: 28.14766048758227 loss: 0.1217 (0.2782) time: 0.1414 data: 0.0017 max mem: 7364
+2021-05-22 19:55:40,055 INFO torchdistill.misc.log Epoch: [2] [200/312] eta: 0:00:15 lr: 5.929487179487179e-06 sample/s: 28.59614620884787 loss: 0.1386 (0.2659) time: 0.1419 data: 0.0017 max mem: 7364
+2021-05-22 19:55:47,234 INFO torchdistill.misc.log Epoch: [2] [250/312] eta: 0:00:08 lr: 3.258547008547009e-06 sample/s: 28.29715935732188 loss: 0.0420 (0.2671) time: 0.1457 data: 0.0018 max mem: 7364
+2021-05-22 19:55:54,275 INFO torchdistill.misc.log Epoch: [2] [300/312] eta: 0:00:01 lr: 5.876068376068376e-07 sample/s: 28.389669724956935 loss: 0.1216 (0.2705) time: 0.1419 data: 0.0017 max mem: 7364
+2021-05-22 19:55:55,812 INFO torchdistill.misc.log Epoch: [2] Total time: 0:00:44
+2021-05-22 19:55:56,531 INFO /usr/local/lib/python3.7/dist-packages/datasets/metric.py Removing /root/.cache/huggingface/metrics/glue/rte/default_experiment-1-0.arrow
+2021-05-22 19:55:56,531 INFO __main__ Validation: accuracy = 0.7256317689530686
+2021-05-22 19:55:56,532 INFO __main__ Updating ckpt
+2021-05-22 19:56:10,518 INFO __main__ [Student: bert-large-uncased]
+2021-05-22 19:56:11,255 INFO /usr/local/lib/python3.7/dist-packages/datasets/metric.py Removing /root/.cache/huggingface/metrics/glue/rte/default_experiment-1-0.arrow
+2021-05-22 19:56:11,255 INFO __main__ Test: accuracy = 0.7256317689530686
+2021-05-22 19:56:11,255 INFO __main__ Start prediction for private dataset(s)
+2021-05-22 19:56:11,257 INFO __main__ rte/test: 3000 samples
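The logged learning rates match a linear decay with no warmup: 312 steps/epoch x 3 epochs = 936 optimizer steps, and after the first step lr = 5e-5 x 935/936 ≈ 4.9947e-05, exactly as logged. A minimal sketch reproducing the schedule; the peak lr of 5e-5 and the step count are inferred from the log, not stated anywhere in this commit:

# Sketch: reproduce the lr values printed in training.log.
import torch
from transformers import get_linear_schedule_with_warmup

PEAK_LR = 5e-5         # inferred from the logged values
TOTAL_STEPS = 312 * 3  # 312 steps per epoch, 3 epochs, per the log

opt = torch.optim.AdamW([torch.nn.Parameter(torch.zeros(1))], lr=PEAK_LR)
sched = get_linear_schedule_with_warmup(opt, num_warmup_steps=0, num_training_steps=TOTAL_STEPS)

for step in range(312):  # first epoch only, mirroring Epoch [0]
    opt.step()
    sched.step()
    if step % 50 == 0:   # same cadence as the log
        print(f"[{step:3d}/312] lr: {sched.get_last_lr()[0]:.14e}")
# [  0/312] lr: 4.99465811965812e-05  <- matches the first logged lr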
vocab.txt
ADDED
The diff for this file is too large to render.