Faeze committed on
Commit 21aadae
1 Parent(s): e5056b2

Upload folder using huggingface_hub

all_results.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "epoch": 200.0,
+ "eval_accuracy": 78.44827586206897,
+ "eval_average_metrics": 77.31764307402186,
+ "eval_classification_report": "{\"acceptance\": {\"precision\": 0.75, \"recall\": 0.6666666666666666, \"f1-score\": 0.7058823529411765, \"support\": 9.0}, \"accusation\": {\"precision\": 0.9, \"recall\": 0.75, \"f1-score\": 0.8181818181818182, \"support\": 12.0}, \"appreciation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"challenge\": {\"precision\": 0.6923076923076923, \"recall\": 0.8709677419354839, \"f1-score\": 0.7714285714285716, \"support\": 31.0}, \"informing statement\": {\"precision\": 0.8275862068965517, \"recall\": 0.6486486486486487, \"f1-score\": 0.7272727272727273, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.9, \"recall\": 1.0, \"f1-score\": 0.9473684210526316, \"support\": 9.0}, \"request\": {\"precision\": 1.0, \"recall\": 0.875, \"f1-score\": 0.9333333333333333, \"support\": 8.0}, \"accuracy\": 0.7844827586206896, \"macro avg\": {\"precision\": 0.8420700707338638, \"recall\": 0.8375214932674611, \"f1-score\": 0.8331714982643775, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7994157596268179, \"recall\": 0.7844827586206896, \"f1-score\": 0.7831433183733814, \"support\": 116.0}}",
+ "eval_f1_macro": 74.05968873461136,
+ "eval_f1_micro": 78.44827586206897,
+ "eval_f1_weighted": 78.31433183733813,
+ "eval_loss": 0.5467008948326111,
+ "eval_runtime": 1.2934,
+ "eval_samples_per_second": 89.689,
+ "init_mem_cpu_alloc_delta": -542822400,
+ "init_mem_cpu_peaked_delta": 542826496,
+ "init_mem_gpu_alloc_delta": 891528192,
+ "init_mem_gpu_peaked_delta": 0,
+ "peak_memory": 5.305544921875,
+ "test_accuracy": 78.44827586206897,
+ "test_average_metrics": 77.31764307402186,
+ "test_classification_report": "{\"acceptance\": {\"precision\": 0.75, \"recall\": 0.6666666666666666, \"f1-score\": 0.7058823529411765, \"support\": 9.0}, \"accusation\": {\"precision\": 0.9, \"recall\": 0.75, \"f1-score\": 0.8181818181818182, \"support\": 12.0}, \"appreciation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"challenge\": {\"precision\": 0.6923076923076923, \"recall\": 0.8709677419354839, \"f1-score\": 0.7714285714285716, \"support\": 31.0}, \"informing statement\": {\"precision\": 0.8275862068965517, \"recall\": 0.6486486486486487, \"f1-score\": 0.7272727272727273, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.9, \"recall\": 1.0, \"f1-score\": 0.9473684210526316, \"support\": 9.0}, \"request\": {\"precision\": 1.0, \"recall\": 0.875, \"f1-score\": 0.9333333333333333, \"support\": 8.0}, \"accuracy\": 0.7844827586206896, \"macro avg\": {\"precision\": 0.8420700707338638, \"recall\": 0.8375214932674611, \"f1-score\": 0.8331714982643775, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7994157596268179, \"recall\": 0.7844827586206896, \"f1-score\": 0.7831433183733814, \"support\": 116.0}}",
+ "test_f1_macro": 74.05968873461136,
+ "test_f1_micro": 78.44827586206897,
+ "test_f1_weighted": 78.31433183733813,
+ "test_loss": 0.5467008948326111,
+ "test_runtime": 1.2778,
+ "test_samples_per_second": 90.778,
+ "total_time in minutes ": 41.508179166666665,
+ "train_mem_cpu_alloc_delta": 945156096,
+ "train_mem_cpu_peaked_delta": 40960,
+ "train_mem_gpu_alloc_delta": 40216576,
+ "train_mem_gpu_peaked_delta": 4624272384,
+ "train_runtime": 2488.3254,
+ "train_samples": 488,
+ "train_samples_per_second": 1.286
+ }
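Note: the per-class metrics above are stored as an escaped JSON string inside "eval_classification_report" / "test_classification_report", not as a nested object. A minimal sketch (assuming a local copy of this folder) of unpacking it:

```python
import json

# Load the aggregate metrics and decode the per-class report, which is a
# JSON string embedded inside the outer JSON file.
with open("all_results.json") as f:
    results = json.load(f)

report = json.loads(results["eval_classification_report"])
for label, scores in report.items():
    if isinstance(scores, dict):  # skip the top-level "accuracy" float
        print(f'{label:25s} f1={scores["f1-score"]:.3f} support={scores["support"]:.0f}')
```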
config.json ADDED
@@ -0,0 +1,146 @@
+ {
+ "_name_or_path": "t5-base",
+ "architectures": [
+ "T5ForConditionalGeneration"
+ ],
+ "attn_method": "linear",
+ "attn_prefix_tuning": false,
+ "class_weights": [
+ 0.40939597315436244,
+ 0.46923076923076923,
+ 1.1296296296296295,
+ 1.525,
+ 1.525,
+ 1.6944444444444444,
+ 1.90625,
+ 8.714285714285714
+ ],
+ "d_ff": 3072,
+ "d_kv": 64,
+ "d_model": 768,
+ "decoder_start_token_id": 0,
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "relu",
+ "fix_attention": false,
+ "gradient_checkpointing": false,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1",
+ "2": "LABEL_2",
+ "3": "LABEL_3",
+ "4": "LABEL_4",
+ "5": "LABEL_5",
+ "6": "LABEL_6",
+ "7": "LABEL_7",
+ "8": "LABEL_8"
+ },
+ "ignore_target": false,
+ "init_prefix_method": "random",
+ "initializer_factor": 1.0,
+ "is_contrastive": false,
+ "is_encoder_decoder": true,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1,
+ "LABEL_2": 2,
+ "LABEL_3": 3,
+ "LABEL_4": 4,
+ "LABEL_5": 5,
+ "LABEL_6": 6,
+ "LABEL_7": 7,
+ "LABEL_8": 8
+ },
+ "label_based": false,
+ "label_token_ids": [
+ [
+ 16,
+ 10454,
+ 2493,
+ 1
+ ],
+ [
+ 1921,
+ 1
+ ],
+ [
+ 17316,
+ 257,
+ 1
+ ],
+ [
+ 1690,
+ 1
+ ],
+ [
+ 11746,
+ 1
+ ],
+ [
+ 21530,
+ 1
+ ],
+ [
+ 11122,
+ 1
+ ],
+ [
+ 822,
+ 1
+ ],
+ [
+ 30430,
+ 1
+ ]
+ ],
+ "layer_norm_epsilon": 1e-06,
+ "learned_temperature": false,
+ "max_length": 4,
+ "model_type": "t5",
+ "n_positions": 512,
+ "normalize_prefixes": false,
+ "num_decoder_layers": 12,
+ "num_heads": 12,
+ "num_layers": 12,
+ "num_target": 1,
+ "output_past": true,
+ "pad_token_id": 0,
+ "prefix_num": 1,
+ "prefix_tuning": false,
+ "relative_attention_num_buckets": 32,
+ "shared_attn": false,
+ "task_specific_params": {
+ "summarization": {
+ "early_stopping": true,
+ "length_penalty": 2.0,
+ "max_length": 200,
+ "min_length": 30,
+ "no_repeat_ngram_size": 3,
+ "num_beams": 4,
+ "prefix": "summarize: "
+ },
+ "translation_en_to_de": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to German: "
+ },
+ "translation_en_to_fr": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to French: "
+ },
+ "translation_en_to_ro": {
+ "early_stopping": true,
+ "max_length": 300,
+ "num_beams": 4,
+ "prefix": "translate English to Romanian: "
+ }
+ },
+ "temperature": 2000,
+ "train_task_adapters": true,
+ "transformers_version": "4.6.0",
+ "use_cache": true,
+ "vocab_size": 32100
+ }
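Note: the config is derived from t5-base (checkpoint produced with transformers 4.6.0) but carries project-specific keys such as "attn_method", "class_weights", "prefix_tuning", and "train_task_adapters" that are not part of the standard T5Config; the adapter weights implied by "train_task_adapters": true generally need the project's own modeling code to load. A hedged sketch (the "./" path is an assumption, meaning a local clone of this folder) of inspecting the config with stock transformers:

```python
from transformers import T5Config

# Sketch only: unknown keys in config.json are kept as plain attributes on the
# loaded config object, but plain transformers does not act on them.
config = T5Config.from_pretrained("./")
print(config.model_type, config.d_model, config.num_layers)      # t5 768 12
print(getattr(config, "class_weights", None))                    # custom field from this repo
print(getattr(config, "train_task_adapters", None))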
eval_instances_normalized_attn_scores.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50764592c09d3053c8ca12d3bd7108d14b2d3b3e93191cb4e40cfdac94418f81
+ size 1048
eval_results.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "epoch": 200.0,
+ "eval_accuracy": 78.44827586206897,
+ "eval_average_metrics": 77.31764307402186,
+ "eval_classification_report": "{\"acceptance\": {\"precision\": 0.75, \"recall\": 0.6666666666666666, \"f1-score\": 0.7058823529411765, \"support\": 9.0}, \"accusation\": {\"precision\": 0.9, \"recall\": 0.75, \"f1-score\": 0.8181818181818182, \"support\": 12.0}, \"appreciation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"challenge\": {\"precision\": 0.6923076923076923, \"recall\": 0.8709677419354839, \"f1-score\": 0.7714285714285716, \"support\": 31.0}, \"informing statement\": {\"precision\": 0.8275862068965517, \"recall\": 0.6486486486486487, \"f1-score\": 0.7272727272727273, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.9, \"recall\": 1.0, \"f1-score\": 0.9473684210526316, \"support\": 9.0}, \"request\": {\"precision\": 1.0, \"recall\": 0.875, \"f1-score\": 0.9333333333333333, \"support\": 8.0}, \"accuracy\": 0.7844827586206896, \"macro avg\": {\"precision\": 0.8420700707338638, \"recall\": 0.8375214932674611, \"f1-score\": 0.8331714982643775, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7994157596268179, \"recall\": 0.7844827586206896, \"f1-score\": 0.7831433183733814, \"support\": 116.0}}",
+ "eval_f1_macro": 74.05968873461136,
+ "eval_f1_micro": 78.44827586206897,
+ "eval_f1_weighted": 78.31433183733813,
+ "eval_loss": 0.5467008948326111,
+ "eval_runtime": 1.2934,
+ "eval_samples_per_second": 89.689
+ }
performance_results.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "peak_memory": 5.305544921875,
+ "total_time in minutes ": 41.508179166666665
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d92987d3a9f67923b68ed1d90e3fdcb4d044e5b3a0ebc47a1b2e1512a641736e
+ size 898972302
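Note: the three-line entries for the *.pt, *.bin, and spiece.model files are Git LFS pointer files (spec version, sha256 oid, size in bytes), not the binaries themselves; pytorch_model.bin resolves to roughly 899 MB. A sketch of fetching the real file through huggingface_hub (the repo id below is a placeholder, not taken from this page):

```python
from huggingface_hub import hf_hub_download

# Downloads the actual binary that the LFS pointer above refers to and
# returns the local cache path.
path = hf_hub_download(repo_id="user/repo-name", filename="pytorch_model.bin")
print(path)
```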
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"]}
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d60acb128cf7b7f2536e8f38a5b18a05535c9e14c7a355904270e15b0945ea86
+ size 791656
test_instances_normalized_attn_scores.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cf18366eb389698056f39a6158bd36abae1b39c77767057ccaca0a3c413cd26
+ size 1048
test_results.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "epoch": 200.0,
+ "test_accuracy": 78.44827586206897,
+ "test_average_metrics": 77.31764307402186,
+ "test_classification_report": "{\"acceptance\": {\"precision\": 0.75, \"recall\": 0.6666666666666666, \"f1-score\": 0.7058823529411765, \"support\": 9.0}, \"accusation\": {\"precision\": 0.9, \"recall\": 0.75, \"f1-score\": 0.8181818181818182, \"support\": 12.0}, \"appreciation\": {\"precision\": 0.6666666666666666, \"recall\": 0.8888888888888888, \"f1-score\": 0.761904761904762, \"support\": 9.0}, \"challenge\": {\"precision\": 0.6923076923076923, \"recall\": 0.8709677419354839, \"f1-score\": 0.7714285714285716, \"support\": 31.0}, \"informing statement\": {\"precision\": 0.8275862068965517, \"recall\": 0.6486486486486487, \"f1-score\": 0.7272727272727273, \"support\": 37.0}, \"question\": {\"precision\": 1.0, \"recall\": 1.0, \"f1-score\": 1.0, \"support\": 1.0}, \"rejection\": {\"precision\": 0.9, \"recall\": 1.0, \"f1-score\": 0.9473684210526316, \"support\": 9.0}, \"request\": {\"precision\": 1.0, \"recall\": 0.875, \"f1-score\": 0.9333333333333333, \"support\": 8.0}, \"accuracy\": 0.7844827586206896, \"macro avg\": {\"precision\": 0.8420700707338638, \"recall\": 0.8375214932674611, \"f1-score\": 0.8331714982643775, \"support\": 116.0}, \"weighted avg\": {\"precision\": 0.7994157596268179, \"recall\": 0.7844827586206896, \"f1-score\": 0.7831433183733814, \"support\": 116.0}}",
+ "test_f1_macro": 74.05968873461136,
+ "test_f1_micro": 78.44827586206897,
+ "test_f1_weighted": 78.31433183733813,
+ "test_loss": 0.5467008948326111,
+ "test_runtime": 1.2778,
+ "test_samples_per_second": 90.778
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 100, "additional_special_tokens": ["<extra_id_0>", "<extra_id_1>", "<extra_id_2>", "<extra_id_3>", "<extra_id_4>", "<extra_id_5>", "<extra_id_6>", "<extra_id_7>", "<extra_id_8>", "<extra_id_9>", "<extra_id_10>", "<extra_id_11>", "<extra_id_12>", "<extra_id_13>", "<extra_id_14>", "<extra_id_15>", "<extra_id_16>", "<extra_id_17>", "<extra_id_18>", "<extra_id_19>", "<extra_id_20>", "<extra_id_21>", "<extra_id_22>", "<extra_id_23>", "<extra_id_24>", "<extra_id_25>", "<extra_id_26>", "<extra_id_27>", "<extra_id_28>", "<extra_id_29>", "<extra_id_30>", "<extra_id_31>", "<extra_id_32>", "<extra_id_33>", "<extra_id_34>", "<extra_id_35>", "<extra_id_36>", "<extra_id_37>", "<extra_id_38>", "<extra_id_39>", "<extra_id_40>", "<extra_id_41>", "<extra_id_42>", "<extra_id_43>", "<extra_id_44>", "<extra_id_45>", "<extra_id_46>", "<extra_id_47>", "<extra_id_48>", "<extra_id_49>", "<extra_id_50>", "<extra_id_51>", "<extra_id_52>", "<extra_id_53>", "<extra_id_54>", "<extra_id_55>", "<extra_id_56>", "<extra_id_57>", "<extra_id_58>", "<extra_id_59>", "<extra_id_60>", "<extra_id_61>", "<extra_id_62>", "<extra_id_63>", "<extra_id_64>", "<extra_id_65>", "<extra_id_66>", "<extra_id_67>", "<extra_id_68>", "<extra_id_69>", "<extra_id_70>", "<extra_id_71>", "<extra_id_72>", "<extra_id_73>", "<extra_id_74>", "<extra_id_75>", "<extra_id_76>", "<extra_id_77>", "<extra_id_78>", "<extra_id_79>", "<extra_id_80>", "<extra_id_81>", "<extra_id_82>", "<extra_id_83>", "<extra_id_84>", "<extra_id_85>", "<extra_id_86>", "<extra_id_87>", "<extra_id_88>", "<extra_id_89>", "<extra_id_90>", "<extra_id_91>", "<extra_id_92>", "<extra_id_93>", "<extra_id_94>", "<extra_id_95>", "<extra_id_96>", "<extra_id_97>", "<extra_id_98>", "<extra_id_99>"], "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "t5-base"}
train_results.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "epoch": 200.0,
+ "init_mem_cpu_alloc_delta": -542822400,
+ "init_mem_cpu_peaked_delta": 542826496,
+ "init_mem_gpu_alloc_delta": 891528192,
+ "init_mem_gpu_peaked_delta": 0,
+ "train_mem_cpu_alloc_delta": 945156096,
+ "train_mem_cpu_peaked_delta": 40960,
+ "train_mem_gpu_alloc_delta": 40216576,
+ "train_mem_gpu_peaked_delta": 4624272384,
+ "train_runtime": 2488.3254,
+ "train_samples": 488,
+ "train_samples_per_second": 1.286
+ }
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bba84e8f9278fc5a18ab91934681548b3029069868b066f1cea7cf3a287980de
+ size 3384