Model save
- README.md +0 -5
- config.json +35 -40
- model.safetensors +2 -2
- runs/Mar16_04-14-07_13e96b604141/events.out.tfevents.1710562451.13e96b604141.322.0 +3 -0
- training_args.bin +1 -1
README.md
CHANGED
@@ -3,8 +3,6 @@ license: apache-2.0
 base_model: mse30/bart-base-finetuned-pubmed
 tags:
 - generated_from_trainer
-metrics:
-- accuracy
 model-index:
 - name: unipelt
   results: []
@@ -16,9 +14,6 @@ should probably proofread and complete it, then remove this comment. -->
 # unipelt
 
 This model is a fine-tuned version of [mse30/bart-base-finetuned-pubmed](https://huggingface.co/mse30/bart-base-finetuned-pubmed) on an unknown dataset.
-It achieves the following results on the evaluation set:
-- Loss: 7.2003
-- Accuracy: 0.0659
 
 ## Model description
 
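For context, a minimal sketch of how a checkpoint like this is typically used; this is an assumption rather than documented usage for this repo. The repo id below is a placeholder (the commit view does not show it), and since config.json carries an adapters block, fully restoring the adapter weights may require the adapters library rather than plain transformers:

```python
# Hypothetical usage sketch; "<user>/unipelt" is a placeholder repo id.
from transformers import pipeline

summarizer = pipeline("summarization", model="<user>/unipelt")

abstract = "Background: ..."  # a PubMed-style abstract to condense
print(summarizer(abstract, max_length=128, min_length=16)[0]["summary_text"])
```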
config.json
CHANGED
@@ -4,13 +4,10 @@
   "activation_function": "gelu",
   "adapters": {
     "adapters": {
-      "adapter1": "847a59cd9f1cbf0e",
-      "adapter2": "847a59cd9f1cbf0e",
-      "adapter3": "847a59cd9f1cbf0e",
-      "unipelt": "65e04f01fd344156"
+      "unipelt": "0815dd75d5ff5808"
     },
     "config_map": {
-      "65e04f01fd344156": {
+      "0815dd75d5ff5808": {
         "architecture": "union",
         "configs": [
           {
@@ -42,46 +39,44 @@
             "prefix_length": 10,
             "shared_gating": true,
             "use_gating": true
+          },
+          {
+            "adapter_residual_before_ln": false,
+            "cross_adapter": false,
+            "factorized_phm_W": true,
+            "factorized_phm_rule": false,
+            "hypercomplex_nonlinearity": "glorot-uniform",
+            "init_weights": "bert",
+            "inv_adapter": null,
+            "inv_adapter_reduction_factor": null,
+            "is_parallel": false,
+            "learn_phm": true,
+            "leave_out": [],
+            "ln_after": false,
+            "ln_before": false,
+            "mh_adapter": false,
+            "non_linearity": "relu",
+            "original_ln_after": true,
+            "original_ln_before": true,
+            "output_adapter": true,
+            "phm_bias": true,
+            "phm_c_init": "normal",
+            "phm_dim": 4,
+            "phm_init_range": 0.0001,
+            "phm_layer": false,
+            "phm_rank": 1,
+            "reduction_factor": 16,
+            "residual_before_ln": true,
+            "scaling": 1.0,
+            "shared_W_phm": false,
+            "shared_phm_rule": true,
+            "use_gating": true
           }
         ]
-      },
-      "847a59cd9f1cbf0e": {
-        "adapter_residual_before_ln": false,
-        "cross_adapter": false,
-        "factorized_phm_W": true,
-        "factorized_phm_rule": false,
-        "hypercomplex_nonlinearity": "glorot-uniform",
-        "init_weights": "bert",
-        "inv_adapter": null,
-        "inv_adapter_reduction_factor": null,
-        "is_parallel": false,
-        "learn_phm": true,
-        "leave_out": [],
-        "ln_after": false,
-        "ln_before": false,
-        "mh_adapter": false,
-        "non_linearity": "relu",
-        "original_ln_after": true,
-        "original_ln_before": true,
-        "output_adapter": true,
-        "phm_bias": true,
-        "phm_c_init": "normal",
-        "phm_dim": 4,
-        "phm_init_range": 0.0001,
-        "phm_layer": false,
-        "phm_rank": 1,
-        "reduction_factor": 16,
-        "residual_before_ln": true,
-        "scaling": 1.0,
-        "shared_W_phm": false,
-        "shared_phm_rule": true,
-        "use_gating": true
       }
     },
     "fusion_config_map": {},
-    "fusions": {
-      "adapter1,adapter2,adapter3": "dynamic"
-    }
+    "fusions": {}
   },
   "add_bias_logits": false,
   "add_final_layer_norm": false,
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size 
+oid sha256:9929a3f9944a7877ee27cbf23be5300c743831eb9da6bf3f76f766d986216192
+size 625051084
runs/Mar16_04-14-07_13e96b604141/events.out.tfevents.1710562451.13e96b604141.322.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5bad1c2d9fcfb20ffc75442666af32ee62b29876cdc6d8040d5bfb98d19af46
+size 5807
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:323920350a32a56049c4982458db48b80028ef6c3e17431e268a26cdb321f260
 size 4792
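The last three files are Git LFS pointers (version, oid sha256, size): the repo stores only these references, and the Hub serves the actual binaries. A minimal sketch of fetching one of them and inspecting the saved training arguments; the repo id is a placeholder, since the commit view does not show it:

```python
# Hypothetical sketch: download an LFS-backed file from the Hub and inspect it.
# "<user>/unipelt" is a placeholder for the actual repo id.
import torch
from huggingface_hub import hf_hub_download

args_path = hf_hub_download(repo_id="<user>/unipelt", filename="training_args.bin")

# training_args.bin written by the transformers Trainer is a pickled TrainingArguments object.
training_args = torch.load(args_path, weights_only=False)
print(training_args.output_dir, training_args.num_train_epochs)
```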