Training complete!
- README.md +94 -3
- model.safetensors +1 -1
- runs/Nov08_21-19-04_5c31d1734f80/events.out.tfevents.1731100745.5c31d1734f80.511.1 +3 -0
- runs/Nov09_12-39-26_d305c3bf3f74/events.out.tfevents.1731155974.d305c3bf3f74.591.0 +3 -0
- runs/Nov09_12-39-26_d305c3bf3f74/events.out.tfevents.1731178116.d305c3bf3f74.591.1 +3 -0
- tokenizer_config.json +0 -8
- training_args.bin +2 -2
README.md
CHANGED
@@ -17,7 +17,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [google/pegasus-cnn_dailymail](https://huggingface.co/google/pegasus-cnn_dailymail) on the samsum dataset.
 It achieves the following results on the evaluation set:
-- Loss:
+- Loss: 6.0105
 
 ## Model description
 
@@ -51,12 +51,103 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step | Validation Loss |
 |:-------------:|:------:|:----:|:---------------:|
-
+| 10.6079 | 0.0109 | 10 | 10.7159 |
+| 10.5191 | 0.0217 | 20 | 10.6397 |
+| 10.5539 | 0.0326 | 30 | 10.5210 |
+| 10.5117 | 0.0434 | 40 | 10.3669 |
+| 10.3111 | 0.0543 | 50 | 10.1941 |
+| 10.2 | 0.0652 | 60 | 10.0230 |
+| 10.1121 | 0.0760 | 70 | 9.8584 |
+| 10.0677 | 0.0869 | 80 | 9.7129 |
+| 9.7897 | 0.0977 | 90 | 9.5781 |
+| 9.744 | 0.1086 | 100 | 9.4538 |
+| 9.533 | 0.1195 | 110 | 9.3431 |
+| 9.5248 | 0.1303 | 120 | 9.2552 |
+| 9.3331 | 0.1412 | 130 | 9.1575 |
+| 9.2551 | 0.1520 | 140 | 9.0751 |
+| 9.2382 | 0.1629 | 150 | 8.9993 |
+| 9.1323 | 0.1738 | 160 | 8.9287 |
+| 9.0574 | 0.1846 | 170 | 8.8628 |
+| 9.0137 | 0.1955 | 180 | 8.7964 |
+| 8.9097 | 0.2064 | 190 | 8.7340 |
+| 8.8268 | 0.2172 | 200 | 8.6765 |
+| 8.7116 | 0.2281 | 210 | 8.6173 |
+| 8.7483 | 0.2389 | 220 | 8.5521 |
+| 8.6252 | 0.2498 | 230 | 8.4884 |
+| 8.5844 | 0.2607 | 240 | 8.4275 |
+| 8.4614 | 0.2715 | 250 | 8.3626 |
+| 8.4375 | 0.2824 | 260 | 8.2901 |
+| 8.445 | 0.2932 | 270 | 8.2102 |
+| 8.2966 | 0.3041 | 280 | 8.1135 |
+| 8.0934 | 0.3150 | 290 | 8.0113 |
+| 8.1551 | 0.3258 | 300 | 7.8961 |
+| 8.0542 | 0.3367 | 310 | 7.7751 |
+| 8.0183 | 0.3475 | 320 | 7.6504 |
+| 7.9352 | 0.3584 | 330 | 7.5371 |
+| 7.7615 | 0.3693 | 340 | 7.4084 |
+| 7.6484 | 0.3801 | 350 | 7.2715 |
+| 7.5845 | 0.3910 | 360 | 7.1340 |
+| 7.4799 | 0.4018 | 370 | 7.0563 |
+| 7.3388 | 0.4127 | 380 | 6.9495 |
+| 7.1078 | 0.4236 | 390 | 6.8582 |
+| 7.0819 | 0.4344 | 400 | 6.7707 |
+| 7.0465 | 0.4453 | 410 | 6.6897 |
+| 6.9038 | 0.4561 | 420 | 6.6184 |
+| 6.9359 | 0.4670 | 430 | 6.5533 |
+| 6.8038 | 0.4779 | 440 | 6.4962 |
+| 6.8648 | 0.4887 | 450 | 6.4452 |
+| 6.7589 | 0.4996 | 460 | 6.3966 |
+| 6.6804 | 0.5105 | 470 | 6.3588 |
+| 6.5603 | 0.5213 | 480 | 6.3247 |
+| 6.6002 | 0.5322 | 490 | 6.2937 |
+| 6.59 | 0.5430 | 500 | 6.2665 |
+| 6.5103 | 0.5539 | 510 | 6.2434 |
+| 6.4911 | 0.5648 | 520 | 6.2210 |
+| 6.4606 | 0.5756 | 530 | 6.2054 |
+| 6.5193 | 0.5865 | 540 | 6.1870 |
+| 6.4794 | 0.5973 | 550 | 6.1724 |
+| 6.4579 | 0.6082 | 560 | 6.1598 |
+| 6.3855 | 0.6191 | 570 | 6.1482 |
+| 6.3071 | 0.6299 | 580 | 6.1367 |
+| 6.4043 | 0.6408 | 590 | 6.1279 |
+| 6.354 | 0.6516 | 600 | 6.1188 |
+| 6.4038 | 0.6625 | 610 | 6.1114 |
+| 6.3475 | 0.6734 | 620 | 6.1008 |
+| 6.257 | 0.6842 | 630 | 6.0958 |
+| 6.4359 | 0.6951 | 640 | 6.0872 |
+| 6.2238 | 0.7059 | 650 | 6.0820 |
+| 6.3904 | 0.7168 | 660 | 6.0754 |
+| 6.2488 | 0.7277 | 670 | 6.0706 |
+| 6.2648 | 0.7385 | 680 | 6.0644 |
+| 6.303 | 0.7494 | 690 | 6.0601 |
+| 6.3133 | 0.7602 | 700 | 6.0553 |
+| 6.3229 | 0.7711 | 710 | 6.0516 |
+| 6.3165 | 0.7820 | 720 | 6.0469 |
+| 6.3353 | 0.7928 | 730 | 6.0438 |
+| 6.2581 | 0.8037 | 740 | 6.0391 |
+| 6.2688 | 0.8146 | 750 | 6.0361 |
+| 6.2193 | 0.8254 | 760 | 6.0342 |
+| 6.2247 | 0.8363 | 770 | 6.0305 |
+| 6.1711 | 0.8471 | 780 | 6.0284 |
+| 6.3126 | 0.8580 | 790 | 6.0259 |
+| 6.3182 | 0.8689 | 800 | 6.0239 |
+| 6.2298 | 0.8797 | 810 | 6.0214 |
+| 6.287 | 0.8906 | 820 | 6.0198 |
+| 6.2472 | 0.9014 | 830 | 6.0181 |
+| 6.205 | 0.9123 | 840 | 6.0165 |
+| 6.2359 | 0.9232 | 850 | 6.0147 |
+| 6.3013 | 0.9340 | 860 | 6.0135 |
+| 6.2035 | 0.9449 | 870 | 6.0129 |
+| 6.2529 | 0.9557 | 880 | 6.0122 |
+| 6.2043 | 0.9666 | 890 | 6.0114 |
+| 6.2785 | 0.9775 | 900 | 6.0110 |
+| 6.3018 | 0.9883 | 910 | 6.0106 |
+| 6.1616 | 0.9992 | 920 | 6.0105 |
 
 
 ### Framework versions
 
 - Transformers 4.44.2
-- Pytorch 2.
+- Pytorch 2.5.0+cu121
 - Datasets 3.1.0
 - Tokenizers 0.19.1
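Since the README documents a summarization fine-tune, a minimal usage sketch may help. The repo id below is a placeholder for wherever this checkpoint is hosted, and the generation limits are illustrative assumptions rather than values recorded in this commit.

```python
from transformers import pipeline

# Placeholder repo id: substitute the actual Hub repository for this
# checkpoint, or a local path containing model.safetensors and the tokenizer.
summarizer = pipeline("summarization", model="your-username/pegasus-samsum")

dialogue = (
    "Amanda: I baked cookies. Do you want some?\n"
    "Jerry: Sure!\n"
    "Amanda: I'll bring you some tomorrow :-)"
)

# max_length/min_length are illustrative choices for SAMSum-style dialogues,
# not settings taken from this training run.
print(summarizer(dialogue, max_length=60, min_length=5)[0]["summary_text"])
```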
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:350bcb46294043ec880a4d73a3bd459a53eaacd868c1f1177e3f818bb8d3a387
 size 2283652852
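The weights themselves live in Git LFS; this diff only updates the pointer's SHA-256. As a sketch (assuming the LFS object has been pulled into a local clone), the file can be checked against the new oid:

```python
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so the 2.3 GB checkpoint is never fully in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Path assumes you are inside a local clone of the model repository.
expected = "350bcb46294043ec880a4d73a3bd459a53eaacd868c1f1177e3f818bb8d3a387"
print(sha256_of("model.safetensors") == expected)
```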
runs/Nov08_21-19-04_5c31d1734f80/events.out.tfevents.1731100745.5c31d1734f80.511.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f051f75dee81a4a5a1874152d8e38c2fbf343f75d4d4be9d13b0dca10927afc
+size 14968
runs/Nov09_12-39-26_d305c3bf3f74/events.out.tfevents.1731155974.d305c3bf3f74.591.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6d2302c76a96c835fe44e5d1de349e965a22e2dc5d1f953cbafbb3831869dba
+size 50297
runs/Nov09_12-39-26_d305c3bf3f74/events.out.tfevents.1731178116.d305c3bf3f74.591.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1bfd18ddf30229530b2c976e7c816b2944f96a1cc1e2a99c54409be69ac4520
+size 359
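The files added under runs/ are TensorBoard event logs (also stored as LFS pointers) for the two training sessions. A minimal sketch for reading the logged scalars programmatically, assuming tensorboard is installed and the repo is cloned locally; the exact tag names depend on the Trainer version:

```python
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Assumed local path: one of the run directories added in this commit.
run_dir = "runs/Nov09_12-39-26_d305c3bf3f74"

acc = EventAccumulator(run_dir)
acc.Reload()  # parse all event files found under run_dir

print(acc.Tags()["scalars"])            # available scalar tags, e.g. train/eval loss
for event in acc.Scalars("eval/loss"):  # assumed tag name; check Tags() first
    print(event.step, event.value)
```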
tokenizer_config.json
CHANGED
@@ -847,14 +847,6 @@
 "rstrip": false,
 "single_word": false,
 "special": true
-},
-"106": {
-"content": "<n>",
-"lstrip": false,
-"normalized": false,
-"rstrip": false,
-"single_word": false,
-"special": false
 }
 },
 "additional_special_tokens": [
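The removed block drops the added-token entry for id 106 ("<n>"), the marker the pegasus-cnn_dailymail tokenizer emits for line breaks in generated summaries. A quick sketch for checking how a checkpoint resolves that id (shown against the base model, not this fine-tune):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("google/pegasus-cnn_dailymail")

# Per the removed config entry, id 106 maps to "<n>" in the base tokenizer;
# decoded summaries typically need "<n>" replaced with real newlines.
print(tok.convert_ids_to_tokens(106))
print(tok.decode([106]))
```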
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f24998d3c7e810e5a579432719d204022a6d58fb25286f2373ca6607d3986095
+size 5304
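training_args.bin is the Trainer's pickled TrainingArguments object, so the hyperparameters behind the table above can be inspected directly. A sketch, with the caveat that loading it runs pickle and should only be done for repos you trust:

```python
import torch

# Assumes a local clone of the repo; weights_only=False is needed because
# this is a pickled TrainingArguments object, not a plain tensor file.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)
```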