tiedeman committed
Commit 6ba1d32
1 Parent(s): c713545

Initial commit
.gitattributes CHANGED
@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.spm filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,387 @@
---
language:
- be
- bg
- hr
- ru
- sh
- sl
- sr_Cyrl
- sr_Latn
- uk
- zle
- zls

tags:
- translation

license: cc-by-4.0
model-index:
- name: opus-mt-tc-big-zls-zle
  results:
  - task:
      name: Translation bul-rus
      type: translation
      args: bul-rus
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul rus devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.6
  - task:
      name: Translation bul-ukr
      type: translation
      args: bul-ukr
    dataset:
      name: flores101-devtest
      type: flores_101
      args: bul ukr devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.9
  - task:
      name: Translation hrv-rus
      type: translation
      args: hrv-rus
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv rus devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 23.5
  - task:
      name: Translation hrv-ukr
      type: translation
      args: hrv-ukr
    dataset:
      name: flores101-devtest
      type: flores_101
      args: hrv ukr devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 21.9
  - task:
      name: Translation mkd-rus
      type: translation
      args: mkd-rus
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd rus devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.3
  - task:
      name: Translation mkd-ukr
      type: translation
      args: mkd-ukr
    dataset:
      name: flores101-devtest
      type: flores_101
      args: mkd ukr devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.5
  - task:
      name: Translation slv-rus
      type: translation
      args: slv-rus
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv rus devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 22.0
  - task:
      name: Translation slv-ukr
      type: translation
      args: slv-ukr
    dataset:
      name: flores101-devtest
      type: flores_101
      args: slv ukr devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 20.2
  - task:
      name: Translation srp_Cyrl-rus
      type: translation
      args: srp_Cyrl-rus
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl rus devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 25.7
  - task:
      name: Translation srp_Cyrl-ukr
      type: translation
      args: srp_Cyrl-ukr
    dataset:
      name: flores101-devtest
      type: flores_101
      args: srp_Cyrl ukr devtest
    metrics:
    - name: BLEU
      type: bleu
      value: 24.4
  - task:
      name: Translation bul-rus
      type: translation
      args: bul-rus
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: bul-rus
    metrics:
    - name: BLEU
      type: bleu
      value: 52.6
  - task:
      name: Translation bul-ukr
      type: translation
      args: bul-ukr
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: bul-ukr
    metrics:
    - name: BLEU
      type: bleu
      value: 53.3
  - task:
      name: Translation hbs-rus
      type: translation
      args: hbs-rus
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hbs-rus
    metrics:
    - name: BLEU
      type: bleu
      value: 58.5
  - task:
      name: Translation hbs-ukr
      type: translation
      args: hbs-ukr
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hbs-ukr
    metrics:
    - name: BLEU
      type: bleu
      value: 52.3
  - task:
      name: Translation hrv-ukr
      type: translation
      args: hrv-ukr
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: hrv-ukr
    metrics:
    - name: BLEU
      type: bleu
      value: 50.0
  - task:
      name: Translation slv-rus
      type: translation
      args: slv-rus
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: slv-rus
    metrics:
    - name: BLEU
      type: bleu
      value: 27.3
  - task:
      name: Translation srp_Cyrl-rus
      type: translation
      args: srp_Cyrl-rus
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: srp_Cyrl-rus
    metrics:
    - name: BLEU
      type: bleu
      value: 56.2
  - task:
      name: Translation srp_Cyrl-ukr
      type: translation
      args: srp_Cyrl-ukr
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: srp_Cyrl-ukr
    metrics:
    - name: BLEU
      type: bleu
      value: 51.8
  - task:
      name: Translation srp_Latn-rus
      type: translation
      args: srp_Latn-rus
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: srp_Latn-rus
    metrics:
    - name: BLEU
      type: bleu
      value: 60.1
  - task:
      name: Translation srp_Latn-ukr
      type: translation
      args: srp_Latn-ukr
    dataset:
      name: tatoeba-test-v2021-08-07
      type: tatoeba_mt
      args: srp_Latn-ukr
    metrics:
    - name: BLEU
      type: bleu
      value: 55.8
---
# opus-mt-tc-big-zls-zle

Neural machine translation model for translating from South Slavic languages (zls) to East Slavic languages (zle).

This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus-MT), an effort to make neural machine translation models widely available and accessible for many languages in the world. All models are originally trained using the amazing framework of [Marian NMT](https://marian-nmt.github.io/), an efficient NMT implementation written in pure C++. The models have been converted to PyTorch using the transformers library by Hugging Face. Training data is taken from [OPUS](https://opus.nlpl.eu/) and training pipelines use the procedures of [OPUS-MT-train](https://github.com/Helsinki-NLP/Opus-MT-train).

* Publications: [OPUS-MT – Building open translation services for the World](https://aclanthology.org/2020.eamt-1.61/) and [The Tatoeba Translation Challenge – Realistic Data Sets for Low Resource and Multilingual MT](https://aclanthology.org/2020.wmt-1.139/) (please cite if you use this model)

```
@inproceedings{tiedemann-thottingal-2020-opus,
    title = "{OPUS}-{MT} {--} Building open translation services for the World",
    author = {Tiedemann, J{\"o}rg and Thottingal, Santhosh},
    booktitle = "Proceedings of the 22nd Annual Conference of the European Association for Machine Translation",
    month = nov,
    year = "2020",
    address = "Lisboa, Portugal",
    publisher = "European Association for Machine Translation",
    url = "https://aclanthology.org/2020.eamt-1.61",
    pages = "479--480",
}

@inproceedings{tiedemann-2020-tatoeba,
    title = "The Tatoeba Translation Challenge {--} Realistic Data Sets for Low Resource and Multilingual {MT}",
    author = {Tiedemann, J{\"o}rg},
    booktitle = "Proceedings of the Fifth Conference on Machine Translation",
    month = nov,
    year = "2020",
    address = "Online",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2020.wmt-1.139",
    pages = "1174--1182",
}
```

## Model info

* Release: 2022-03-23
* source language(s): bul hbs hrv slv srp_Cyrl srp_Latn
* target language(s): bel rus ukr
* valid target language labels: >>bel<< >>rus<< >>ukr<<
* model: transformer-big
* data: opusTCv20210807+bt ([source](https://github.com/Helsinki-NLP/Tatoeba-Challenge))
* tokenization: SentencePiece (spm32k,spm32k)
* original model: [opusTCv20210807+bt_transformer-big_2022-03-23.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-zle/opusTCv20210807+bt_transformer-big_2022-03-23.zip)
* more information about released models: [OPUS-MT zls-zle README](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/zls-zle/README.md)
* more information about the model: [MarianMT](https://huggingface.co/docs/transformers/model_doc/marian)

This is a multilingual translation model with multiple target languages. A sentence-initial language token is required in the form of `>>id<<` (id = valid target language ID), e.g. `>>bel<<`.

## Usage

A short example:

```python
from transformers import MarianMTModel, MarianTokenizer

src_text = [
    ">>rus<< Gdje je brigadir?",
    ">>ukr<< Zovem se Seli."
]

# the converted model is published on the Hugging Face Hub:
model_name = "Helsinki-NLP/opus-mt-tc-big-zls-zle"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
translated = model.generate(**tokenizer(src_text, return_tensors="pt", padding=True))

for t in translated:
    print(tokenizer.decode(t, skip_special_tokens=True))

# expected output:
#   Где бригадир?
#   Мене звати Саллі.
```

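The valid target-language tokens can also be read off the tokenizer itself. A minimal sketch, assuming the `supported_language_codes` property of `MarianTokenizer` in the installed transformers version:

```python
# A sketch: list the >>id<< tokens this tokenizer accepts
# (continues from the example above).
print(tokenizer.supported_language_codes)
# expected to include '>>bel<<', '>>rus<<' and '>>ukr<<'
```
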
You can also use OPUS-MT models with the transformers pipelines, for example:

```python
from transformers import pipeline

pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-tc-big-zls-zle")
print(pipe(">>rus<< Gdje je brigadir?"))

# expected output: Где бригадир?
```

## Benchmarks

* test set translations: [opusTCv20210807+bt_transformer-big_2022-03-23.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-zle/opusTCv20210807+bt_transformer-big_2022-03-23.test.txt)
* test set scores: [opusTCv20210807+bt_transformer-big_2022-03-23.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/zls-zle/opusTCv20210807+bt_transformer-big_2022-03-23.eval.txt)
* benchmark results: [benchmark_results.txt](benchmark_results.txt)
* benchmark output: [benchmark_translations.zip](benchmark_translations.zip)

| langpair | testset | chr-F | BLEU | #sent | #words |
|----------|---------|-------|-------|-------|--------|
| bul-rus | tatoeba-test-v2021-08-07 | 0.71467 | 52.6 | 1247 | 7870 |
| bul-ukr | tatoeba-test-v2021-08-07 | 0.71757 | 53.3 | 1020 | 4932 |
| hbs-rus | tatoeba-test-v2021-08-07 | 0.74593 | 58.5 | 2500 | 14213 |
| hbs-ukr | tatoeba-test-v2021-08-07 | 0.70244 | 52.3 | 942 | 4961 |
| hrv-ukr | tatoeba-test-v2021-08-07 | 0.68931 | 50.0 | 389 | 2232 |
| slv-rus | tatoeba-test-v2021-08-07 | 0.42255 | 27.3 | 657 | 4056 |
| srp_Cyrl-rus | tatoeba-test-v2021-08-07 | 0.74112 | 56.2 | 881 | 5117 |
| srp_Cyrl-ukr | tatoeba-test-v2021-08-07 | 0.68915 | 51.8 | 205 | 1061 |
| srp_Latn-rus | tatoeba-test-v2021-08-07 | 0.75340 | 60.1 | 1483 | 8311 |
| srp_Latn-ukr | tatoeba-test-v2021-08-07 | 0.73106 | 55.8 | 348 | 1668 |
| bul-rus | flores101-devtest | 0.54226 | 24.6 | 1012 | 23295 |
| bul-ukr | flores101-devtest | 0.53382 | 22.9 | 1012 | 22810 |
| hrv-rus | flores101-devtest | 0.51726 | 23.5 | 1012 | 23295 |
| hrv-ukr | flores101-devtest | 0.51011 | 21.9 | 1012 | 22810 |
| mkd-bel | flores101-devtest | 0.40885 | 10.7 | 1012 | 24829 |
| mkd-rus | flores101-devtest | 0.52509 | 24.3 | 1012 | 23295 |
| mkd-ukr | flores101-devtest | 0.52021 | 22.5 | 1012 | 22810 |
| slv-rus | flores101-devtest | 0.50349 | 22.0 | 1012 | 23295 |
| slv-ukr | flores101-devtest | 0.49156 | 20.2 | 1012 | 22810 |
| srp_Cyrl-rus | flores101-devtest | 0.53656 | 25.7 | 1012 | 23295 |
| srp_Cyrl-ukr | flores101-devtest | 0.53623 | 24.4 | 1012 | 22810 |

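The scores above can in principle be recomputed from the released test set translations with [sacrebleu](https://github.com/mjpost/sacrebleu). A minimal sketch; `hyp.txt` and `ref.txt` are hypothetical file names for one language pair's system output and reference:

```python
# A sketch for re-scoring one language pair; hyp.txt and ref.txt are
# placeholder file names for system output and reference translations.
from sacrebleu.metrics import BLEU, CHRF

with open("hyp.txt", encoding="utf-8") as f:
    hyps = [line.rstrip("\n") for line in f]
with open("ref.txt", encoding="utf-8") as f:
    refs = [line.rstrip("\n") for line in f]

print(BLEU().corpus_score(hyps, [refs]))   # corpus BLEU, as in the table
print(CHRF().corpus_score(hyps, [refs]))   # chrF; sacrebleu reports a 0-100
                                           # scale, the table above uses 0-1
```
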
## Acknowledgements

The work is supported by the [European Language Grid](https://www.european-language-grid.eu/) as [pilot project 2866](https://live.european-language-grid.eu/catalogue/#/resource/projects/2866), by the [FoTran project](https://www.helsinki.fi/en/researchgroups/natural-language-understanding-with-cross-lingual-grounding), funded by the European Research Council (ERC) under the European Union’s Horizon 2020 research and innovation programme (grant agreement No 771113), and the [MeMAD project](https://memad.eu/), funded by the European Union’s Horizon 2020 Research and Innovation Programme under grant agreement No 780069. We are also grateful for the generous computational resources and IT infrastructure provided by [CSC -- IT Center for Science](https://www.csc.fi/), Finland.

## Model conversion info

* transformers version: 4.16.2
* OPUS-MT git hash: 1bdabf7
* port time: Thu Mar 24 04:08:51 EET 2022
* port machine: LM0-400-22516.local
benchmark_results.txt ADDED
@@ -0,0 +1,52 @@
bul-bel flores101-dev 0.26451 5.4 997 23996
bul-rus flores101-dev 0.53809 24.5 997 22657
bul-ukr flores101-dev 0.53201 23.0 997 21841
hrv-bel flores101-dev 0.37822 9.5 997 23996
hrv-rus flores101-dev 0.52434 24.3 997 22657
hrv-ukr flores101-dev 0.50238 21.3 997 21841
mkd-bel flores101-dev 0.39938 10.4 997 23996
mkd-rus flores101-dev 0.52767 24.4 997 22657
mkd-ukr flores101-dev 0.51569 22.3 997 21841
slv-bel flores101-dev 0.26023 5.2 997 23996
slv-rus flores101-dev 0.50334 22.5 997 22657
slv-ukr flores101-dev 0.48671 19.8 997 21841
srp_Cyrl-bel flores101-dev 0.28408 6.5 997 23996
srp_Cyrl-rus flores101-dev 0.54202 26.3 997 22657
srp_Cyrl-ukr flores101-dev 0.53277 23.9 997 21841
bul-bel flores101-devtest 0.26832 5.7 1012 24829
bul-rus flores101-devtest 0.54226 24.6 1012 23295
bul-ukr flores101-devtest 0.53382 22.9 1012 22810
hrv-bel flores101-devtest 0.38693 10.0 1012 24829
hrv-rus flores101-devtest 0.51726 23.5 1012 23295
hrv-ukr flores101-devtest 0.51011 21.9 1012 22810
mkd-bel flores101-devtest 0.40885 10.7 1012 24829
mkd-rus flores101-devtest 0.52509 24.3 1012 23295
mkd-ukr flores101-devtest 0.52021 22.5 1012 22810
slv-bel flores101-devtest 0.25656 5.3 1012 24829
slv-rus flores101-devtest 0.50349 22.0 1012 23295
slv-ukr flores101-devtest 0.49156 20.2 1012 22810
srp_Cyrl-bel flores101-devtest 0.27510 5.9 1012 24829
srp_Cyrl-rus flores101-devtest 0.53656 25.7 1012 23295
srp_Cyrl-ukr flores101-devtest 0.53623 24.4 1012 22810
hbs-ukr tatoeba-test-v2020-07-28 0.70273 52.3 941 4959
slv-rus tatoeba-test-v2020-07-28 0.55682 38.0 378 2101
slv-ukr tatoeba-test-v2020-07-28 0.33072 19.1 848 3906
srp_Cyrl-ukr tatoeba-test-v2020-07-28 0.69049 51.9 204 1059
bul-ukr tatoeba-test-v2021-03-30 0.71799 53.4 1022 4943
hbs-ukr tatoeba-test-v2021-03-30 0.70251 52.2 953 5022
hrv-ukr tatoeba-test-v2021-03-30 0.68797 49.8 393 2258
slv-rus tatoeba-test-v2021-03-30 0.54542 37.6 447 2514
slv-ukr tatoeba-test-v2021-03-30 0.32763 18.6 916 4237
srp_Cyrl-ukr tatoeba-test-v2021-03-30 0.69237 52.1 209 1081
srp_Latn-ukr tatoeba-test-v2021-03-30 0.73129 55.7 351 1683
bul-rus tatoeba-test-v2021-08-07 0.71467 52.6 1247 7870
bul-ukr tatoeba-test-v2021-08-07 0.71757 53.3 1020 4932
hbs-rus tatoeba-test-v2021-08-07 0.74593 58.5 2500 14213
hbs-ukr tatoeba-test-v2021-08-07 0.70244 52.3 942 4961
hrv-ukr tatoeba-test-v2021-08-07 0.68931 50.0 389 2232
slv-rus tatoeba-test-v2021-08-07 0.42255 27.3 657 4056
slv-ukr tatoeba-test-v2021-08-07 0.31873 16.2 915 4370
srp_Cyrl-rus tatoeba-test-v2021-08-07 0.74112 56.2 881 5117
srp_Cyrl-ukr tatoeba-test-v2021-08-07 0.68915 51.8 205 1061
srp_Latn-rus tatoeba-test-v2021-08-07 0.75340 60.1 1483 8311
srp_Latn-ukr tatoeba-test-v2021-08-07 0.73106 55.8 348 1668
benchmark_translations.zip ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4921168d9fbdb3c4ea5d37de1ebdd236b2687bffe77a2e46f8602d86491a9080
size 6603294
config.json ADDED
@@ -0,0 +1,45 @@
{
  "activation_dropout": 0.0,
  "activation_function": "relu",
  "architectures": [
    "MarianMTModel"
  ],
  "attention_dropout": 0.0,
  "bad_words_ids": [
    [
      57710
    ]
  ],
  "bos_token_id": 0,
  "classifier_dropout": 0.0,
  "d_model": 1024,
  "decoder_attention_heads": 16,
  "decoder_ffn_dim": 4096,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 6,
  "decoder_start_token_id": 57710,
  "decoder_vocab_size": 57711,
  "dropout": 0.1,
  "encoder_attention_heads": 16,
  "encoder_ffn_dim": 4096,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 6,
  "eos_token_id": 10977,
  "forced_eos_token_id": 10977,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "max_length": 512,
  "max_position_embeddings": 1024,
  "model_type": "marian",
  "normalize_embedding": false,
  "num_beams": 4,
  "num_hidden_layers": 6,
  "pad_token_id": 57710,
  "scale_embedding": true,
  "share_encoder_decoder_embeddings": true,
  "static_position_embeddings": true,
  "torch_dtype": "float16",
  "transformers_version": "4.18.0.dev0",
  "use_cache": true,
  "vocab_size": 57711
}
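
The configuration above describes a transformer-big geometry (d_model 1024, 16 attention heads, 6+6 layers) with a float16 checkpoint. A minimal sketch for inspecting these fields via transformers, using the hub ID from the model card above:

```python
# A sketch: load and print key fields of the converted model's config.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("Helsinki-NLP/opus-mt-tc-big-zls-zle")
print(config.model_type)                             # "marian"
print(config.d_model)                                # 1024
print(config.encoder_layers, config.decoder_layers)  # 6 6
print(config.vocab_size)                             # 57711
```
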
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c0847bb7d69c42c3dc2655099b439495e2295aae7e8f2479b78ef8a8a566d0ff
size 589306115
source.spm ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a0c1eee9419b953aa06d0719417d0945f8fe635a6584f93b5eb8734b988cf980
size 904350
special_tokens_map.json ADDED
@@ -0,0 +1 @@
{"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
target.spm ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ee7bf2911b3630257f6a10512279e320a7631459cbccf7d60715f9fe9f6147bb
size 1007925
tokenizer_config.json ADDED
@@ -0,0 +1 @@
{"source_lang": "zls", "target_lang": "zle", "unk_token": "<unk>", "eos_token": "</s>", "pad_token": "<pad>", "model_max_length": 512, "sp_model_kwargs": {}, "separate_vocabs": false, "special_tokens_map_file": null, "name_or_path": "marian-models/opusTCv20210807+bt_transformer-big_2022-03-23/zls-zle", "tokenizer_class": "MarianTokenizer"}
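
This configuration wires `MarianTokenizer` to the two SentencePiece models above with a shared vocabulary (`"separate_vocabs": false`). A minimal sketch of a round trip through the tokenizer, using the hub ID from the model card above:

```python
# A sketch: tokenize a source sentence and inspect the result.
from transformers import MarianTokenizer

tok = MarianTokenizer.from_pretrained("Helsinki-NLP/opus-mt-tc-big-zls-zle")
ids = tok(">>ukr<< Zovem se Seli.")["input_ids"]
print(tok.convert_ids_to_tokens(ids)[0])  # '>>ukr<<' target token comes first
print(ids[-1] == tok.eos_token_id)        # True: </s> is appended
```
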
vocab.json ADDED
The diff for this file is too large to render.