tiedeman committed on
Commit
c5d0bf0
1 Parent(s): d2fa7a5

Initial commit

Browse files
Files changed (5) hide show
  1. README.md +1 -2
  2. config.json +6 -10
  3. generation_config.json +16 -0
  4. model.safetensors +3 -0
  5. pytorch_model.bin +2 -2
README.md CHANGED
@@ -108,7 +108,6 @@ This model is part of the [OPUS-MT project](https://github.com/Helsinki-NLP/Opus
108
  - **Language(s):**
109
  - Source Language(s): aar acm afb amh apc ara arc arq arz bcw byn cop daa dsh gde gnd hau hbo heb hig irk jpa kab ker kqp ktb kxc lln lme meq mfh mfi mfk mif mlt mpg mqb muy oar orm pbi phn rif sgw shi shy som sur syc syr taq tig tir tmc tmh tmr ttr tzm wal xed zgh
110
  - Target Language(s): eng
111
- - Valid Target Language Labels:
112
  - **Original Model**: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-08-17.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/afa-eng/opusTCv20230926max50+bt+jhubc_transformer-big_2024-08-17.zip)
113
  - **Resources for more information:**
114
  - [OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/afa-eng/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-08-17)
@@ -233,5 +232,5 @@ The work is supported by the [HPLT project](https://hplt-project.org/), funded b
233
 
234
  * transformers version: 4.45.1
235
  * OPUS-MT git hash: a44ab31
236
- * port time: Sun Oct 6 21:19:30 EEST 2024
237
  * port machine: LM0-400-22516.local
 
108
  - **Language(s):**
109
  - Source Language(s): aar acm afb amh apc ara arc arq arz bcw byn cop daa dsh gde gnd hau hbo heb hig irk jpa kab ker kqp ktb kxc lln lme meq mfh mfi mfk mif mlt mpg mqb muy oar orm pbi phn rif sgw shi shy som sur syc syr taq tig tir tmc tmh tmr ttr tzm wal xed zgh
110
  - Target Language(s): eng
 
111
  - **Original Model**: [opusTCv20230926max50+bt+jhubc_transformer-big_2024-08-17.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/afa-eng/opusTCv20230926max50+bt+jhubc_transformer-big_2024-08-17.zip)
112
  - **Resources for more information:**
113
  - [OPUS-MT dashboard](https://opus.nlpl.eu/dashboard/index.php?pkg=opusmt&test=all&scoreslang=all&chart=standard&model=Tatoeba-MT-models/afa-eng/opusTCv20230926max50%2Bbt%2Bjhubc_transformer-big_2024-08-17)
 
232
 
233
  * transformers version: 4.45.1
234
  * OPUS-MT git hash: a44ab31
235
+ * port time: Sun Oct 6 21:43:08 EEST 2024
236
  * port machine: LM0-400-22516.local
config.json CHANGED
@@ -1,15 +1,11 @@
1
  {
 
2
  "activation_dropout": 0.0,
3
  "activation_function": "relu",
4
  "architectures": [
5
  "MarianMTModel"
6
  ],
7
  "attention_dropout": 0.0,
8
- "bad_words_ids": [
9
- [
10
- 61388
11
- ]
12
- ],
13
  "bos_token_id": 0,
14
  "classifier_dropout": 0.0,
15
  "d_model": 1024,
@@ -25,21 +21,21 @@
25
  "encoder_layerdrop": 0.0,
26
  "encoder_layers": 6,
27
  "eos_token_id": 835,
28
- "forced_eos_token_id": 835,
29
  "init_std": 0.02,
30
  "is_encoder_decoder": true,
31
- "max_length": 512,
32
  "max_position_embeddings": 1024,
33
  "model_type": "marian",
34
  "normalize_embedding": false,
35
- "num_beams": 4,
36
  "num_hidden_layers": 6,
37
  "pad_token_id": 61388,
38
  "scale_embedding": true,
39
  "share_encoder_decoder_embeddings": true,
40
  "static_position_embeddings": true,
41
- "torch_dtype": "float16",
42
- "transformers_version": "4.18.0.dev0",
43
  "use_cache": true,
44
  "vocab_size": 61389
45
  }
 
1
  {
2
+ "_name_or_path": "opus-mt-tc-bible-big-afa-en",
3
  "activation_dropout": 0.0,
4
  "activation_function": "relu",
5
  "architectures": [
6
  "MarianMTModel"
7
  ],
8
  "attention_dropout": 0.0,
 
 
 
 
 
9
  "bos_token_id": 0,
10
  "classifier_dropout": 0.0,
11
  "d_model": 1024,
 
21
  "encoder_layerdrop": 0.0,
22
  "encoder_layers": 6,
23
  "eos_token_id": 835,
24
+ "forced_eos_token_id": null,
25
  "init_std": 0.02,
26
  "is_encoder_decoder": true,
27
+ "max_length": null,
28
  "max_position_embeddings": 1024,
29
  "model_type": "marian",
30
  "normalize_embedding": false,
31
+ "num_beams": null,
32
  "num_hidden_layers": 6,
33
  "pad_token_id": 61388,
34
  "scale_embedding": true,
35
  "share_encoder_decoder_embeddings": true,
36
  "static_position_embeddings": true,
37
+ "torch_dtype": "float32",
38
+ "transformers_version": "4.45.1",
39
  "use_cache": true,
40
  "vocab_size": 61389
41
  }
generation_config.json ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bad_words_ids": [
4
+ [
5
+ 61388
6
+ ]
7
+ ],
8
+ "bos_token_id": 0,
9
+ "decoder_start_token_id": 61388,
10
+ "eos_token_id": 835,
11
+ "forced_eos_token_id": 835,
12
+ "max_length": 512,
13
+ "num_beams": 4,
14
+ "pad_token_id": 61388,
15
+ "transformers_version": "4.45.1"
16
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50877591d55dc8d738437a2ed1be89836d0a35d040a2e4eaa50c5fd54b71d847
3
+ size 957154020
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:02602a02e6bd7f4147f67725c7f153e20913fd60470db4c55006d1ab07ce0d91
3
- size 604378563
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1c731267185face683281cc7bc2b685f94b17fcbdb3223edfb04dbe9d4a16c9a
3
+ size 957205253