ffsouza committed
Commit eee001b
1 Parent(s): 9f94e16

Training in progress, step 500

.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,54 @@
+ {
+ "_name_or_path": "sshleifer/tiny-mbart",
+ "_num_labels": 3,
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "add_bias_logits": false,
+ "add_final_layer_norm": true,
+ "architectures": [
+ "MBartForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 0,
+ "classif_dropout": 0.0,
+ "classifier_dropout": 0.0,
+ "d_model": 2,
+ "decoder_attention_heads": 1,
+ "decoder_ffn_dim": 4,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 2,
+ "dropout": 0.1,
+ "encoder_attention_heads": 1,
+ "encoder_ffn_dim": 4,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 2,
+ "eos_token_id": 2,
+ "extra_pos_embeddings": 2,
+ "forced_eos_token_id": 2,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1",
+ "2": "LABEL_2"
+ },
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1,
+ "LABEL_2": 2
+ },
+ "max_position_embeddings": 1024,
+ "model_type": "mbart",
+ "normalize_before": true,
+ "normalize_embedding": true,
+ "num_beams": 2,
+ "num_hidden_layers": 12,
+ "output_past": true,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "static_position_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.12.5",
+ "use_cache": true,
+ "vocab_size": 250027
+ }
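
config.json describes a deliberately tiny mBART (d_model of 2, two encoder and two decoder layers, one attention head each), sized for quick smoke tests rather than translation quality. A minimal sketch for loading the model from these files, assuming the checkpoint is available under a hypothetical repo id "ffsouza/tiny-mbart" or an equivalent local directory:

# Sketch: load the tiny mBART described by config.json above.
# "ffsouza/tiny-mbart" is a placeholder; use the actual repo id or a local
# directory containing config.json and pytorch_model.bin.
from transformers import MBartForConditionalGeneration

model = MBartForConditionalGeneration.from_pretrained("ffsouza/tiny-mbart")
print(sum(p.numel() for p in model.parameters()))  # expect a very small count given d_model=2
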
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73f4aba95bd5e2a878122352bac0491ce81e91fb2423d5784b7d88254d89d806
+ size 3052101
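
The binary files in this commit are stored as Git LFS pointers: the repo tracks only the spec version, a SHA-256 object id, and the byte size, while the payload lives in LFS storage. A small sketch (not part of this repo) for verifying a locally pulled file against the oid recorded above:

# Sketch: check a file pulled via `git lfs pull` against its pointer's sha256 oid.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream in chunks so large checkpoints need not fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "73f4aba95bd5e2a878122352bac0491ce81e91fb2423d5784b7d88254d89d806"
print(sha256_of("pytorch_model.bin") == expected)
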
runs/Nov29_19-45-27_ffsouza/1638226737.5379748/events.out.tfevents.1638226737.ffsouza.13313.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf5d0af251525a5560be30f34e2c163b22204ba15753eda9ecc68301626fa288
+ size 4776
runs/Nov29_19-45-27_ffsouza/1638226865.5668783/events.out.tfevents.1638226865.ffsouza.13313.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ae4796fbd1b1dfa2b7b9ab62091ecb8b60c95076338ccf74459c82704a693b0
+ size 4776
runs/Nov29_19-45-27_ffsouza/events.out.tfevents.1638226737.ffsouza.13313.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f461af260da22af5d02f333f016542b3a6b0b1660dcd976165b4d43025c75cad
+ size 7762
runs/Nov29_20-01-18_ffsouza/1638226890.147298/events.out.tfevents.1638226890.ffsouza.13313.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1409ea1a99e4eb1a619c9432421ed1fdebe26042ad379118d6a3f43088aa67e0
+ size 4776
runs/Nov29_20-01-18_ffsouza/events.out.tfevents.1638226890.ffsouza.13313.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d3e98186b97a66b7b35860ba5d2f8dc3c57f2d4dd99d47050dd686c5585a8502
+ size 3899
runs/Nov29_20-06-42_ffsouza/1638227214.2790768/events.out.tfevents.1638227214.ffsouza.18902.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6582066e6def019a20990d27e38ade7560a93735daaadfe1c55082116a581501
+ size 4776
runs/Nov29_20-06-42_ffsouza/events.out.tfevents.1638227214.ffsouza.18902.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6bae19ef964a84aa9b8842331f55666ecfbf975752ff4ed97be3b2e49db9deb0
+ size 4056
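
The runs/ directories above hold TensorBoard event files written during training on the host ffsouza. A minimal sketch for reading the logged scalars once the LFS payloads are pulled locally (the chosen run directory is just one of those listed above):

# Sketch: read logged scalars from one of the TensorBoard event directories.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Nov29_20-06-42_ffsouza")
acc.Reload()
for tag in acc.Tags().get("scalars", []):
    points = acc.Scalars(tag)
    print(tag, [(p.step, p.value) for p in points][:3])  # first few (step, value) pairs
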
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
+ size 5069051
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": "<mask>", "additional_special_tokens": ["ar_AR", "cs_CZ", "de_DE", "en_XX", "es_XX", "et_EE", "fi_FI", "fr_XX", "gu_IN", "hi_IN", "it_IT", "ja_XX", "kk_KZ", "ko_KR", "lt_LT", "lv_LV", "my_MM", "ne_NP", "nl_XX", "ro_RO", "ru_RU", "si_LK", "tr_TR", "vi_VN", "zh_CN"]}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "src_lang": null, "tgt_lang": null, "additional_special_tokens": null, "model_max_length": 1024, "special_tokens_map_file": "/home/ffsouza/.cache/huggingface/transformers/33fa7894ab257a74cede3060dca6d2fc609918785e80160f6c057723ece47292.0dc5b1041f62041ebbd23b1297f2f573769d5c97d8b7c28180ec86b8f6185aa8", "name_or_path": "sshleifer/tiny-mbart", "sp_model_kwargs": {}, "tokenizer_class": "MBartTokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3008fcbc1f4b651bcdd644678c2f8f616ae1104165a676d16aee7c7673f06069
+ size 2991
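
training_args.bin is the TrainingArguments object that the transformers Trainer pickles next to its outputs. A minimal sketch for inspecting it once pulled from LFS (on recent torch releases you may need weights_only=False, since this is a pickled Python object rather than a tensor file):

# Sketch: inspect the saved TrainingArguments.
import torch

args = torch.load("training_args.bin")  # add weights_only=False on newer torch
print(args.output_dir, args.per_device_train_batch_size, args.learning_rate)
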