Tran Hoai Chau committed
Commit 3517be0
1 Parent(s): 0b6bafa

Training in progress step 6000

added_tokens.json CHANGED
@@ -1,4 +1 @@
-{
-  "[CLS]": 50258,
-  "[SEP]": 50257
-}
+{"[CLS]": 50258, "[SEP]": 50257}
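
The added_tokens.json change above only reflows the file onto a single line; the content is unchanged: [SEP] keeps id 50257 and [CLS] keeps id 50258, the first two slots after GPT-2's 50,257-entry vocabulary. As a minimal sketch (the base "gpt2" checkpoint is an assumption, not something stated in this commit), this is how such a file is typically produced:

from transformers import GPT2Tokenizer

# Start from a stock GPT-2 tokenizer; its vocabulary covers ids 0..50256.
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

# Newly registered tokens are appended after the existing vocabulary, which is
# why [SEP] lands on 50257 and [CLS] on 50258 in added_tokens.json.
tokenizer.add_tokens(["[SEP]", "[CLS]"], special_tokens=True)
print(tokenizer.convert_tokens_to_ids(["[SEP]", "[CLS]"]))  # [50257, 50258]

# save_pretrained() writes added_tokens.json next to tokenizer_config.json etc.
tokenizer.save_pretrained("gpt2-with-cls-sep")
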
config.json CHANGED
@@ -13,14 +13,12 @@
     "bos_token_id": 0,
     "chunk_size_feed_forward": 0,
     "classifier_dropout": null,
-    "cross_attention_hidden_size": null,
     "decoder_start_token_id": null,
     "diversity_penalty": 0.0,
     "do_sample": false,
     "early_stopping": false,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": 2,
-    "exponential_decay_length_penalty": null,
     "finetuning_task": null,
     "forced_bos_token_id": null,
     "forced_eos_token_id": null,
@@ -74,9 +72,8 @@
     "top_p": 1.0,
     "torch_dtype": "float32",
     "torchscript": false,
-    "transformers_version": "4.20.0",
+    "transformers_version": "4.11.3",
     "type_vocab_size": 1,
-    "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": true,
     "vocab_size": 250002
@@ -92,14 +89,12 @@
     "bos_token_id": 0,
     "chunk_size_feed_forward": 0,
     "classifier_dropout": null,
-    "cross_attention_hidden_size": null,
     "decoder_start_token_id": null,
     "diversity_penalty": 0.0,
     "do_sample": false,
     "early_stopping": false,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": 2,
-    "exponential_decay_length_penalty": null,
     "finetuning_task": null,
     "forced_bos_token_id": null,
     "forced_eos_token_id": null,
@@ -153,9 +148,8 @@
     "top_p": 1.0,
     "torch_dtype": "float32",
     "torchscript": false,
-    "transformers_version": "4.20.0",
+    "transformers_version": "4.11.3",
     "type_vocab_size": 1,
-    "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": true,
     "vocab_size": 250002
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:14fc51e6d60c3ec9b03fe042f130b694eb91e160f2b7ac700981c0155cc3531e
-size 2339011307
+oid sha256:f89d6016408140c858fd37562c68db7b9025f75553e04f7e7e32836de931a0d0
+size 2339050027
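
pytorch_model.bin is stored through Git LFS, so the diff only touches the pointer file: the updated weights are identified by the new sha256 and byte size above. A minimal sketch (the local filename is a placeholder) of verifying a downloaded copy against this pointer:

import hashlib
import os

EXPECTED_SHA256 = "f89d6016408140c858fd37562c68db7b9025f75553e04f7e7e32836de931a0d0"
EXPECTED_SIZE = 2339050027  # bytes, from the pointer above

path = "pytorch_model.bin"  # local copy of the downloaded weights

# Hash in 1 MiB chunks so the ~2.3 GB file never has to fit in memory at once.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")
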
special_tokens_map.json CHANGED
@@ -1,6 +1 @@
-{
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "pad_token": "<|endoftext|>",
-  "unk_token": "<|endoftext|>"
-}
+{"bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "unk_token": "<|endoftext|>", "pad_token": "<|endoftext|>"}
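
As with added_tokens.json, the special_tokens_map.json change is formatting only: all four roles (bos, eos, unk, pad) still point at GPT-2's single special token <|endoftext|>, while the classification tokens live in added_tokens.json. A quick check against a local copy of this checkpoint (the path is a placeholder):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkpoint-6000")  # hypothetical local path

# Every role in special_tokens_map.json resolves to <|endoftext|> ...
print(tok.bos_token, tok.eos_token, tok.unk_token, tok.pad_token)

# ... while [CLS]/[SEP] come from added_tokens.json, with the ids shown earlier.
print(tok.convert_tokens_to_ids(["[CLS]", "[SEP]"]))  # [50258, 50257]
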
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,10 +1 @@
-{
-  "add_prefix_space": false,
-  "bos_token": "<|endoftext|>",
-  "eos_token": "<|endoftext|>",
-  "model_max_length": 1024,
-  "name_or_path": "EddieChen372/gpt2-jest",
-  "special_tokens_map_file": null,
-  "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|endoftext|>"
-}
+{"unk_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "add_prefix_space": false, "model_max_length": 1024, "name_or_path": "EddieChen372/gpt2-jest", "special_tokens_map_file": null, "tokenizer_class": "GPT2Tokenizer"}