lengyue233 committed
Commit
f26514d
1 Parent(s): 14df5cb

Upload tokenizer

special_tokens_map.json CHANGED
@@ -1,3 +1,5 @@
 {
+  "bos_token": "<|begin_of_sequence|>",
+  "eos_token": "<|end_of_sequence|>",
   "pad_token": "<|pad|>"
 }
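This adds bos_token and eos_token to the special-tokens map alongside the existing pad_token. A minimal sketch of checking the result with transformers (the repo id below is a placeholder, not part of this commit):

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual model repo.
tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

print(tokenizer.bos_token)  # <|begin_of_sequence|>
print(tokenizer.eos_token)  # <|end_of_sequence|>
print(tokenizer.pad_token)  # <|pad|>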
tokenizer.json CHANGED
@@ -5,7 +5,7 @@
   "added_tokens": [
     {
       "id": 0,
-      "content": "<|im_start|>",
+      "content": "<|begin_of_sequence|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -14,7 +14,7 @@
     },
     {
       "id": 1,
-      "content": "<|im_sep|>",
+      "content": "<|end_of_sequence|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -23,7 +23,7 @@
     },
     {
       "id": 2,
-      "content": "<|start_bytes|>",
+      "content": "<|im_start|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -32,7 +32,7 @@
     },
     {
       "id": 3,
-      "content": "<|end_bytes|>",
+      "content": "<|im_sep|>",
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
@@ -95,10 +95,10 @@
   "fuse_unk": false,
   "byte_fallback": false,
   "vocab": {
-    "<|im_start|>": 0,
-    "<|im_sep|>": 1,
-    "<|start_bytes|>": 2,
-    "<|end_bytes|>": 3,
+    "<|begin_of_sequence|>": 0,
+    "<|end_of_sequence|>": 1,
+    "<|im_start|>": 2,
+    "<|im_sep|>": 3,
     "<|im_end|>": 4,
     "<|semantic|>": 5,
     "<|pad|>": 6,
tokenizer_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "added_tokens_decoder": {
     "0": {
-      "content": "<|im_start|>",
+      "content": "<|begin_of_sequence|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -9,7 +9,7 @@
       "special": true
     },
     "1": {
-      "content": "<|im_sep|>",
+      "content": "<|end_of_sequence|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -17,7 +17,7 @@
       "special": true
     },
     "2": {
-      "content": "<|start_bytes|>",
+      "content": "<|im_start|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -25,7 +25,7 @@
       "special": true
     },
     "3": {
-      "content": "<|end_bytes|>",
+      "content": "<|im_sep|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -57,7 +57,9 @@
       "special": true
     }
   },
+  "bos_token": "<|begin_of_sequence|>",
   "clean_up_tokenization_spaces": true,
+  "eos_token": "<|end_of_sequence|>",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<|pad|>",
   "tokenizer_class": "PreTrainedTokenizerFast"