sarvam-user committed on
Commit
9a86918
1 Parent(s): 09062ad

Upload tokenizer

Browse files
special_tokens_map.json CHANGED
@@ -2,20 +2,13 @@
2
  "bos_token": {
3
  "content": "<s>",
4
  "lstrip": false,
5
- "normalized": true,
6
  "rstrip": false,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
  "content": "</s>",
11
  "lstrip": false,
12
- "normalized": true,
13
- "rstrip": false,
14
- "single_word": false
15
- },
16
- "pad_token": {
17
- "content": "[PAD]",
18
- "lstrip": false,
19
  "normalized": false,
20
  "rstrip": false,
21
  "single_word": false
@@ -23,7 +16,7 @@
23
  "unk_token": {
24
  "content": "<unk>",
25
  "lstrip": false,
26
- "normalized": true,
27
  "rstrip": false,
28
  "single_word": false
29
  }
 
2
  "bos_token": {
3
  "content": "<s>",
4
  "lstrip": false,
5
+ "normalized": false,
6
  "rstrip": false,
7
  "single_word": false
8
  },
9
  "eos_token": {
10
  "content": "</s>",
11
  "lstrip": false,
 
 
 
 
 
 
 
12
  "normalized": false,
13
  "rstrip": false,
14
  "single_word": false
 
16
  "unk_token": {
17
  "content": "<unk>",
18
  "lstrip": false,
19
+ "normalized": false,
20
  "rstrip": false,
21
  "single_word": false
22
  }
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d67cd5bb8170cf2af9d4bce126af808633773a743361aea8b0050d6eb0e5622b
3
- size 967552
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c384835e29a2bcdc9af37e169f13978dc6fbf2f94274f956ed2a42afa4b7e87
3
+ size 967614
tokenizer_config.json CHANGED
@@ -5,7 +5,7 @@
5
  "0": {
6
  "content": "<unk>",
7
  "lstrip": false,
8
- "normalized": true,
9
  "rstrip": false,
10
  "single_word": false,
11
  "special": true
@@ -13,7 +13,7 @@
13
  "1": {
14
  "content": "<s>",
15
  "lstrip": false,
16
- "normalized": true,
17
  "rstrip": false,
18
  "single_word": false,
19
  "special": true
@@ -21,14 +21,6 @@
21
  "2": {
22
  "content": "</s>",
23
  "lstrip": false,
24
- "normalized": true,
25
- "rstrip": false,
26
- "single_word": false,
27
- "special": true
28
- },
29
- "32000": {
30
- "content": "[PAD]",
31
- "lstrip": false,
32
  "normalized": false,
33
  "rstrip": false,
34
  "single_word": false,
@@ -40,11 +32,10 @@
40
  "eos_token": "</s>",
41
  "legacy": false,
42
  "model_max_length": 1000000000000000019884624838656,
43
- "pad_token": "[PAD]",
44
  "sp_model_kwargs": {},
45
  "spaces_between_special_tokens": false,
46
  "tokenizer_class": "LlamaTokenizer",
47
  "unk_token": "<unk>",
48
- "use_default_system_prompt": false,
49
- "use_fast": true
50
  }
 
5
  "0": {
6
  "content": "<unk>",
7
  "lstrip": false,
8
+ "normalized": false,
9
  "rstrip": false,
10
  "single_word": false,
11
  "special": true
 
13
  "1": {
14
  "content": "<s>",
15
  "lstrip": false,
16
+ "normalized": false,
17
  "rstrip": false,
18
  "single_word": false,
19
  "special": true
 
21
  "2": {
22
  "content": "</s>",
23
  "lstrip": false,
 
 
 
 
 
 
 
 
24
  "normalized": false,
25
  "rstrip": false,
26
  "single_word": false,
 
32
  "eos_token": "</s>",
33
  "legacy": false,
34
  "model_max_length": 1000000000000000019884624838656,
35
+ "pad_token": null,
36
  "sp_model_kwargs": {},
37
  "spaces_between_special_tokens": false,
38
  "tokenizer_class": "LlamaTokenizer",
39
  "unk_token": "<unk>",
40
+ "use_default_system_prompt": false
 
41
  }