Commit 8440776 by dfurman
Parent: 107b1a8

Upload tokenizer

special_tokens_map.json CHANGED
@@ -1,6 +1,24 @@
 {
-  "bos_token": "<s>",
-  "eos_token": "</s>",
-  "pad_token": "</s>",
-  "unk_token": "<unk>"
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<unk>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
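The special tokens are no longer bare strings but serialized AddedToken objects (content plus the lstrip/normalized/rstrip/single_word handling flags), and pad_token moves from "</s>" to "<unk>". A minimal sketch of how the new map surfaces through transformers; the repo id below is a hypothetical placeholder, not the actual model name:

from transformers import AutoTokenizer

# "dfurman/model" is a placeholder; substitute the repo this commit belongs to.
tok = AutoTokenizer.from_pretrained("dfurman/model")

print(tok.bos_token, tok.eos_token)          # <s> </s>
print(tok.pad_token)                         # <unk> (was </s> before this commit)
print(tok.pad_token_id == tok.unk_token_id)  # True, since pad now aliases <unk>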
tokenizer.json CHANGED
@@ -51,12 +51,6 @@
   "post_processor": {
     "type": "TemplateProcessing",
     "single": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
@@ -65,24 +59,12 @@
       }
     ],
     "pair": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
           "type_id": 0
         }
       },
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 1
-        }
-      },
       {
         "Sequence": {
           "id": "B",
@@ -90,17 +72,7 @@
         }
       }
     ],
-    "special_tokens": {
-      "<s>": {
-        "id": "<s>",
-        "ids": [
-          1
-        ],
-        "tokens": [
-          "<s>"
-        ]
-      }
-    }
+    "special_tokens": {}
   },
   "decoder": {
     "type": "Sequence",
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
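The new tokenizer.model (the SentencePiece vocabulary) is checked in as a Git LFS pointer: per the git-lfs spec named in its first line, oid is the SHA-256 of the real file and size is its byte count. A quick integrity check once git lfs pull has materialized the file:

import hashlib

data = open("tokenizer.model", "rb").read()
assert len(data) == 493443
assert hashlib.sha256(data).hexdigest() == (
    "dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055"
)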
tokenizer_config.json CHANGED
@@ -1,4 +1,6 @@
 {
+  "add_bos_token": false,
+  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -27,11 +29,12 @@
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST] ' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
+  "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "legacy": true,
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": null,
+  "pad_token": "<unk>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",