huseinzol05 committed on
Commit 5d868bb
1 Parent(s): f02408c

Upload tokenizer

Files changed (2)
  1. tokenizer.json +2 -35
  2. tokenizer_config.json +3 -5
tokenizer.json CHANGED

@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 16384,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
@@ -56,12 +51,6 @@
   "post_processor": {
     "type": "TemplateProcessing",
     "single": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
@@ -70,24 +59,12 @@
       }
     ],
     "pair": [
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 0
-        }
-      },
       {
         "Sequence": {
           "id": "A",
           "type_id": 0
         }
       },
-      {
-        "SpecialToken": {
-          "id": "<s>",
-          "type_id": 1
-        }
-      },
       {
         "Sequence": {
           "id": "B",
@@ -95,17 +72,7 @@
         }
       }
     ],
-    "special_tokens": {
-      "<s>": {
-        "id": "<s>",
-        "ids": [
-          1
-        ],
-        "tokens": [
-          "<s>"
-        ]
-      }
-    }
+    "special_tokens": {}
   },
   "decoder": {
     "type": "Sequence",
tokenizer_config.json CHANGED

@@ -1,4 +1,6 @@
 {
+  "add_bos_token": false,
+  "add_eos_token": false,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
@@ -26,19 +28,15 @@
     }
   },
   "bos_token": "<s>",
-  "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<<SYS>>\\n' + system_message + '\\n<</SYS>>\\n\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{{ bos_token + '[INST] ' + content.strip() + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ ' ' + content.strip() + ' ' + eos_token }}{% endif %}{% endfor %}",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
   "legacy": false,
-  "max_length": 16384,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<unk>",
   "padding_side": "right",
   "sp_model_kwargs": {},
-  "stride": 0,
   "tokenizer_class": "LlamaTokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
 }
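
Note: tokenizer_config.json now sets add_bos_token and add_eos_token to false and moves BOS handling into the new chat_template, which emits {{ bos_token }} once at the start, drops the old <<SYS>> system-prompt handling, and raises on any role other than strictly alternating user/assistant. A minimal sketch of rendering it, again with a hypothetical repo id:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("huseinzol05/model-name")  # hypothetical repo id

messages = [
    {"role": "user", "content": "Hai"},
    {"role": "assistant", "content": "Hai! Apa khabar?"},
]

# tokenize=False returns the rendered string; with this template it should be:
# "<s>[INST] Hai [/INST]Hai! Apa khabar?</s>"
print(tok.apply_chat_template(messages, tokenize=False))

Because add_bos_token is now false, tokenizing the rendered string does not add a second <s> on top of the one the template already emits.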