basiliskinstitute committed on
Commit
4d6836c
1 Parent(s): 27b6ea6

Upload 7 files

Browse files
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:84125f5905c37891eb16068bd1124aeea8e8a5fb74718a659bc7587329b2f864
3
- size 4999844744
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5d3a31c2ba2e5b90cbdddede8408fa1e27b549d854df7f089fa5a56a56394ae3
3
+ size 4999844257
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:36578f4909b748ca4381c4c148aee5c278dbf29c9a6e250e9162bc9d169ab55c
3
- size 4540553798
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:148a2612a1ab26f1149a736531b65da77884be2bfc93d9bd0c7b28daaa0da09e
3
+ size 4540553247
special_tokens_map.json CHANGED
@@ -1,4 +1,9 @@
1
  {
 
 
 
 
 
2
  "bos_token": {
3
  "content": "<s>",
4
  "lstrip": false,
 
1
  {
2
+ "additional_special_tokens": [
3
+ "<unk>",
4
+ "<s>",
5
+ "</s>"
6
+ ],
7
  "bos_token": {
8
  "content": "<s>",
9
  "lstrip": false,
tokenizer_config.json CHANGED
@@ -44,8 +44,13 @@
44
  "special": false
45
  }
46
  },
47
- "additional_special_tokens": [],
 
 
 
 
48
  "bos_token": "<s>",
 
49
  "clean_up_tokenization_spaces": false,
50
  "eos_token": "<|im_end|>",
51
  "legacy": true,
@@ -55,6 +60,6 @@
55
  "spaces_between_special_tokens": false,
56
  "tokenizer_class": "LlamaTokenizer",
57
  "unk_token": "<unk>",
58
- "use_default_system_prompt": false,
59
  "use_fast": true
60
  }
 
44
  "special": false
45
  }
46
  },
47
+ "additional_special_tokens": [
48
+ "<unk>",
49
+ "<s>",
50
+ "</s>"
51
+ ],
52
  "bos_token": "<s>",
53
+ "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = 'You are a helpful assistant.' %}{% endif %}{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in loop_messages %}{% if loop.index0 == 0 %}{{'<|im_start|>system\n' + system_message + '<|im_end|>\n'}}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
54
  "clean_up_tokenization_spaces": false,
55
  "eos_token": "<|im_end|>",
56
  "legacy": true,
 
60
  "spaces_between_special_tokens": false,
61
  "tokenizer_class": "LlamaTokenizer",
62
  "unk_token": "<unk>",
63
+ "use_default_system_prompt": true,
64
  "use_fast": true
65
  }