ZwwWayne committed on
Commit
ecccbb5
1 Parent(s): f842a52

update model weights

Browse files
.gitattributes CHANGED
@@ -34,3 +34,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  tokenizer.model filter=lfs diff=lfs merge=lfs -text
 
 
 
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  tokenizer.model filter=lfs diff=lfs merge=lfs -text
37
+ model-00001-of-00002.safetensors filter=lfs diff=lfs merge=lfs -text
38
+ model-00002-of-00002.safetensors filter=lfs diff=lfs merge=lfs -text
config.json CHANGED
@@ -2,6 +2,7 @@
2
  "architectures": [
3
  "InternLM2ForCausalLM"
4
  ],
 
5
  "auto_map": {
6
  "AutoConfig": "configuration_internlm2.InternLM2Config",
7
  "AutoModelForCausalLM": "modeling_internlm2.InternLM2ForCausalLM",
@@ -21,14 +22,11 @@
21
  "num_key_value_heads": 8,
22
  "pad_token_id": 2,
23
  "rms_norm_eps": 1e-05,
24
- "rope_scaling": {
25
- "factor": 2.0,
26
- "type": "dynamic"
27
- },
28
  "rope_theta": 1000000,
29
  "tie_word_embeddings": false,
30
- "torch_dtype": "float16",
31
- "transformers_version": "4.37.2",
32
  "use_cache": true,
33
  "vocab_size": 92544
34
  }
 
2
  "architectures": [
3
  "InternLM2ForCausalLM"
4
  ],
5
+ "attn_implementation": "eager",
6
  "auto_map": {
7
  "AutoConfig": "configuration_internlm2.InternLM2Config",
8
  "AutoModelForCausalLM": "modeling_internlm2.InternLM2ForCausalLM",
 
22
  "num_key_value_heads": 8,
23
  "pad_token_id": 2,
24
  "rms_norm_eps": 1e-05,
25
+ "rope_scaling": null,
 
 
 
26
  "rope_theta": 1000000,
27
  "tie_word_embeddings": false,
28
+ "torch_dtype": "bfloat16",
29
+ "transformers_version": "4.37.1",
30
  "use_cache": true,
31
  "vocab_size": 92544
32
  }
generation_config.json CHANGED
@@ -3,5 +3,5 @@
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
  "pad_token_id": 2,
6
- "transformers_version": "4.37.2"
7
  }
 
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
  "pad_token_id": 2,
6
+ "transformers_version": "4.37.1"
7
  }
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:1fd2f90779b1457e6df4e1f3d994e6cbb5fca2bb91161b3e69a3d8ca14ddfa08
3
- size 1981392544
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c8617bf2394506ece1f8e02c2f054a503b67cf46d28d7e87dbdb437dc9bdc027
3
+ size 1981392632
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7c3781fac6755224db8945b9c1638dc47d6bccede3eb837565d796c3f939675d
3
- size 1796846480
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4cb5fa37ac3f0c46748a9a8029c589e3c7a0b0ba9c1fa6bd225bcbbea80e9d24
3
+ size 1796846560
special_tokens_map.json CHANGED
@@ -7,8 +7,32 @@
7
  "<|interpreter|>",
8
  "<|plugin|>"
9
  ],
10
- "bos_token": "<s>",
11
- "eos_token": "</s>",
12
- "pad_token": "</s>",
13
- "unk_token": "<unk>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  }
 
7
  "<|interpreter|>",
8
  "<|plugin|>"
9
  ],
10
+ "bos_token": {
11
+ "content": "<s>",
12
+ "lstrip": false,
13
+ "normalized": false,
14
+ "rstrip": false,
15
+ "single_word": false
16
+ },
17
+ "eos_token": {
18
+ "content": "</s>",
19
+ "lstrip": false,
20
+ "normalized": false,
21
+ "rstrip": false,
22
+ "single_word": false
23
+ },
24
+ "pad_token": {
25
+ "content": "</s>",
26
+ "lstrip": false,
27
+ "normalized": false,
28
+ "rstrip": false,
29
+ "single_word": false
30
+ },
31
+ "unk_token": {
32
+ "content": "<unk>",
33
+ "lstrip": false,
34
+ "normalized": false,
35
+ "rstrip": false,
36
+ "single_word": false
37
+ }
38
  }
tokenizer_config.json CHANGED
@@ -1,17 +1,6 @@
1
  {
2
- "auto_map": {
3
- "AutoTokenizer": [
4
- "tokenization_internlm2.InternLM2Tokenizer",
5
- "tokenization_internlm2_fast.InternLM2TokenizerFast"
6
- ]
7
- },
8
- "bos_token": "<s>",
9
- "clean_up_tokenization_spaces": false,
10
- "eos_token": "</s>",
11
- "model_max_length": 1000000000000000019884624838656,
12
- "pad_token": "</s>",
13
- "tokenizer_class": "InternLM2Tokenizer",
14
- "unk_token": "<unk>",
15
  "added_tokens_decoder": {
16
  "0": {
17
  "content": "<unk>",
@@ -37,48 +26,48 @@
37
  "single_word": false,
38
  "special": true
39
  },
40
- "92543": {
41
- "content": "<|im_start|>",
42
  "lstrip": false,
43
  "normalized": false,
44
  "rstrip": false,
45
  "single_word": false,
46
  "special": true
47
  },
48
- "92542": {
49
- "content": "<|im_end|>",
50
  "lstrip": false,
51
  "normalized": false,
52
  "rstrip": false,
53
  "single_word": false,
54
  "special": true
55
  },
56
- "92541": {
57
- "content": "<|action_start|>",
58
  "lstrip": false,
59
  "normalized": false,
60
  "rstrip": false,
61
  "single_word": false,
62
  "special": true
63
  },
64
- "92540": {
65
- "content": "<|action_end|>",
66
  "lstrip": false,
67
  "normalized": false,
68
  "rstrip": false,
69
  "single_word": false,
70
  "special": true
71
  },
72
- "92539": {
73
- "content": "<|interpreter|>",
74
  "lstrip": false,
75
  "normalized": false,
76
  "rstrip": false,
77
  "single_word": false,
78
  "special": true
79
  },
80
- "92538": {
81
- "content": "<|plugin|>",
82
  "lstrip": false,
83
  "normalized": false,
84
  "rstrip": false,
@@ -94,5 +83,20 @@
94
  "<|interpreter|>",
95
  "<|plugin|>"
96
  ],
97
- "chat_template": "{{ bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
98
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
 
 
 
 
 
 
 
 
 
 
 
4
  "added_tokens_decoder": {
5
  "0": {
6
  "content": "<unk>",
 
26
  "single_word": false,
27
  "special": true
28
  },
29
+ "92538": {
30
+ "content": "<|plugin|>",
31
  "lstrip": false,
32
  "normalized": false,
33
  "rstrip": false,
34
  "single_word": false,
35
  "special": true
36
  },
37
+ "92539": {
38
+ "content": "<|interpreter|>",
39
  "lstrip": false,
40
  "normalized": false,
41
  "rstrip": false,
42
  "single_word": false,
43
  "special": true
44
  },
45
+ "92540": {
46
+ "content": "<|action_end|>",
47
  "lstrip": false,
48
  "normalized": false,
49
  "rstrip": false,
50
  "single_word": false,
51
  "special": true
52
  },
53
+ "92541": {
54
+ "content": "<|action_start|>",
55
  "lstrip": false,
56
  "normalized": false,
57
  "rstrip": false,
58
  "single_word": false,
59
  "special": true
60
  },
61
+ "92542": {
62
+ "content": "<|im_end|>",
63
  "lstrip": false,
64
  "normalized": false,
65
  "rstrip": false,
66
  "single_word": false,
67
  "special": true
68
  },
69
+ "92543": {
70
+ "content": "<|im_start|>",
71
  "lstrip": false,
72
  "normalized": false,
73
  "rstrip": false,
 
83
  "<|interpreter|>",
84
  "<|plugin|>"
85
  ],
86
+ "auto_map": {
87
+ "AutoTokenizer": [
88
+ "tokenization_internlm2.InternLM2Tokenizer",
89
+ "tokenization_internlm2_fast.InternLM2TokenizerFast"
90
+ ]
91
+ },
92
+ "bos_token": "<s>",
93
+ "chat_template": "{{ bos_token }}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
94
+ "clean_up_tokenization_spaces": false,
95
+ "decode_with_prefix_space": false,
96
+ "eos_token": "</s>",
97
+ "model_max_length": 1000000000000000019884624838656,
98
+ "pad_token": "</s>",
99
+ "sp_model_kwargs": null,
100
+ "tokenizer_class": "InternLM2Tokenizer",
101
+ "unk_token": "<unk>"
102
+ }