bigchestnut committed
Commit 4976084 · verified · 1 parent: 6b131df

Upload Qwen2ForCausalLM

config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "_name_or_path": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
+  "_name_or_path": "Qwen/Qwen2.5-1.5B-Instruct",
   "architectures": [
     "Qwen2ForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
-  "eos_token_id": 151643,
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 1536,
   "initializer_range": 0.02,
   "intermediate_size": 8960,
-  "max_position_embeddings": 131072,
+  "max_position_embeddings": 32768,
   "max_window_layers": 21,
   "model_type": "qwen2",
   "num_attention_heads": 12,
@@ -18,13 +18,12 @@
   "num_key_value_heads": 2,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
-  "rope_theta": 10000,
+  "rope_theta": 1000000.0,
   "sliding_window": null,
-  "tie_word_embeddings": false,
+  "tie_word_embeddings": true,
   "torch_dtype": "float32",
   "transformers_version": "4.46.1",
   "use_cache": true,
-  "use_mrope": false,
   "use_sliding_window": false,
   "vocab_size": 151936
 }
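Taken together, these changes retarget the config from DeepSeek-R1-Distill-Qwen-1.5B to Qwen2.5-1.5B-Instruct defaults: the EOS token becomes 151645 (<|im_end|>, the Qwen2.5 chat terminator), the advertised context length drops from 131072 to 32768, rope_theta rises to 1000000.0, and the input/output embeddings are now tied. A minimal sketch for sanity-checking the updated fields after download; the repo id is a placeholder, since the commit page does not show it:

# Sanity-check the updated config (repo id is a placeholder --
# substitute the actual Hub repo this commit belongs to).
from transformers import AutoConfig

config = AutoConfig.from_pretrained("bigchestnut/<repo-name>", revision="4976084")
assert config.eos_token_id == 151645         # <|im_end|> (Qwen2.5 chat EOS)
assert config.max_position_embeddings == 32768
assert config.rope_theta == 1000000.0
assert config.tie_word_embeddings is True    # lm_head now shares embed_tokens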
generation_config.json CHANGED
@@ -1,9 +1,14 @@
 {
-  "_from_model_config": true,
-  "bos_token_id": 151646,
+  "bos_token_id": 151643,
   "do_sample": true,
-  "eos_token_id": 151643,
-  "temperature": 0.6,
-  "top_p": 0.95,
+  "eos_token_id": [
+    151645,
+    151643
+  ],
+  "pad_token_id": 151643,
+  "repetition_penalty": 1.1,
+  "temperature": 0.7,
+  "top_k": 20,
+  "top_p": 0.8,
   "transformers_version": "4.46.1"
 }
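The new sampling defaults (temperature 0.7, top_k 20, top_p 0.8, repetition_penalty 1.1, and the two-element eos_token_id list) match the generation_config.json that Qwen/Qwen2.5-1.5B-Instruct ships with. A sketch showing that generate() picks them up automatically, assuming the repo also ships the Qwen2.5 chat template; the repo id remains a placeholder:

# Sketch: the defaults in generation_config.json are applied by
# generate() without being passed explicitly.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "bigchestnut/<repo-name>"  # placeholder, see above
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo)

messages = [{"role": "user", "content": "Hello, how are you?"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)
# do_sample=True, temperature=0.7, top_k=20, top_p=0.8 and
# repetition_penalty=1.1 come from generation_config.json; decoding
# stops on either 151645 (<|im_end|>) or 151643 (<|endoftext|>).
out = model.generate(inputs, max_new_tokens=64)
print(tokenizer.decode(out[0], skip_special_tokens=True))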
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2ad7f8d4f65c82fd60863f9e7af6fd0b23397c27164738cff5a867981e827e57
+oid sha256:84ddf070d6d38cb6f8ad1a522a66aef753c7eb8ad92dc916103d277f14a0f1a6
 size 4996670464
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:037a96b27ac10bdb40e3b5457961a1da70a0d4652dcdcf40dc5c814244908126
-size 2111719976
+oid sha256:3d7c6a0fa6b00809bb7e01e83f6a9b2cd9ccbbe5d717408a829affdd465a2104
+size 1178224960
model.safetensors.index.json CHANGED
@@ -1,9 +1,8 @@
 {
   "metadata": {
-    "total_size": 7108352000
+    "total_size": 6174857216
   },
   "weight_map": {
-    "lm_head.weight": "model-00002-of-00002.safetensors",
     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",