dodo2 committed
Commit 655bf61 · verified · 1 Parent(s): 2b0173d

Upload LlamaForCausalLM

config.json CHANGED
@@ -1,28 +1,40 @@
 {
-  "_name_or_path": "meta-llama/Meta-Llama-3-8B-Instruct",
+  "_name_or_path": "meta-llama/Llama-3.1-8B-Instruct",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128009,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 14336,
-  "max_position_embeddings": 8192,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
+  "rope_scaling": {
+    "factor": 8.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.40.1",
+  "transformers_version": "4.48.0.dev0",
   "use_cache": true,
   "vocab_size": 128256
 }
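
In short, this hunk retargets the checkpoint from Meta-Llama-3-8B-Instruct to Llama-3.1-8B-Instruct: the single eos_token_id becomes a list of three terminators, the context window grows from 8192 to 131072 positions, and rope_scaling gains the llama3 scheme that makes the longer window work. The changed fields are easy to inspect locally; a minimal Python sketch, assuming the updated config.json sits in the working directory (stdlib only, no model download needed):

    # Print the config fields this commit changes.
    import json

    with open("config.json") as f:
        cfg = json.load(f)

    print(cfg["eos_token_id"])             # [128001, 128008, 128009] (was 128009)
    print(cfg["max_position_embeddings"])  # 131072 (was 8192)
    print(cfg["rope_scaling"])             # llama3 RoPE scaling dict (was null)
    print(cfg["transformers_version"])     # "4.48.0.dev0" (was "4.40.1")

Note that only newer transformers releases (roughly 4.43 and later) understand the llama3 rope_type; older versions reject this rope_scaling block when validating the config.
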
generation_config.json CHANGED
@@ -3,10 +3,10 @@
   "do_sample": true,
   "eos_token_id": [
     128001,
+    128008,
     128009
   ],
-  "max_length": 4096,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.40.1"
+  "transformers_version": "4.48.0.dev0"
 }
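
Adding 128008 here matters because generation stops as soon as the sampled token matches any id in eos_token_id; in the Llama 3.1 tokenizer, 128001, 128008, and 128009 correspond to <|end_of_text|>, <|eom_id|>, and <|eot_id|>. A minimal sketch of that stopping rule, written as a hypothetical decode loop rather than transformers internals:

    # Hypothetical decode loop: stop on ANY of the configured terminators.
    EOS_IDS = {128001, 128008, 128009}  # mirrors the new eos_token_id list

    def generate(next_token, max_new_tokens=256):
        """next_token() returns the next sampled token id."""
        out = []
        for _ in range(max_new_tokens):
            tok = next_token()
            if tok in EOS_IDS:
                break  # any terminator ends the sequence
            out.append(tok)
        return out

Dropping "max_length": 4096 likewise tracks the upstream Llama 3.1 generation config, which leaves the length cap to the caller (e.g. via max_new_tokens) rather than hard-coding it.
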
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db6c16b86bc94e4edd78884ff3bfb41c90eab6c681a3507a5875ed2dbbf55ef7
+oid sha256:aeba141da2d9c3cc32feacba9ab93238674498535e86e924396630553902bcf5
 size 4976698592
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ef2252fd0c716841fe555386a5cf95c075294a493c6e9673694345e4f567e2b8
+oid sha256:7c29143afed5b3fa41c4066a6e20eb96ad8540effb2841feef4440e085d38e6d
 size 4999802616
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f05b0e0714d676b388dd1634ed442878ab5b99a0f378ce411505276c3275737a
+oid sha256:6da28fb8e18d5c208fe59585748cbfc995188c7ec98271becb9977ac2427fd3c
 size 4915916080
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d04db72068fd46f4770156e74f503bcdf4fdabb519a6391bb46b871218fb6ca4
+oid sha256:013bc9e3d5d06232d8a358ad3cd323b732255d31e2debe3bf0c6325b88110ae0
 size 1168138808
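
Each shard entry above is a Git LFS pointer rather than the weights themselves: oid records the SHA-256 of the real file and size its byte count, so a downloaded shard can be checked locally. A minimal stdlib sketch, using the first shard's new oid and size from this diff:

    # Verify a downloaded shard against its Git LFS pointer (oid + size).
    import hashlib, os

    def verify_lfs(path, expected_oid, expected_size):
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
                h.update(chunk)
        return h.hexdigest() == expected_oid and os.path.getsize(path) == expected_size

    print(verify_lfs(
        "model-00001-of-00004.safetensors",
        "aeba141da2d9c3cc32feacba9ab93238674498535e86e924396630553902bcf5",
        4976698592,
    ))  # True if the local shard matches this commit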