ZeroXClem committed
Commit dd7efb1 · verified · 1 Parent(s): ad8b3a6

Update config.json

Files changed (1)
  1. config.json  +9 -4
config.json CHANGED
@@ -1,12 +1,17 @@
 {
-  "_name_or_path": "bunnycore/Best-Mix-Llama-3.1-8B",
+  "_name_or_path": "unsloth/Meta-Llama-3.1-8B",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -30,8 +35,8 @@
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.45.1",
   "unsloth_version": "2024.9",
   "use_cache": true,
   "vocab_size": 128256
-}
+}
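The commit replaces the single `eos_token_id` with the list of Llama 3.1 terminator token ids (128001, 128008, 128009), adds `head_dim`, points `_name_or_path` at the unsloth base model, and bumps `transformers_version` from 4.44.2 to 4.45.1. Below is a minimal sketch for sanity-checking the updated file, assuming transformers is installed and the script runs from the model repo root (the paths "config.json" and "." are assumptions, not part of this commit):

```python
# Sketch: verify the updated config.json parses as expected.
# Assumes: transformers installed, run from the directory holding config.json.
import json

from transformers import AutoConfig

# Raw JSON view of the edited file.
with open("config.json") as f:
    raw = json.load(f)

print(raw["eos_token_id"])   # expected: [128001, 128008, 128009]
print(raw["head_dim"])       # expected: 128
print(raw["_name_or_path"])  # expected: "unsloth/Meta-Llama-3.1-8B"

# transformers accepts a list-valued eos_token_id; generation stops when
# any of the listed token ids is produced.
cfg = AutoConfig.from_pretrained(".")
print(cfg.eos_token_id)      # expected: [128001, 128008, 128009]
```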