codingwithlewis committed
Commit a8c4e8e
1 Parent(s): d4a4fac

Trained with Unsloth

config.json CHANGED
@@ -1,11 +1,11 @@
 {
-  "_name_or_path": "unsloth/mistral-7b-bnb-4bit",
+  "_name_or_path": "unsloth/Hermes-2-Pro-Mistral-7B-bnb-4bit",
   "architectures": [
     "MistralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 1,
-  "eos_token_id": 2,
+  "eos_token_id": 32000,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -15,7 +15,6 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pad_token_id": 2,
   "rms_norm_eps": 1e-05,
   "rope_theta": 10000.0,
   "sliding_window": 4096,
@@ -23,6 +22,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.39.0",
   "unsloth_version": "2024.4",
-  "use_cache": true,
-  "vocab_size": 32000
+  "use_cache": false,
+  "vocab_size": 32032
 }
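
Note: the config changes swap the base checkpoint from unsloth/mistral-7b-bnb-4bit to unsloth/Hermes-2-Pro-Mistral-7B-bnb-4bit, which extends the tokenizer with ChatML and tool-use special tokens: vocab_size grows from 32000 to 32032, the end-of-sequence id moves from 2 (</s>) to 32000 (the ChatML <|im_end|> token in the Hermes 2 Pro vocabulary), the explicit pad_token_id is dropped, and use_cache is switched off in the saved config. A minimal sketch for checking the new values after pulling this commit; the repo id below is a hypothetical placeholder, not taken from this commit:

# Minimal sketch, assuming the config.json from this commit is available
# locally or on the Hub; "your-username/your-finetune" is a placeholder.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("your-username/your-finetune")

assert config.eos_token_id == 32000  # ChatML <|im_end|> in the Hermes 2 Pro vocab
assert config.vocab_size == 32032    # 32 extra special tokens over base Mistral's 32000
assert config.use_cache is False     # KV cache disabled in this saved config
print(config._name_or_path, config.torch_dtype)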
generation_config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "_from_model_config": true,
   "bos_token_id": 1,
-  "eos_token_id": 2,
+  "do_sample": true,
+  "eos_token_id": 32000,
   "transformers_version": "4.39.0"
 }
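
Note: generation_config.json picks up the new end-of-sequence id and also enables sampling by default. A minimal usage sketch, assuming the merged checkpoint from this commit is loaded by repo id (the id below is a hypothetical placeholder):

# Minimal sketch; repo id is a placeholder, not confirmed by this commit.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "your-username/your-finetune"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("Hello, how are you?", return_tensors="pt")
# generation_config.json now sets do_sample=True and eos_token_id=32000, so
# generate() samples by default and stops on the ChatML end-of-turn token.
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))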
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fb40606eebb577904764bf99f37b250dd4b6399c08cfc02096747ab9b255ef15
-size 4943162336
+oid sha256:97a74c3268cf773169ed1c207ecac6755aeb34c1641b20ab1f6210d363a6cf7b
+size 4943424480
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e8ab5552a94b5447e046b38e42c60677ea83c5191693f93f0a713d8b2a9167a9
+oid sha256:4447b5bc77adc79842909b069eaf5b55137b31f638e541fa6cd7482edea9cfd7
 size 4999819336
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e17e71be75c45534a856dd31e340224b80e9f94aaa32d6fb28224a206a504ec0
-size 4540516344
+oid sha256:5dc7f0d5ea31f83d675b4832f4dcd987fbe5ac5a8cae2a5ad1c7f80f9535eff5
+size 4540778488
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 14483464192
+    "total_size": 14483988480
   },
   "weight_map": {
     "lm_head.weight": "model-00003-of-00003.safetensors",