divinetaco commited on
Commit
dd361a6
1 Parent(s): 404febd

Upload folder using huggingface_hub (#1)

Browse files

- 378b94bccd89d2dc8dee24b230d54f218c48acab906ba6a43685deaf832d493f (5058a28ff75ce469dd44864ab6553e0364f2a460)
- 74c5d333def954dc02f15259b87089972bb2154c05ec2b5517e8d0464457df45 (1e6916458ec1303b219a8b7b573a8eca9bf6debf)
- 0052bafb3f7946e6746367da8a352d7423d49ab841f678ff33117cf838fa68d9 (97cb5230044866392082401288f6c4afeb5ae91b)
- 3108c3f357660e1f2dd35fef060226c167ee47069a1939b755da9cba73abd33b (1f02320fe7c211d07bd6ea0af45f86a3b692e385)
- 0360c8d42e3f6d33fe3d0ea36da020a24eb5f0f1f9dc53e2e7f37b8b7225e740 (592ed3098d98b2d889d0f8df7fb6b6e702a8ec30)
- 2390831534a9e74803367b747525ba31b73fd948eea2402c8e8b1389f311f565 (c5c9788c8b0df80323df4b247e68c074bf00e2bd)
- 580295d74e06f2c1c4b1cd1755bf2ae286f6405da83b4b1557823346fcb3ea40 (8041426767ddd84045e8f8b1a5e8b23aff9d8fc5)
- 64847462956d0597fa6c440e7468f193532e3037cc1b67936a9df544450f1d93 (a3d5af320a3a1cf1c1e4c20f842ef9c8d2dfadb7)
- f297666c49e135b115919f38e3a11b84ee3306a93b75a14bee2abdec5eeaeed2 (c7c99455c0f001d878e858569f46ad4c42fa47ff)
- 2b583759a73975ecabeee83c47dc5c51e0456d6bb8cb5f06118a9c46e67084aa (a6cd21bfded895f298bb2acfecaafe8dcf5e8221)
- e35617735fb0901a6ac810c3b94beb37026c44e508940abece6e893559577569 (8dc4011c3abe9f1fe793cc00af5f525738b47a7f)
- 692238380963154926cafff5f5f66a0e72049d8dfcb93f617508857663138bf3 (a241897ab2026df079a49e2ada68a960f26fbf11)
- a73f0a7dd24d6da8e0e4e5c4ab08bd24c5486b7936036938d8c50668cdf2fa1b (ee971ece19f8a283783811cff1ebfcf72fb893a8)
- 70401767b663bd612725491903ac6f5f76a5714979f187aa60ba4c7986558769 (c58f35f309461996ff9c24bcb6d7d6ae1fa13647)
- 0a3aef7e624c9f81b9a3282772b9602cd5ffefa9a6ecf6c6e473d98a42ea10b6 (a7fceb2bf3dcee767842c2684d53b6083d6b4185)
- 35d88f9267ce3c4ff87a61d94fe5e23045e37ac1047bc3dd4a7e6902f7907f67 (2b896a0fee595b2341e59a0dc8aa16b4762522e4)
- 692c881ed8505035c7fe565f66717492efc9bfd477129c74bc83698d1a51bef2 (0d56e34b211b7dbf97c449453a168e7a89678e70)
- 63fb505da9bb22aa23341ea9b88b6378e1fc653a9e136e42297fffd5ffdbffb0 (1783f2bb3723f2031fd4b45275bed2f37cea41e8)
- 6b86ae74c143fbb9cb20011a878cea7d9af77b65bafd3a8935e1a693a9ef510c (36c29b6ec51210638aeabbd07612b90a5f3b0976)
- 0726be3ca1086b8fa28df22f0141f35725daff0f04466db2b2548a3748a5ab30 (de7b05510cedc282ffd2deced23d44eff7ac39a9)
- 0e15f4caf5d1872903dc6e391a9ceab248d3ad9b227899442da9676e4190e84f (0cf8a1053fa318b5155be8df6044da2fe9f6efeb)
- 0a527c4a6df72b6071762f471ea1ec2f7b3c6a1913c99670d8a687d3448a0eac (db23cf8a101e1efff1482605b79eb851724de7e8)
- 7ed6b2c10539a0eb8dd3507d7ddd38020308ccdf35238afe08f3bd48ee6b6c8f (db9232088201cd61746b280f63dff0dc2764da89)
- 67bc05154aa688bc8fdf98e295ad41e38bbd2d262df873400eeb23a8a7f92fc8 (d48ccf1f62c98e7ead302861dac67fb0e129570e)
- 2f309ed6d74639e7f6be980a31609cf382bd1b6179270636a77c06045b72bfa5 (9d2d6c43b5849d05cec2e2d40a20a44c4818cbec)

README.md ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ base_model:
3
+ - 152334H/miqu-1-70b-sf
4
+ - NeverSleep/MiquMaid-v1-70B
5
+ - Sao10K/WinterGoddess-1.4x-70B-L2
6
+ library_name: transformers
7
+ tags:
8
+ - mergekit
9
+ - merge
10
+ ---
11
+ # aranea-ancilla-70b-v1.0
12
+ **aka MiquMaid-v1-70B + interleaved WinterGoddess-1.4x-70B-L2**
13
+
14
+ ![image/png](https://huggingface.co/divinetaco/aranea-ancilla-116b-v1.0/resolve/main/aranea-ancilla.png)
15
+
16
+ A [mergekit](https://github.com/arcee-ai/mergekit) frankenmerge based on [MiquMaid-v1-70B](https://huggingface.co/NeverSleep/MiquMaid-v1-70B) with interleaved layers of [Sao10K/WinterGoddess-1.4x-70B-L2](https://huggingface.co/Sao10K/WinterGoddess-1.4x-70B-L2).
17
 + This was the top performing model from a series of merge experiments to create a highly coherent creative writing model.
18
+
19
+ Tests consisted of a series of private benchmarks and manual comparisons. A number of different base models, interleave models and layer offsets were compared.
20
+
21
+ - Usable context ~32768
22
+ - Recommended context ~16384
23
+
24
 + Non-frankenstein miqu-1 finetunes generally outperform their frankenstein counterparts at very long contexts due to coherency loss.
25
 + As a rough guideline, I suggest swapping out to either [NeverSleep/MiquMaid-v1-70B](https://huggingface.co/NeverSleep/MiquMaid-v1-70B) or [152334H/miqu-1-70b-sf](https://huggingface.co/152334H/miqu-1-70b-sf) after 16k context.
26
+
27
+ Layers: 136
28
+
29
+ ### License
30
+
31
+ No license. Component models based on the [Mistral AI Miqu-1](https://huggingface.co/miqudev/miqu-1-70b/tree/main) llama2 finetune that was released without license.
32
+
33
+ ### Interesting observations from benchmarking
34
+
35
 + - 10 layer interleave stride with a 20 layer interleave width consistently outperformed alternative combinations.
36
+ - Offsetting the interleaved model's first set of layers generally improved coherency. [14-30] reliably beat the [10-30] mergekit slice configuration for combinations of models.
37
+ - Quality of resulting merges can vary wildly. Whilst a merge of two strong models tends to produce a strong frankenstein model, this rule does not always hold true.
38
+
39
+ ### Quantizations
40
+
41
+ Exllamav2 quants will be available when bandwidth permits.
config.json ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "MiquMaid-v1-70B",
3
+ "architectures": [
4
+ "LlamaForCausalLM"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.0,
8
+ "bos_token_id": 1,
9
+ "eos_token_id": 2,
10
+ "hidden_act": "silu",
11
+ "hidden_size": 8192,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 28672,
14
+ "max_position_embeddings": 32764,
15
+ "model_type": "llama",
16
+ "num_attention_heads": 64,
17
+ "num_hidden_layers": 136,
18
+ "num_key_value_heads": 8,
19
+ "pad_token_id": 0,
20
+ "pretraining_tp": 1,
21
+ "rms_norm_eps": 1e-05,
22
+ "rope_scaling": null,
23
+ "rope_theta": 1000000,
24
+ "tie_word_embeddings": false,
25
+ "torch_dtype": "float16",
26
+ "transformers_version": "4.37.2",
27
+ "use_cache": false,
28
+ "vocab_size": 32000
29
+ }
mergekit_config.yml ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ merge_method: linear
2
+ parameters:
3
+ weight: 1.0
4
+ slices:
5
+ - sources:
6
+ - model: MiquMaid-v1-70B
7
+ layer_range: [0, 20]
8
+ - sources:
9
+ - model: WinterGoddess-1.4x-70B-L2
10
+ layer_range: [14, 30]
11
+ - sources:
12
+ - model: MiquMaid-v1-70B
13
+ layer_range: [20, 40]
14
+ - sources:
15
+ - model: WinterGoddess-1.4x-70B-L2
16
+ layer_range: [30, 50]
17
+ - sources:
18
+ - model: MiquMaid-v1-70B
19
+ layer_range: [40, 60]
20
+ - sources:
21
+ - model: WinterGoddess-1.4x-70B-L2
22
+ layer_range: [50, 70]
23
+ - sources:
24
+ - model: MiquMaid-v1-70B
25
+ layer_range: [60, 80]
26
+ dtype: float16
27
+ tokenizer_source: model:MiquMaid-v1-70B
model-00001-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ac699208e77d0a659d4d45e576dfe9755c898dc8b4376aa6d5083a84c64cd2bd
3
+ size 9533822824
model-00002-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4d7a5003a7fb857a63c71df6c983fd09898599b961ad8f013ceb12436d3a1d2d
3
+ size 9663846048
model-00003-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c9cb0b418736da4d1b12409ad273bfb97dfb1a9b8de638e2c54957ff5b7fd7d
3
+ size 9982646056
model-00004-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:161238c5021d3600bb6e6859089b74d005dec5b2c6a7114fef4faa3f3759f9b3
3
+ size 9932314032
model-00005-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:769d2faf8abf2a794745bc2bc2e4c0f7f9d1ed72efa467f2b8a75a5c0696140f
3
+ size 9798080416
model-00006-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:06a368b69d94b0cd449e3036622b73b0e665319269f79aa023d01911ab467ad8
3
+ size 9949075592
model-00007-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:675ab5eacace97ca650e9affc144d2a4ad1d6cca8facc8cc101f0d0de6a6f669
3
+ size 9798113400
model-00008-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:16348291f6d718e23480c57a7290dd598442c1333fce9584dd49a872ace651a1
3
+ size 9647101728
model-00009-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fea919560fc9a18f3568164985e58aedd123ab7640fbe6b5057e720ccd600da2
3
+ size 9965836432
model-00010-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5afd62a7510e752cf488400c39e03a4e07507148ae61098e446f3da8265329f
3
+ size 9680622696
model-00011-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:388131d8158565cae95793a1995c81f6faed8a8f71100e84f43380c3f8130cd4
3
+ size 9932347744
model-00012-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0a168e9dea3a6996b76155f7873faaa99c1172c25a4df4a7316cea1c08ab4963
3
+ size 9781319608
model-00013-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2c3aee01a97602175becdd09d0577ab94ad41986cf9dd0d28f48aabe93caf4ae
3
+ size 9798063944
model-00014-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e3629b2dd052281c24da255083a28ef5d87d531ed4928e785b7946f6e69c78dc
3
+ size 9965852256
model-00015-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f15f19b3a2eddb911f3c06eed0223a0f122fcea7d7fea8e7df252fdc8606b18b
3
+ size 9814874288
model-00016-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1bfa29770436e0c84b56209ec710d7d0bde45c8bb0889e1be4c3707e83abf6c3
3
+ size 9949092104
model-00017-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7e477701476d7f3d95271121eb253205f5a95bfb791a83686028321ed2c71c9a
3
+ size 9798063200
model-00018-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d54146ef4b8118a6cadb45b135f70dd18667832f93c7437c46fa661555768cb1
3
+ size 9630325096
model-00019-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8d413b5960d9c09d4f66e6403963f62b2b3b69aaf2265549f46c386342d734e0
3
+ size 9831650864
model-00020-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6efb01d506ee49c17d05336ef487c50a5317b7f7c8926f5849565678fb4521fd
3
+ size 9932314752
model-00021-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e2370539dabb12c270113dcdf8d31f61ed8789daf004bfb0438f66124ac1fe4d
3
+ size 9663879104
model-00022-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e3cd35e6dc27312213dc17b0abb0973f70fe55ac4ad32bdf18d7c8bc3945d4d1
3
+ size 9630291448
model-00023-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0d56dea77b6d53ff7095793db197389bd84137da01a5afff26fe5363f30b5f32
3
+ size 9965869472
model-00024-of-00024.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:98a19d6ee2e0b01c869ac9042c5835aedff7bd30be6a066fc0793900193bf346
3
+ size 8141328816
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"metadata": {"mergekit_version": "0.0.4.1"}, "weight_map": {"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00024.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00024.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00024.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00024.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00024.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00024.safetensors", "model.embed_tokens.weight": "model-00001-of-00024.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00024.safetensors", 
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00024.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00024.safetensors", "model.layers.4.input_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00024.safetensors", "model.layers.3.input_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00001-of-00024.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00001-of-00024.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.2.input_layernorm.weight": "model-00001-of-00024.safetensors", "model.layers.23.self_attn.o_proj.weight": 
"model-00001-of-00024.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00001-of-00024.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00024.safetensors", "model.layers.22.input_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00024.safetensors", "model.layers.21.input_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00002-of-00024.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00024.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00002-of-00024.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00024.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00024.safetensors", 
"model.layers.20.self_attn.k_proj.weight": "model-00002-of-00024.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00024.safetensors", "model.layers.20.input_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00024.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00024.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00024.safetensors", "model.layers.7.input_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00024.safetensors", "model.layers.6.input_layernorm.weight": "model-00002-of-00024.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00002-of-00024.safetensors", "model.layers.5.mlp.gate_proj.weight": 
"model-00002-of-00024.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00003-of-00024.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.5.input_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00003-of-00024.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00024.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00024.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00024.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00024.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00024.safetensors", "model.layers.28.input_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00024.safetensors", "model.layers.27.input_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00003-of-00024.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00024.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00003-of-00024.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00024.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00024.safetensors", 
"model.layers.26.self_attn.k_proj.weight": "model-00003-of-00024.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00024.safetensors", "model.layers.26.input_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00024.safetensors", "model.layers.25.input_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00024.safetensors", "model.layers.24.input_layernorm.weight": "model-00003-of-00024.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00003-of-00024.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00024.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00003-of-00024.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00024.safetensors", 
"model.layers.23.self_attn.v_proj.weight": "model-00003-of-00024.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00024.safetensors", "model.layers.23.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00004-of-00024.safetensors", "model.layers.10.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00004-of-00024.safetensors", "model.layers.9.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00004-of-00024.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00004-of-00024.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.8.self_attn.o_proj.weight": 
"model-00004-of-00024.safetensors", "model.layers.8.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00024.safetensors", "model.layers.34.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00024.safetensors", "model.layers.33.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00024.safetensors", "model.layers.32.input_layernorm.weight": "model-00004-of-00024.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00004-of-00024.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00024.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00005-of-00024.safetensors", 
"model.layers.31.post_attention_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00024.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00024.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00024.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00024.safetensors", "model.layers.31.input_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00005-of-00024.safetensors", "model.layers.30.input_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00005-of-00024.safetensors", "model.layers.29.input_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00005-of-00024.safetensors", 
"model.layers.28.post_attention_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00005-of-00024.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00005-of-00024.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00005-of-00024.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00024.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00005-of-00024.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00005-of-00024.safetensors", "model.layers.12.input_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00005-of-00024.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00005-of-00024.safetensors", "model.layers.11.input_layernorm.weight": "model-00005-of-00024.safetensors", 
"model.layers.60.mlp.gate_proj.weight": "model-00006-of-00024.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00006-of-00024.safetensors", "model.layers.59.input_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00006-of-00024.safetensors", "model.layers.58.input_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00006-of-00024.safetensors", 
"model.layers.57.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00006-of-00024.safetensors", "model.layers.57.input_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00006-of-00024.safetensors", "model.layers.56.input_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00006-of-00024.safetensors", "model.layers.35.input_layernorm.weight": "model-00006-of-00024.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00006-of-00024.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00006-of-00024.safetensors", 
"model.layers.34.self_attn.o_proj.weight": "model-00006-of-00024.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00006-of-00024.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00006-of-00024.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00006-of-00024.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00007-of-00024.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00007-of-00024.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00007-of-00024.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00007-of-00024.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00007-of-00024.safetensors", "model.layers.15.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00007-of-00024.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00007-of-00024.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00007-of-00024.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00007-of-00024.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00007-of-00024.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00007-of-00024.safetensors", 
"model.layers.14.self_attn.q_proj.weight": "model-00007-of-00024.safetensors", "model.layers.14.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00007-of-00024.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.13.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.66.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00007-of-00024.safetensors", "model.layers.65.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00007-of-00024.safetensors", "model.layers.64.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00007-of-00024.safetensors", 
"model.layers.63.mlp.gate_proj.weight": "model-00007-of-00024.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00007-of-00024.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00007-of-00024.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00007-of-00024.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00007-of-00024.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00007-of-00024.safetensors", "model.layers.63.input_layernorm.weight": "model-00007-of-00024.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00008-of-00024.safetensors", "model.layers.62.input_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00008-of-00024.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00008-of-00024.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00008-of-00024.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00008-of-00024.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00008-of-00024.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00008-of-00024.safetensors", 
"model.layers.61.input_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00008-of-00024.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00008-of-00024.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00008-of-00024.safetensors", "model.layers.60.input_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00008-of-00024.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00008-of-00024.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00008-of-00024.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00008-of-00024.safetensors", "model.layers.18.input_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00008-of-00024.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00008-of-00024.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00008-of-00024.safetensors", 
"model.layers.17.self_attn.v_proj.weight": "model-00008-of-00024.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00008-of-00024.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00008-of-00024.safetensors", "model.layers.17.input_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00008-of-00024.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.16.input_layernorm.weight": "model-00008-of-00024.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00008-of-00024.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00009-of-00024.safetensors", "model.layers.71.self_attn.o_proj.weight": "model-00009-of-00024.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.71.input_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.70.input_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00009-of-00024.safetensors", "model.layers.69.mlp.gate_proj.weight": 
"model-00009-of-00024.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00009-of-00024.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00009-of-00024.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.69.input_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.68.input_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.67.input_layernorm.weight": 
"model-00009-of-00024.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00009-of-00024.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00009-of-00024.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00009-of-00024.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00009-of-00024.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00009-of-00024.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00009-of-00024.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00009-of-00024.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00009-of-00024.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00009-of-00024.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00010-of-00024.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00010-of-00024.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00010-of-00024.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00010-of-00024.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00010-of-00024.safetensors", "model.layers.37.input_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00010-of-00024.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00010-of-00024.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.36.self_attn.o_proj.weight": 
"model-00010-of-00024.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00010-of-00024.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00010-of-00024.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00010-of-00024.safetensors", "model.layers.36.input_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00010-of-00024.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00010-of-00024.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.19.input_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.97.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.97.mlp.up_proj.weight": "model-00010-of-00024.safetensors", "model.layers.97.self_attn.k_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.mlp.down_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.mlp.up_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.post_attention_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.96.self_attn.o_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.self_attn.v_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.self_attn.k_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.self_attn.q_proj.weight": "model-00010-of-00024.safetensors", "model.layers.96.input_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00010-of-00024.safetensors", 
"model.layers.75.self_attn.o_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00010-of-00024.safetensors", "model.layers.75.input_layernorm.weight": "model-00010-of-00024.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00010-of-00024.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00010-of-00024.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00011-of-00024.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00011-of-00024.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00011-of-00024.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00011-of-00024.safetensors", "model.layers.74.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00011-of-00024.safetensors", "model.layers.73.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00011-of-00024.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00011-of-00024.safetensors", "model.layers.72.mlp.up_proj.weight": 
"model-00011-of-00024.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00011-of-00024.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00011-of-00024.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00011-of-00024.safetensors", "model.layers.72.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00011-of-00024.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00011-of-00024.safetensors", "model.layers.40.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00011-of-00024.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00011-of-00024.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00011-of-00024.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00011-of-00024.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00011-of-00024.safetensors", "model.layers.39.self_attn.q_proj.weight": 
"model-00011-of-00024.safetensors", "model.layers.39.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00011-of-00024.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00011-of-00024.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00011-of-00024.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.38.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.103.self_attn.o_proj.weight": "model-00011-of-00024.safetensors", "model.layers.103.self_attn.v_proj.weight": "model-00011-of-00024.safetensors", "model.layers.103.self_attn.k_proj.weight": "model-00011-of-00024.safetensors", "model.layers.103.self_attn.q_proj.weight": "model-00011-of-00024.safetensors", "model.layers.103.input_layernorm.weight": "model-00011-of-00024.safetensors", "model.layers.102.mlp.down_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.mlp.gate_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.mlp.up_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.post_attention_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.102.self_attn.o_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.self_attn.v_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.self_attn.k_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.self_attn.q_proj.weight": "model-00012-of-00024.safetensors", "model.layers.102.input_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.101.mlp.down_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.mlp.gate_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.mlp.up_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.post_attention_layernorm.weight": 
"model-00012-of-00024.safetensors", "model.layers.101.self_attn.o_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.self_attn.v_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.self_attn.k_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.self_attn.q_proj.weight": "model-00012-of-00024.safetensors", "model.layers.101.input_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.100.mlp.down_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.mlp.gate_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.mlp.up_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.post_attention_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.100.self_attn.o_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.self_attn.v_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.self_attn.k_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.self_attn.q_proj.weight": "model-00012-of-00024.safetensors", "model.layers.100.input_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.99.mlp.down_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.mlp.gate_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.mlp.up_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.post_attention_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.99.self_attn.o_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.self_attn.v_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.self_attn.k_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.self_attn.q_proj.weight": "model-00012-of-00024.safetensors", "model.layers.99.input_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.98.mlp.down_proj.weight": "model-00012-of-00024.safetensors", "model.layers.98.mlp.gate_proj.weight": 
"model-00012-of-00024.safetensors", "model.layers.98.mlp.up_proj.weight": "model-00012-of-00024.safetensors", "model.layers.98.post_attention_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.98.self_attn.o_proj.weight": "model-00012-of-00024.safetensors", "model.layers.98.self_attn.v_proj.weight": "model-00012-of-00024.safetensors", "model.layers.98.self_attn.k_proj.weight": "model-00012-of-00024.safetensors", "model.layers.98.self_attn.q_proj.weight": "model-00012-of-00024.safetensors", "model.layers.98.input_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.97.mlp.down_proj.weight": "model-00012-of-00024.safetensors", "model.layers.97.post_attention_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.97.self_attn.o_proj.weight": "model-00012-of-00024.safetensors", "model.layers.97.self_attn.v_proj.weight": "model-00012-of-00024.safetensors", "model.layers.97.self_attn.q_proj.weight": "model-00012-of-00024.safetensors", "model.layers.97.input_layernorm.weight": "model-00012-of-00024.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00012-of-00024.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00013-of-00024.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00013-of-00024.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00013-of-00024.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00013-of-00024.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.self_attn.v_proj.weight": 
"model-00013-of-00024.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00013-of-00024.safetensors", "model.layers.42.input_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00013-of-00024.safetensors", "model.layers.41.input_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.108.mlp.down_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.mlp.gate_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.mlp.up_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.post_attention_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.108.self_attn.o_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.self_attn.v_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.self_attn.k_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.self_attn.q_proj.weight": "model-00013-of-00024.safetensors", "model.layers.108.input_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.107.mlp.down_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.mlp.gate_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.mlp.up_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.post_attention_layernorm.weight": 
"model-00013-of-00024.safetensors", "model.layers.107.self_attn.o_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.self_attn.v_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.self_attn.k_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.self_attn.q_proj.weight": "model-00013-of-00024.safetensors", "model.layers.107.input_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.106.mlp.down_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.mlp.gate_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.mlp.up_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.post_attention_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.106.self_attn.o_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.self_attn.v_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.self_attn.k_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.self_attn.q_proj.weight": "model-00013-of-00024.safetensors", "model.layers.106.input_layernorm.weight": "model-00013-of-00024.safetensors", "model.layers.105.mlp.down_proj.weight": "model-00013-of-00024.safetensors", "model.layers.105.mlp.gate_proj.weight": "model-00014-of-00024.safetensors", "model.layers.105.mlp.up_proj.weight": "model-00014-of-00024.safetensors", "model.layers.105.post_attention_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.105.self_attn.o_proj.weight": "model-00014-of-00024.safetensors", "model.layers.105.self_attn.v_proj.weight": "model-00014-of-00024.safetensors", "model.layers.105.self_attn.k_proj.weight": "model-00014-of-00024.safetensors", "model.layers.105.self_attn.q_proj.weight": "model-00014-of-00024.safetensors", "model.layers.105.input_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.104.mlp.down_proj.weight": "model-00014-of-00024.safetensors", "model.layers.104.mlp.gate_proj.weight": 
"model-00014-of-00024.safetensors", "model.layers.104.mlp.up_proj.weight": "model-00014-of-00024.safetensors", "model.layers.104.post_attention_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.104.self_attn.o_proj.weight": "model-00014-of-00024.safetensors", "model.layers.104.self_attn.v_proj.weight": "model-00014-of-00024.safetensors", "model.layers.104.self_attn.k_proj.weight": "model-00014-of-00024.safetensors", "model.layers.104.self_attn.q_proj.weight": "model-00014-of-00024.safetensors", "model.layers.104.input_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.103.mlp.down_proj.weight": "model-00014-of-00024.safetensors", "model.layers.103.mlp.gate_proj.weight": "model-00014-of-00024.safetensors", "model.layers.103.mlp.up_proj.weight": "model-00014-of-00024.safetensors", "model.layers.103.post_attention_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00014-of-00024.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00014-of-00024.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00014-of-00024.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00014-of-00024.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00014-of-00024.safetensors", "model.layers.45.input_layernorm.weight": 
"model-00014-of-00024.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00014-of-00024.safetensors", "model.layers.44.input_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00014-of-00024.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.43.input_layernorm.weight": "model-00014-of-00024.safetensors", "model.layers.114.mlp.down_proj.weight": "model-00014-of-00024.safetensors", "model.layers.114.mlp.gate_proj.weight": "model-00014-of-00024.safetensors", "model.layers.114.mlp.up_proj.weight": "model-00015-of-00024.safetensors", "model.layers.114.post_attention_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.114.self_attn.k_proj.weight": "model-00015-of-00024.safetensors", "model.layers.114.input_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.113.mlp.down_proj.weight": "model-00015-of-00024.safetensors", "model.layers.113.mlp.gate_proj.weight": "model-00015-of-00024.safetensors", "model.layers.113.mlp.up_proj.weight": "model-00015-of-00024.safetensors", "model.layers.113.post_attention_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.113.self_attn.o_proj.weight": "model-00015-of-00024.safetensors", "model.layers.113.self_attn.v_proj.weight": "model-00015-of-00024.safetensors", "model.layers.113.self_attn.k_proj.weight": 
"model-00015-of-00024.safetensors", "model.layers.113.self_attn.q_proj.weight": "model-00015-of-00024.safetensors", "model.layers.113.input_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.112.mlp.down_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.mlp.gate_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.mlp.up_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.post_attention_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.112.self_attn.o_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.self_attn.v_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.self_attn.k_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.self_attn.q_proj.weight": "model-00015-of-00024.safetensors", "model.layers.112.input_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.111.mlp.down_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.mlp.gate_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.mlp.up_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.post_attention_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.111.self_attn.o_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.self_attn.v_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.self_attn.k_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.self_attn.q_proj.weight": "model-00015-of-00024.safetensors", "model.layers.111.input_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.110.mlp.down_proj.weight": "model-00015-of-00024.safetensors", "model.layers.110.mlp.gate_proj.weight": "model-00015-of-00024.safetensors", "model.layers.110.mlp.up_proj.weight": "model-00015-of-00024.safetensors", "model.layers.110.post_attention_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.110.self_attn.o_proj.weight": 
"model-00015-of-00024.safetensors", "model.layers.110.self_attn.v_proj.weight": "model-00015-of-00024.safetensors", "model.layers.110.self_attn.k_proj.weight": "model-00015-of-00024.safetensors", "model.layers.110.self_attn.q_proj.weight": "model-00015-of-00024.safetensors", "model.layers.110.input_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.109.mlp.down_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.mlp.gate_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.mlp.up_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.post_attention_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.109.self_attn.o_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.self_attn.v_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.self_attn.k_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.self_attn.q_proj.weight": "model-00015-of-00024.safetensors", "model.layers.109.input_layernorm.weight": "model-00015-of-00024.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00015-of-00024.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00015-of-00024.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00015-of-00024.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00015-of-00024.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00015-of-00024.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00016-of-00024.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00016-of-00024.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00016-of-00024.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00016-of-00024.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00016-of-00024.safetensors", "model.layers.48.self_attn.q_proj.weight": 
"model-00016-of-00024.safetensors", "model.layers.48.input_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00016-of-00024.safetensors", "model.layers.47.input_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00016-of-00024.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00016-of-00024.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.46.input_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.115.mlp.down_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.mlp.gate_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.mlp.up_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.post_attention_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.115.self_attn.o_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.self_attn.v_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.self_attn.k_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.self_attn.q_proj.weight": "model-00016-of-00024.safetensors", "model.layers.115.input_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.114.self_attn.o_proj.weight": "model-00016-of-00024.safetensors", "model.layers.114.self_attn.v_proj.weight": 
"model-00016-of-00024.safetensors", "model.layers.114.self_attn.q_proj.weight": "model-00016-of-00024.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00016-of-00024.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00016-of-00024.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00016-of-00024.safetensors", "model.layers.51.input_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00016-of-00024.safetensors", "model.layers.50.input_layernorm.weight": "model-00016-of-00024.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00016-of-00024.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00017-of-00024.safetensors", 
"model.layers.49.post_attention_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.49.input_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00017-of-00024.safetensors", "model.layers.54.input_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00017-of-00024.safetensors", "model.layers.53.input_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00017-of-00024.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00017-of-00024.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00017-of-00024.safetensors", 
"model.layers.52.input_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00017-of-00024.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00017-of-00024.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00017-of-00024.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00017-of-00024.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.76.self_attn.o_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00017-of-00024.safetensors", "model.layers.76.input_layernorm.weight": "model-00017-of-00024.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00017-of-00024.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00017-of-00024.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00018-of-00024.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.55.input_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.80.mlp.gate_proj.weight": 
"model-00018-of-00024.safetensors", "model.layers.80.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.80.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.80.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.80.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.79.input_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.78.input_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00018-of-00024.safetensors", "model.layers.77.post_attention_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.77.input_layernorm.weight": 
"model-00018-of-00024.safetensors", "model.layers.83.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.83.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.83.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.83.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.mlp.down_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.mlp.gate_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.mlp.up_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.post_attention_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.82.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.82.input_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.81.mlp.down_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.mlp.gate_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.mlp.up_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.post_attention_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.81.self_attn.o_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.self_attn.v_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.self_attn.k_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.self_attn.q_proj.weight": "model-00018-of-00024.safetensors", "model.layers.81.input_layernorm.weight": "model-00018-of-00024.safetensors", "model.layers.80.mlp.down_proj.weight": "model-00018-of-00024.safetensors", "model.layers.80.mlp.up_proj.weight": "model-00019-of-00024.safetensors", "model.layers.80.post_attention_layernorm.weight": 
"model-00019-of-00024.safetensors", "model.layers.80.input_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.86.self_attn.v_proj.weight": "model-00019-of-00024.safetensors", "model.layers.86.self_attn.k_proj.weight": "model-00019-of-00024.safetensors", "model.layers.86.self_attn.q_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.mlp.down_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.mlp.gate_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.mlp.up_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.post_attention_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.85.self_attn.o_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.self_attn.v_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.self_attn.k_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.self_attn.q_proj.weight": "model-00019-of-00024.safetensors", "model.layers.85.input_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.84.mlp.down_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.mlp.gate_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.mlp.up_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.post_attention_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.84.self_attn.o_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.self_attn.v_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.self_attn.k_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.self_attn.q_proj.weight": "model-00019-of-00024.safetensors", "model.layers.84.input_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.83.mlp.down_proj.weight": "model-00019-of-00024.safetensors", "model.layers.83.mlp.gate_proj.weight": "model-00019-of-00024.safetensors", "model.layers.83.mlp.up_proj.weight": "model-00019-of-00024.safetensors", 
"model.layers.83.post_attention_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.83.input_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.88.mlp.down_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.mlp.gate_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.mlp.up_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.post_attention_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.88.self_attn.o_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.self_attn.v_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.self_attn.k_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.self_attn.q_proj.weight": "model-00019-of-00024.safetensors", "model.layers.88.input_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.87.mlp.down_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.mlp.gate_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.mlp.up_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.post_attention_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.87.self_attn.o_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.self_attn.v_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.self_attn.k_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.self_attn.q_proj.weight": "model-00019-of-00024.safetensors", "model.layers.87.input_layernorm.weight": "model-00019-of-00024.safetensors", "model.layers.86.mlp.down_proj.weight": "model-00019-of-00024.safetensors", "model.layers.86.mlp.gate_proj.weight": "model-00019-of-00024.safetensors", "model.layers.86.mlp.up_proj.weight": "model-00020-of-00024.safetensors", "model.layers.86.post_attention_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.86.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", 
"model.layers.86.input_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.91.mlp.gate_proj.weight": "model-00020-of-00024.safetensors", "model.layers.91.mlp.up_proj.weight": "model-00020-of-00024.safetensors", "model.layers.91.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", "model.layers.91.self_attn.v_proj.weight": "model-00020-of-00024.safetensors", "model.layers.91.self_attn.k_proj.weight": "model-00020-of-00024.safetensors", "model.layers.91.self_attn.q_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.mlp.down_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.mlp.gate_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.mlp.up_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.post_attention_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.90.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.self_attn.v_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.self_attn.k_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.self_attn.q_proj.weight": "model-00020-of-00024.safetensors", "model.layers.90.input_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.89.mlp.down_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.mlp.gate_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.mlp.up_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.post_attention_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.89.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.self_attn.v_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.self_attn.k_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.self_attn.q_proj.weight": "model-00020-of-00024.safetensors", "model.layers.89.input_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.94.mlp.gate_proj.weight": 
"model-00020-of-00024.safetensors", "model.layers.94.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", "model.layers.94.self_attn.v_proj.weight": "model-00020-of-00024.safetensors", "model.layers.94.self_attn.k_proj.weight": "model-00020-of-00024.safetensors", "model.layers.94.self_attn.q_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.mlp.down_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.mlp.gate_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.mlp.up_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.post_attention_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.93.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.self_attn.v_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.self_attn.k_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.self_attn.q_proj.weight": "model-00020-of-00024.safetensors", "model.layers.93.input_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.92.mlp.down_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.mlp.gate_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.mlp.up_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.post_attention_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.92.self_attn.o_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.self_attn.v_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.self_attn.k_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.self_attn.q_proj.weight": "model-00020-of-00024.safetensors", "model.layers.92.input_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.91.mlp.down_proj.weight": "model-00020-of-00024.safetensors", "model.layers.91.post_attention_layernorm.weight": "model-00020-of-00024.safetensors", "model.layers.91.input_layernorm.weight": 
"model-00020-of-00024.safetensors", "model.layers.117.self_attn.o_proj.weight": "model-00021-of-00024.safetensors", "model.layers.117.self_attn.v_proj.weight": "model-00021-of-00024.safetensors", "model.layers.117.self_attn.k_proj.weight": "model-00021-of-00024.safetensors", "model.layers.117.self_attn.q_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.mlp.down_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.mlp.gate_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.mlp.up_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.post_attention_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.116.self_attn.o_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.self_attn.v_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.self_attn.k_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.self_attn.q_proj.weight": "model-00021-of-00024.safetensors", "model.layers.116.input_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.95.mlp.down_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.mlp.gate_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.mlp.up_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.post_attention_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.95.self_attn.o_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.self_attn.v_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.self_attn.k_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.self_attn.q_proj.weight": "model-00021-of-00024.safetensors", "model.layers.95.input_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.94.mlp.down_proj.weight": "model-00021-of-00024.safetensors", "model.layers.94.mlp.up_proj.weight": "model-00021-of-00024.safetensors", "model.layers.94.post_attention_layernorm.weight": 
"model-00021-of-00024.safetensors", "model.layers.94.input_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.120.self_attn.v_proj.weight": "model-00021-of-00024.safetensors", "model.layers.120.self_attn.k_proj.weight": "model-00021-of-00024.safetensors", "model.layers.120.self_attn.q_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.mlp.down_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.mlp.gate_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.mlp.up_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.post_attention_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.119.self_attn.o_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.self_attn.v_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.self_attn.k_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.self_attn.q_proj.weight": "model-00021-of-00024.safetensors", "model.layers.119.input_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.118.mlp.down_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.mlp.gate_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.mlp.up_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.post_attention_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.118.self_attn.o_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.self_attn.v_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.self_attn.k_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.self_attn.q_proj.weight": "model-00021-of-00024.safetensors", "model.layers.118.input_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.117.mlp.down_proj.weight": "model-00021-of-00024.safetensors", "model.layers.117.mlp.gate_proj.weight": "model-00021-of-00024.safetensors", "model.layers.117.mlp.up_proj.weight": 
"model-00021-of-00024.safetensors", "model.layers.117.post_attention_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.117.input_layernorm.weight": "model-00021-of-00024.safetensors", "model.layers.122.mlp.down_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.mlp.gate_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.mlp.up_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.post_attention_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.122.self_attn.o_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.self_attn.v_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.self_attn.k_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.self_attn.q_proj.weight": "model-00022-of-00024.safetensors", "model.layers.122.input_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.121.mlp.down_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.mlp.gate_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.mlp.up_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.post_attention_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.121.self_attn.o_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.self_attn.v_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.self_attn.k_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.self_attn.q_proj.weight": "model-00022-of-00024.safetensors", "model.layers.121.input_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.120.mlp.down_proj.weight": "model-00022-of-00024.safetensors", "model.layers.120.mlp.gate_proj.weight": "model-00022-of-00024.safetensors", "model.layers.120.mlp.up_proj.weight": "model-00022-of-00024.safetensors", "model.layers.120.post_attention_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.120.self_attn.o_proj.weight": 
"model-00022-of-00024.safetensors", "model.layers.120.input_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.125.mlp.gate_proj.weight": "model-00022-of-00024.safetensors", "model.layers.125.mlp.up_proj.weight": "model-00022-of-00024.safetensors", "model.layers.125.self_attn.o_proj.weight": "model-00022-of-00024.safetensors", "model.layers.125.self_attn.v_proj.weight": "model-00022-of-00024.safetensors", "model.layers.125.self_attn.k_proj.weight": "model-00022-of-00024.safetensors", "model.layers.125.self_attn.q_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.mlp.down_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.mlp.gate_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.mlp.up_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.post_attention_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.124.self_attn.o_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.self_attn.v_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.self_attn.k_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.self_attn.q_proj.weight": "model-00022-of-00024.safetensors", "model.layers.124.input_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.123.mlp.down_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.mlp.gate_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.mlp.up_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.post_attention_layernorm.weight": "model-00022-of-00024.safetensors", "model.layers.123.self_attn.o_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.self_attn.v_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.self_attn.k_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.self_attn.q_proj.weight": "model-00022-of-00024.safetensors", "model.layers.123.input_layernorm.weight": 
"model-00022-of-00024.safetensors", "model.layers.128.mlp.gate_proj.weight": "model-00023-of-00024.safetensors", "model.layers.128.self_attn.o_proj.weight": "model-00023-of-00024.safetensors", "model.layers.128.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.128.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.128.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.mlp.gate_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.mlp.up_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.post_attention_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.127.self_attn.o_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.127.input_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.126.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.mlp.gate_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.mlp.up_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.post_attention_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.126.self_attn.o_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.126.input_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.125.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.125.post_attention_layernorm.weight": 
"model-00023-of-00024.safetensors", "model.layers.125.input_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.131.self_attn.o_proj.weight": "model-00023-of-00024.safetensors", "model.layers.131.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.131.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.131.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.mlp.gate_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.mlp.up_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.post_attention_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.130.self_attn.o_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.130.input_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.129.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.mlp.gate_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.mlp.up_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.post_attention_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.129.self_attn.o_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.129.input_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.128.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.128.mlp.up_proj.weight": 
"model-00023-of-00024.safetensors", "model.layers.128.post_attention_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.128.input_layernorm.weight": "model-00023-of-00024.safetensors", "model.layers.134.self_attn.v_proj.weight": "model-00023-of-00024.safetensors", "model.layers.134.self_attn.k_proj.weight": "model-00023-of-00024.safetensors", "model.layers.134.self_attn.q_proj.weight": "model-00023-of-00024.safetensors", "model.layers.133.mlp.down_proj.weight": "model-00023-of-00024.safetensors", "model.layers.133.mlp.gate_proj.weight": "model-00024-of-00024.safetensors", "model.layers.133.mlp.up_proj.weight": "model-00024-of-00024.safetensors", "model.layers.133.post_attention_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.133.self_attn.o_proj.weight": "model-00024-of-00024.safetensors", "model.layers.133.self_attn.v_proj.weight": "model-00024-of-00024.safetensors", "model.layers.133.self_attn.k_proj.weight": "model-00024-of-00024.safetensors", "model.layers.133.self_attn.q_proj.weight": "model-00024-of-00024.safetensors", "model.layers.133.input_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.132.mlp.down_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.mlp.gate_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.mlp.up_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.post_attention_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.132.self_attn.o_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.self_attn.v_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.self_attn.k_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.self_attn.q_proj.weight": "model-00024-of-00024.safetensors", "model.layers.132.input_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.131.mlp.down_proj.weight": "model-00024-of-00024.safetensors", "model.layers.131.mlp.gate_proj.weight": 
"model-00024-of-00024.safetensors", "model.layers.131.mlp.up_proj.weight": "model-00024-of-00024.safetensors", "model.layers.131.post_attention_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.131.input_layernorm.weight": "model-00024-of-00024.safetensors", "lm_head.weight": "model-00024-of-00024.safetensors", "model.norm.weight": "model-00024-of-00024.safetensors", "model.layers.135.mlp.down_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.mlp.gate_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.mlp.up_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.post_attention_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.135.self_attn.o_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.self_attn.v_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.self_attn.k_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.self_attn.q_proj.weight": "model-00024-of-00024.safetensors", "model.layers.135.input_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.134.mlp.down_proj.weight": "model-00024-of-00024.safetensors", "model.layers.134.mlp.gate_proj.weight": "model-00024-of-00024.safetensors", "model.layers.134.mlp.up_proj.weight": "model-00024-of-00024.safetensors", "model.layers.134.post_attention_layernorm.weight": "model-00024-of-00024.safetensors", "model.layers.134.self_attn.o_proj.weight": "model-00024-of-00024.safetensors", "model.layers.134.input_layernorm.weight": "model-00024-of-00024.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "<unk>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<unk>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
3
+ size 499723
tokenizer_config.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": true,
3
+ "add_eos_token": false,
4
+ "added_tokens_decoder": {
5
+ "0": {
6
+ "content": "<unk>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "1": {
14
+ "content": "<s>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "2": {
22
+ "content": "</s>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ }
29
+ },
30
+ "bos_token": "<s>",
31
+ "clean_up_tokenization_spaces": false,
32
+ "eos_token": "</s>",
33
+ "legacy": false,
34
+ "model_max_length": 1000000000000000019884624838656,
35
+ "pad_token": "<unk>",
36
+ "sp_model_kwargs": {},
37
+ "spaces_between_special_tokens": false,
38
+ "tokenizer_class": "LlamaTokenizer",
39
+ "trust_remote_code": false,
40
+ "unk_token": "<unk>",
41
+ "use_default_system_prompt": false,
42
+ "use_fast": true
43
+ }