README.md ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: other
3
+ ---
4
+ # Join our Discord! https://discord.gg/Nbv9pQ88Xb
5
+ ## Nearly 2000 members strong 💪
6
+ ---
7
+
8
+ [BeaverAI](https://huggingface.co/BeaverAI) proudly presents...
9
+
10
+ # Behemoth 123B v1.1 🦣 - Creative Edition
11
+
12
+ *When you spend your whole life living under a dome, even the idea of an ocean seems impossible to imagine.*
13
+
14
+ ![image/png](https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/5405NZoj_ptSMO_qM09EW.png)
15
+
16
+ ## Description
17
+
18
+ > One of the few other models that's done this for me is the OG Command R 35B. So seeing Behemoth v1.1 have a similar feel to that but with much higher general intelligence really makes it a favourite of mine
19
+
20
+ > I was real happy with v1.1 the other day. I've done some tests on v1 and it's a lot better.
21
+
22
+ > v1 had those glimpses of creativity, but now it's more consistent (with v1.1). It feels like a new model in comparison.
23
+
24
+ > v1 had slop bro. v1.1 makes it irrelevant. The jump is like 720p to 4k. Seriously.
25
+
26
 + > The creativity for v1.1 is off the charts compared to v1, like it's juiced. v1 had these moments that I would say... 'Shit, I've never seen a model respond with prose like this, let me regenerate to see what else I get.' Now, even though every regeneration had a flow of possibilities, sometimes, those possibilities never came. v1.1 is comparable to xxx for the first time, every generation. It directs and guides the scene, scenario and characters unlike anything else
27
+
28
+ > It's about the f***ing prose man. The atmosphere that revolves around the characters. Not just the damn dialogue or introspection. v1.1 will pull from a message 7 generations ago. That window I opened will appear in a future response with the noise from the courtyard filtering through it. The experience of not knowing what this model will produce because it's different than anything else is what keeps it engaging.
29
+
30
+ ## Links
31
+ - Original: https://huggingface.co/TheDrummer/Behemoth-123B-v1.1
32
+ - GGUF: https://huggingface.co/TheDrummer/Behemoth-123B-v1.1-GGUF
33
+ - iMatrix: https://huggingface.co/bartowski/Behemoth-123B-v1.1-GGUF (recommended for smaller quants)
34
+
35
+ ## Arsenal (Supported Chat Templates)
36
+ - Mistral
37
+ - Smart, adaptable, familiar
38
+ - Metharme (Pygmalion in ST)
39
+ - Creative, unhinged, unique
40
+ - Alpaca
41
+ - Creative, unique, unhinged
42
+ - Text Completion
43
+ - You can mix it up and see which works best for you.
44
+
45
+ ### Favorite RP Format
46
+ `*action* Dialogue *thoughts* Dialogue *narration*` in 1st person PoV
47
+
48
+ ## What's Next?
49
+ - Already have plans for a v2!
50
+
51
+ ## Special Thanks
52
 + - Thank you to each and every one who donated on [Ko-Fi](https://ko-fi.com/thedrummer) to make our venture a little bit easier.
53
+ - KinjiHakari777, Dr. Fjut, Kistara, Pseudo, AlexTheVP, Dakkidaze, EvarinSharath'fe, ONTHEREDTEAM, F, Mariana, Garg, Silva, Grozi, & **Phaelon**
54
+
55
+ ![image/png](https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/KvyYIIA1zkxQNEdGro007.png)
56
+
57
+ <audio controls src="https://cdn-uploads.huggingface.co/production/uploads/65f2fd1c25b848bd061b5c2e/FNWdi0WlH-Xd3fjkGVPpp.mpga"></audio>
config.json ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "BeaverAI/Behemoth-123B-v1e",
3
+ "architectures": [
4
+ "MistralForCausalLM"
5
+ ],
6
+ "attention_dropout": 0.0,
7
+ "bos_token_id": 1,
8
+ "eos_token_id": 2,
9
+ "head_dim": 128,
10
+ "hidden_act": "silu",
11
+ "hidden_size": 12288,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 28672,
14
+ "max_position_embeddings": 131072,
15
+ "model_type": "mistral",
16
+ "num_attention_heads": 96,
17
+ "num_hidden_layers": 88,
18
+ "num_key_value_heads": 8,
19
+ "rms_norm_eps": 1e-05,
20
+ "rope_theta": 1000000.0,
21
+ "sliding_window": null,
22
+ "tie_word_embeddings": false,
23
+ "torch_dtype": "bfloat16",
24
+ "transformers_version": "4.45.2",
25
+ "use_cache": true,
26
+ "vocab_size": 32768,
27
+ "quantization_config": {
28
+ "quant_method": "exl2",
29
+ "version": "0.2.3",
30
+ "bits": 8.0,
31
+ "head_bits": 6,
32
+ "calibration": {
33
+ "rows": 115,
34
+ "length": 8192,
35
+ "dataset": "(default)"
36
+ }
37
+ }
38
+ }
huggingface-metadata.txt ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ url: https://huggingface.co/TheDrummer/Behemoth-123B-v1.1
2
+ branch: main
3
+ download date: 2024-10-27 15:34:32
4
+ sha256sum:
5
+ 264b1eb923212d07927a4990418cce3c71c5bbe89130d243183259dd6f8d3bd5 model-00001-of-00051.safetensors
6
+ dc74b1adf1ae9e3fcfd2307612a39cf3386301ae295a4292015353e35b1f0eb6 model-00002-of-00051.safetensors
7
+ f181ce5386badf3a0774102a4dcaeef897404d323161a64174cc81c981d8f1ba model-00003-of-00051.safetensors
8
+ f9268a4dd9e41cd30c16fefb4d728f2c8c69eba28f45a6ad21d93e4dd0f3a072 model-00004-of-00051.safetensors
9
+ d4a816869f0d8293a56f378a1a137a8b32d8b257ebca0eb5b9b714faf8434531 model-00005-of-00051.safetensors
10
+ b5cf1e926d2e83d359c1258f697fa5dbd63faf484e66db5114bc7f79e2a82852 model-00006-of-00051.safetensors
11
+ 8f5cb1f3bc9a6f32686e76b85787aded86554090eeab8b26188b22cbf33b742b model-00007-of-00051.safetensors
12
+ 38810dadb40af8b7aca825402c7547f70e9f295f610b4298f156d3c5b669a07f model-00008-of-00051.safetensors
13
+ 24ca87690dd3fb0dc12f4b6ebfaa11fd778f60222b9e73b10c2ec2185d821643 model-00009-of-00051.safetensors
14
+ 6aa18d158885b8a498a45ad9ff1609fc6628a60d8320c4704a198ecc88156b24 model-00010-of-00051.safetensors
15
+ 10b8ec0d720f7b65affe08d23e767f967cb510748591ecc68f1af8e5c176eb79 model-00011-of-00051.safetensors
16
+ c09c605f80e4526d2ab8da3a672445ab056d96322d4d058e54f6d82602b9e630 model-00012-of-00051.safetensors
17
+ 87421a48f8399ceb263214d309d22cf74e7c9faaa9b55eaf4421912b5cb6687c model-00013-of-00051.safetensors
18
+ 023ae06464d244265fe8508891ba52f6cd52d1666abbb0e3db2c4214a0f85da4 model-00014-of-00051.safetensors
19
+ 0684c6398e6e2e3805034060c7973624072989a1180089d3c99d13dfccf3c47f model-00015-of-00051.safetensors
20
+ ffa085c5925df31836543561e8de9e943809f4da280689c2cef8bda929ac9cc3 model-00016-of-00051.safetensors
21
+ 810a5527afcb218d41c01fb082f861336bbe8f12ef7901912e9149fdff4334d1 model-00017-of-00051.safetensors
22
+ e717d152bf5826b682e91eebc42a63b9ef70c324bba0847b8b69f0a781172eaa model-00018-of-00051.safetensors
23
+ 237ed3d8b3c51140b25bc2d025910db80a892d1418098fdc11bb66372a4f823a model-00019-of-00051.safetensors
24
+ 30af27d9c62cf4e0b9348c49ad4abf628b2ecb6d187261c2229736bbe239cf1d model-00020-of-00051.safetensors
25
+ 59ba859c4c07c6d100e26c7854a47d91681eeceb00b9f2a277a65688c9893eaf model-00021-of-00051.safetensors
26
+ 55b5203e7d8f33c8a3c1f710894609ffdca7c0eb2ffd1b6571de159297eb34e4 model-00022-of-00051.safetensors
27
+ 79139876510a761944eb863be93442ce917b7d8000e520bcb8143f4825f306a5 model-00023-of-00051.safetensors
28
+ 431151e4d899466aedda94311dfdbebed1677b74c55e23d4555308ba37b7c014 model-00024-of-00051.safetensors
29
+ 454deeef346eca9c73559b10f0967e595cbe2d01ba0062f86bdb9f15c1332043 model-00025-of-00051.safetensors
30
+ 3b31cbaeeb30d4f434612af5fa38ed7ae4388c861cbd0c2b12ec487665d6e502 model-00026-of-00051.safetensors
31
+ f4877cbb3e76ee29d3d7d4e417c681386ba9c9cea2547d69c928c820c984d710 model-00027-of-00051.safetensors
32
+ f0b4f13075467671da39b68c75ff6d295a6583ff8231dec8ccd9344ea08b4363 model-00028-of-00051.safetensors
33
+ 426978a3e8c591dae088cd918658b7f8e83ab70a80a38a0132b43cf57783b184 model-00029-of-00051.safetensors
34
+ c8ddf99ec31334fe79449a314d5aa023ec4d24d327fda3420cc1865ab6d8fbdc model-00030-of-00051.safetensors
35
+ 2c26dc77abb988c5ea7a3f5a54ca3e481f626927dab20417912c6a54f8c6e1de model-00031-of-00051.safetensors
36
+ fb4ff0270cd1de14b9b0e3322a106e89379f232ef1754cfe3a3817c0aa91c880 model-00032-of-00051.safetensors
37
+ fde36e3dce140565a3ed1c9893c03acef3b9f0a47f69bdee3c4fc6c51f846dc6 model-00033-of-00051.safetensors
38
+ 86a7dc2dbe0829d3f72e437387b88718dfe69ed0b01975c459d5dbaf1ebd67ca model-00034-of-00051.safetensors
39
+ cdaa0f63caa7b01b7903ba1e141849664eef7c4db5e84ef4bf7e496760fc4f1c model-00035-of-00051.safetensors
40
+ 3b26b5ecf500c0f57a6490a6df944d054ae345463968b2136e94536e3875eca2 model-00036-of-00051.safetensors
41
+ ac50cb0ecac4d2e4a134ccd1853d801233f082d14144f638c45f59e5408ba516 model-00037-of-00051.safetensors
42
+ 2a752f791002df357f7e950071339b1e3128d8b9678c623e30909beb0c25cffe model-00038-of-00051.safetensors
43
+ efc389a3b09cda7093c28d5122925d0272fff50c28bff0e02818c08c7090d98b model-00039-of-00051.safetensors
44
+ 11e7591862728371f1fcc51c279bfb66d75e6f2762a460652abf20eec7ff9699 model-00040-of-00051.safetensors
45
+ 20b29f7d2b62e42f09401594db633dfa67f57b24d85a69e2411fb9a750969ad6 model-00041-of-00051.safetensors
46
+ ced8e4ccc18946784060cb5df9c8008380df205f84b0103be6371222b7a4612b model-00042-of-00051.safetensors
47
+ d91f3fd5fcb3496b52f843952488a956331f994d8194d630e9794d70c8f25388 model-00043-of-00051.safetensors
48
+ a49f2af5347a527d27f18a477dae26263ba52a9c245390d20adcf93d341d443c model-00044-of-00051.safetensors
49
+ b1dae1b7652f2fab3b3dbb33cd091b9d6bc47a287de2d619f1a6ef281995d9c7 model-00045-of-00051.safetensors
50
+ 9e2609cf8e63a9822336a4aa929868a4dde1279d15681277769eb7dbd41a5a29 model-00046-of-00051.safetensors
51
+ cf236ea95bb31622afe03e02fefde8163833181a79893a7d1ff9c1d588728079 model-00047-of-00051.safetensors
52
+ 31692f1816054aff8de5c055e719b22e96de87cf5dd70da062908b40bcc6a1ac model-00048-of-00051.safetensors
53
+ f94d01fb9b66eb1439a14882a399dc54e781640e6af338ffab49fd0705780ee9 model-00049-of-00051.safetensors
54
+ 901ab612e9901d563dc1832fb13b8bbb939a0f904a57d6d7cc4b53f03ee6cf43 model-00050-of-00051.safetensors
55
+ 0c924a31790268a4bdf7f5d11e51ed8e071633da444a85c1754e1363391f7388 model-00051-of-00051.safetensors
56
+ 59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06 tokenizer.model
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"metadata": {"mergekit_version": "0.0.4.4", "total_size": 245220139008}, "weight_map": {"lm_head.weight": "model-00001-of-00051.safetensors", "model.embed_tokens.weight": "model-00001-of-00051.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00051.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00051.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00051.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00051.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00051.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00051.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00051.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00051.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00051.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00051.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00002-of-00051.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00051.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00002-of-00051.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00051.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00051.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00051.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00051.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00051.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00051.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00002-of-00051.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00051.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00002-of-00051.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00051.safetensors", 
"model.layers.10.self_attn.k_proj.weight": "model-00002-of-00051.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00051.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00051.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.input_layernorm.weight": "model-00003-of-00051.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00051.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00051.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00051.safetensors", "model.layers.12.input_layernorm.weight": "model-00003-of-00051.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00003-of-00051.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00051.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00004-of-00051.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00051.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00004-of-00051.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00004-of-00051.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00004-of-00051.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00004-of-00051.safetensors", "model.layers.13.input_layernorm.weight": "model-00004-of-00051.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00004-of-00051.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00004-of-00051.safetensors", "model.layers.13.mlp.up_proj.weight": 
"model-00004-of-00051.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00051.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00051.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00004-of-00051.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00051.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00051.safetensors", "model.layers.14.input_layernorm.weight": "model-00004-of-00051.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00004-of-00051.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00005-of-00051.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00005-of-00051.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00005-of-00051.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00005-of-00051.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00005-of-00051.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00005-of-00051.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.input_layernorm.weight": "model-00005-of-00051.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00005-of-00051.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00005-of-00051.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00005-of-00051.safetensors", "model.layers.16.input_layernorm.weight": "model-00005-of-00051.safetensors", "model.layers.16.mlp.down_proj.weight": 
"model-00006-of-00051.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00006-of-00051.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00006-of-00051.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00006-of-00051.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00006-of-00051.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00006-of-00051.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00006-of-00051.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00006-of-00051.safetensors", "model.layers.17.input_layernorm.weight": "model-00006-of-00051.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00006-of-00051.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00006-of-00051.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00006-of-00051.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00006-of-00051.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00006-of-00051.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00007-of-00051.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00007-of-00051.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.input_layernorm.weight": "model-00007-of-00051.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00007-of-00051.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00007-of-00051.safetensors", "model.layers.18.self_attn.v_proj.weight": 
"model-00007-of-00051.safetensors", "model.layers.19.input_layernorm.weight": "model-00007-of-00051.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00007-of-00051.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00007-of-00051.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00008-of-00051.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00008-of-00051.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00008-of-00051.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00008-of-00051.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00008-of-00051.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.input_layernorm.weight": "model-00008-of-00051.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00008-of-00051.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00008-of-00051.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00008-of-00051.safetensors", "model.layers.20.input_layernorm.weight": "model-00008-of-00051.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00008-of-00051.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00009-of-00051.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00009-of-00051.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00009-of-00051.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00009-of-00051.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00009-of-00051.safetensors", 
"model.layers.20.self_attn.q_proj.weight": "model-00009-of-00051.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.input_layernorm.weight": "model-00009-of-00051.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00009-of-00051.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00009-of-00051.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00009-of-00051.safetensors", "model.layers.22.input_layernorm.weight": "model-00009-of-00051.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00010-of-00051.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00010-of-00051.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00010-of-00051.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00010-of-00051.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00010-of-00051.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00010-of-00051.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00010-of-00051.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00010-of-00051.safetensors", "model.layers.23.input_layernorm.weight": "model-00010-of-00051.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00010-of-00051.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00010-of-00051.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00010-of-00051.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00010-of-00051.safetensors", 
"model.layers.23.self_attn.k_proj.weight": "model-00010-of-00051.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00011-of-00051.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00011-of-00051.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.input_layernorm.weight": "model-00011-of-00051.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00011-of-00051.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00011-of-00051.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00011-of-00051.safetensors", "model.layers.25.input_layernorm.weight": "model-00011-of-00051.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00011-of-00051.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00011-of-00051.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00012-of-00051.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00012-of-00051.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00012-of-00051.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00012-of-00051.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00012-of-00051.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00012-of-00051.safetensors", "model.layers.26.input_layernorm.weight": "model-00012-of-00051.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00012-of-00051.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00012-of-00051.safetensors", "model.layers.26.mlp.up_proj.weight": 
"model-00012-of-00051.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00012-of-00051.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00012-of-00051.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00012-of-00051.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00012-of-00051.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00012-of-00051.safetensors", "model.layers.27.input_layernorm.weight": "model-00012-of-00051.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00012-of-00051.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00013-of-00051.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00013-of-00051.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00013-of-00051.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00013-of-00051.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00013-of-00051.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00013-of-00051.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.input_layernorm.weight": "model-00013-of-00051.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00013-of-00051.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00013-of-00051.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00013-of-00051.safetensors", "model.layers.29.input_layernorm.weight": "model-00013-of-00051.safetensors", "model.layers.29.mlp.down_proj.weight": 
"model-00014-of-00051.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00014-of-00051.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00014-of-00051.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00014-of-00051.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00014-of-00051.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00014-of-00051.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00014-of-00051.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00014-of-00051.safetensors", "model.layers.3.input_layernorm.weight": "model-00014-of-00051.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00014-of-00051.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00014-of-00051.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00014-of-00051.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00014-of-00051.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00014-of-00051.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00015-of-00051.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00015-of-00051.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.input_layernorm.weight": "model-00015-of-00051.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00015-of-00051.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00015-of-00051.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00015-of-00051.safetensors", 
"model.layers.31.input_layernorm.weight": "model-00015-of-00051.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00015-of-00051.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00015-of-00051.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00016-of-00051.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00016-of-00051.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00016-of-00051.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00016-of-00051.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00016-of-00051.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.input_layernorm.weight": "model-00016-of-00051.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00016-of-00051.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00016-of-00051.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00016-of-00051.safetensors", "model.layers.33.input_layernorm.weight": "model-00016-of-00051.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00016-of-00051.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00017-of-00051.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00017-of-00051.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00017-of-00051.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00017-of-00051.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00017-of-00051.safetensors", 
"model.layers.33.self_attn.q_proj.weight": "model-00017-of-00051.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.input_layernorm.weight": "model-00017-of-00051.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00017-of-00051.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00017-of-00051.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00017-of-00051.safetensors", "model.layers.35.input_layernorm.weight": "model-00017-of-00051.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00018-of-00051.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00018-of-00051.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00018-of-00051.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00018-of-00051.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00018-of-00051.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00018-of-00051.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00018-of-00051.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00018-of-00051.safetensors", "model.layers.36.input_layernorm.weight": "model-00018-of-00051.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00018-of-00051.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00018-of-00051.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00018-of-00051.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00018-of-00051.safetensors", 
"model.layers.36.self_attn.k_proj.weight": "model-00018-of-00051.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00019-of-00051.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00019-of-00051.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.input_layernorm.weight": "model-00019-of-00051.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00019-of-00051.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00019-of-00051.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00019-of-00051.safetensors", "model.layers.38.input_layernorm.weight": "model-00019-of-00051.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00019-of-00051.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00019-of-00051.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00020-of-00051.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00020-of-00051.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00020-of-00051.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00020-of-00051.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00020-of-00051.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00020-of-00051.safetensors", "model.layers.39.input_layernorm.weight": "model-00020-of-00051.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00020-of-00051.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00020-of-00051.safetensors", "model.layers.39.mlp.up_proj.weight": 
"model-00020-of-00051.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00020-of-00051.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00020-of-00051.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00020-of-00051.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00020-of-00051.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00020-of-00051.safetensors", "model.layers.4.input_layernorm.weight": "model-00020-of-00051.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00020-of-00051.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00021-of-00051.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00021-of-00051.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00021-of-00051.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00021-of-00051.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00021-of-00051.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00021-of-00051.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.input_layernorm.weight": "model-00021-of-00051.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00021-of-00051.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00021-of-00051.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00021-of-00051.safetensors", "model.layers.41.input_layernorm.weight": "model-00021-of-00051.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00022-of-00051.safetensors", 
"model.layers.41.mlp.gate_proj.weight": "model-00022-of-00051.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00022-of-00051.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00022-of-00051.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00022-of-00051.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00022-of-00051.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00022-of-00051.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00022-of-00051.safetensors", "model.layers.42.input_layernorm.weight": "model-00022-of-00051.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00022-of-00051.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00022-of-00051.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00022-of-00051.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00022-of-00051.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00022-of-00051.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00023-of-00051.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00023-of-00051.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.input_layernorm.weight": "model-00023-of-00051.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00023-of-00051.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00023-of-00051.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00023-of-00051.safetensors", 
"model.layers.44.input_layernorm.weight": "model-00023-of-00051.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00023-of-00051.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00023-of-00051.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00024-of-00051.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00024-of-00051.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00024-of-00051.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00024-of-00051.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00024-of-00051.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.input_layernorm.weight": "model-00024-of-00051.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00024-of-00051.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00024-of-00051.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00024-of-00051.safetensors", "model.layers.46.input_layernorm.weight": "model-00024-of-00051.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00024-of-00051.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00025-of-00051.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00025-of-00051.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00025-of-00051.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00025-of-00051.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00025-of-00051.safetensors", 
"model.layers.46.self_attn.q_proj.weight": "model-00025-of-00051.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.input_layernorm.weight": "model-00025-of-00051.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00025-of-00051.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00025-of-00051.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00025-of-00051.safetensors", "model.layers.48.input_layernorm.weight": "model-00025-of-00051.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00026-of-00051.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00026-of-00051.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00026-of-00051.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00026-of-00051.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00026-of-00051.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00026-of-00051.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00026-of-00051.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00026-of-00051.safetensors", "model.layers.49.input_layernorm.weight": "model-00026-of-00051.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00026-of-00051.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00026-of-00051.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00026-of-00051.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00026-of-00051.safetensors", 
"model.layers.49.self_attn.k_proj.weight": "model-00026-of-00051.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00027-of-00051.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00027-of-00051.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.input_layernorm.weight": "model-00027-of-00051.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00027-of-00051.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00027-of-00051.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00027-of-00051.safetensors", "model.layers.50.input_layernorm.weight": "model-00027-of-00051.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00027-of-00051.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00027-of-00051.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00028-of-00051.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00028-of-00051.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00028-of-00051.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00028-of-00051.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00028-of-00051.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00028-of-00051.safetensors", "model.layers.51.input_layernorm.weight": "model-00028-of-00051.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00028-of-00051.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00028-of-00051.safetensors", "model.layers.51.mlp.up_proj.weight": 
"model-00028-of-00051.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00028-of-00051.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00028-of-00051.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00028-of-00051.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00028-of-00051.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00028-of-00051.safetensors", "model.layers.52.input_layernorm.weight": "model-00028-of-00051.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00028-of-00051.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00029-of-00051.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00029-of-00051.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00029-of-00051.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00029-of-00051.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00029-of-00051.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00029-of-00051.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.input_layernorm.weight": "model-00029-of-00051.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00029-of-00051.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00029-of-00051.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00029-of-00051.safetensors", "model.layers.54.input_layernorm.weight": "model-00029-of-00051.safetensors", "model.layers.54.mlp.down_proj.weight": 
"model-00030-of-00051.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00030-of-00051.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00030-of-00051.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00030-of-00051.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00030-of-00051.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00030-of-00051.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00030-of-00051.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00030-of-00051.safetensors", "model.layers.55.input_layernorm.weight": "model-00030-of-00051.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00030-of-00051.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00030-of-00051.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00030-of-00051.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00030-of-00051.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00030-of-00051.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00031-of-00051.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00031-of-00051.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.input_layernorm.weight": "model-00031-of-00051.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00031-of-00051.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00031-of-00051.safetensors", "model.layers.56.self_attn.v_proj.weight": 
"model-00031-of-00051.safetensors", "model.layers.57.input_layernorm.weight": "model-00031-of-00051.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00031-of-00051.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00031-of-00051.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00032-of-00051.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00032-of-00051.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00032-of-00051.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00032-of-00051.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00032-of-00051.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.input_layernorm.weight": "model-00032-of-00051.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00032-of-00051.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00032-of-00051.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00032-of-00051.safetensors", "model.layers.59.input_layernorm.weight": "model-00032-of-00051.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00032-of-00051.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00033-of-00051.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00033-of-00051.safetensors", "model.layers.59.post_attention_layernorm.weight": "model-00033-of-00051.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00033-of-00051.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00033-of-00051.safetensors", 
"model.layers.59.self_attn.q_proj.weight": "model-00033-of-00051.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.input_layernorm.weight": "model-00033-of-00051.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00033-of-00051.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00033-of-00051.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00033-of-00051.safetensors", "model.layers.60.input_layernorm.weight": "model-00033-of-00051.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00034-of-00051.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00034-of-00051.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00034-of-00051.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00034-of-00051.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00034-of-00051.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00034-of-00051.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00034-of-00051.safetensors", "model.layers.60.self_attn.v_proj.weight": "model-00034-of-00051.safetensors", "model.layers.61.input_layernorm.weight": "model-00034-of-00051.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00034-of-00051.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00034-of-00051.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00034-of-00051.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00034-of-00051.safetensors", "model.layers.61.self_attn.k_proj.weight": 
"model-00034-of-00051.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00035-of-00051.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00035-of-00051.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.input_layernorm.weight": "model-00035-of-00051.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00035-of-00051.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00035-of-00051.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00035-of-00051.safetensors", "model.layers.63.input_layernorm.weight": "model-00035-of-00051.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00035-of-00051.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00035-of-00051.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00036-of-00051.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00036-of-00051.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00036-of-00051.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00036-of-00051.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00036-of-00051.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00036-of-00051.safetensors", "model.layers.64.input_layernorm.weight": "model-00036-of-00051.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00036-of-00051.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00036-of-00051.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00036-of-00051.safetensors", 
"model.layers.64.post_attention_layernorm.weight": "model-00036-of-00051.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00036-of-00051.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00036-of-00051.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00036-of-00051.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00036-of-00051.safetensors", "model.layers.65.input_layernorm.weight": "model-00036-of-00051.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00036-of-00051.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00037-of-00051.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00037-of-00051.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00037-of-00051.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00037-of-00051.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00037-of-00051.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00037-of-00051.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.input_layernorm.weight": "model-00037-of-00051.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00037-of-00051.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00037-of-00051.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00037-of-00051.safetensors", "model.layers.67.input_layernorm.weight": "model-00037-of-00051.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00038-of-00051.safetensors", 
"model.layers.67.mlp.gate_proj.weight": "model-00038-of-00051.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00038-of-00051.safetensors", "model.layers.67.post_attention_layernorm.weight": "model-00038-of-00051.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00038-of-00051.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00038-of-00051.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00038-of-00051.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00038-of-00051.safetensors", "model.layers.68.input_layernorm.weight": "model-00038-of-00051.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00038-of-00051.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00038-of-00051.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00038-of-00051.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00038-of-00051.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00038-of-00051.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00039-of-00051.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00039-of-00051.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.input_layernorm.weight": "model-00039-of-00051.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00039-of-00051.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00039-of-00051.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00039-of-00051.safetensors", 
"model.layers.7.input_layernorm.weight": "model-00039-of-00051.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00039-of-00051.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00039-of-00051.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00040-of-00051.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00040-of-00051.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00040-of-00051.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00040-of-00051.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00040-of-00051.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.input_layernorm.weight": "model-00040-of-00051.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00040-of-00051.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00040-of-00051.safetensors", "model.layers.70.self_attn.v_proj.weight": "model-00040-of-00051.safetensors", "model.layers.71.input_layernorm.weight": "model-00040-of-00051.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00040-of-00051.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00041-of-00051.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00041-of-00051.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00041-of-00051.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00041-of-00051.safetensors", "model.layers.71.self_attn.o_proj.weight": "model-00041-of-00051.safetensors", "model.layers.71.self_attn.q_proj.weight": 
"model-00041-of-00051.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.input_layernorm.weight": "model-00041-of-00051.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00041-of-00051.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00041-of-00051.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00041-of-00051.safetensors", "model.layers.73.input_layernorm.weight": "model-00041-of-00051.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00042-of-00051.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00042-of-00051.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00042-of-00051.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00042-of-00051.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00042-of-00051.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00042-of-00051.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00042-of-00051.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00042-of-00051.safetensors", "model.layers.74.input_layernorm.weight": "model-00042-of-00051.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00042-of-00051.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00042-of-00051.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00042-of-00051.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00042-of-00051.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00042-of-00051.safetensors", 
"model.layers.74.self_attn.o_proj.weight": "model-00043-of-00051.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00043-of-00051.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.input_layernorm.weight": "model-00043-of-00051.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00043-of-00051.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00043-of-00051.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00043-of-00051.safetensors", "model.layers.76.input_layernorm.weight": "model-00043-of-00051.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00043-of-00051.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00043-of-00051.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00044-of-00051.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00044-of-00051.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00044-of-00051.safetensors", "model.layers.76.self_attn.o_proj.weight": "model-00044-of-00051.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00044-of-00051.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00044-of-00051.safetensors", "model.layers.77.input_layernorm.weight": "model-00044-of-00051.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00044-of-00051.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00044-of-00051.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00044-of-00051.safetensors", 
"model.layers.77.post_attention_layernorm.weight": "model-00044-of-00051.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00044-of-00051.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00044-of-00051.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00044-of-00051.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00044-of-00051.safetensors", "model.layers.78.input_layernorm.weight": "model-00044-of-00051.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00044-of-00051.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00045-of-00051.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00045-of-00051.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00045-of-00051.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00045-of-00051.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00045-of-00051.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00045-of-00051.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.input_layernorm.weight": "model-00045-of-00051.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00045-of-00051.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00045-of-00051.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00045-of-00051.safetensors", "model.layers.8.input_layernorm.weight": "model-00045-of-00051.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00046-of-00051.safetensors", 
"model.layers.8.mlp.gate_proj.weight": "model-00046-of-00051.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00046-of-00051.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00046-of-00051.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00046-of-00051.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00046-of-00051.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00046-of-00051.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00046-of-00051.safetensors", "model.layers.80.input_layernorm.weight": "model-00046-of-00051.safetensors", "model.layers.80.mlp.down_proj.weight": "model-00046-of-00051.safetensors", "model.layers.80.mlp.gate_proj.weight": "model-00046-of-00051.safetensors", "model.layers.80.mlp.up_proj.weight": "model-00046-of-00051.safetensors", "model.layers.80.post_attention_layernorm.weight": "model-00046-of-00051.safetensors", "model.layers.80.self_attn.k_proj.weight": "model-00046-of-00051.safetensors", "model.layers.80.self_attn.o_proj.weight": "model-00047-of-00051.safetensors", "model.layers.80.self_attn.q_proj.weight": "model-00047-of-00051.safetensors", "model.layers.80.self_attn.v_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.input_layernorm.weight": "model-00047-of-00051.safetensors", "model.layers.81.mlp.down_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.mlp.gate_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.mlp.up_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.post_attention_layernorm.weight": "model-00047-of-00051.safetensors", "model.layers.81.self_attn.k_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.self_attn.o_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.self_attn.q_proj.weight": "model-00047-of-00051.safetensors", "model.layers.81.self_attn.v_proj.weight": "model-00047-of-00051.safetensors", 
"model.layers.82.input_layernorm.weight": "model-00047-of-00051.safetensors", "model.layers.82.mlp.down_proj.weight": "model-00047-of-00051.safetensors", "model.layers.82.mlp.gate_proj.weight": "model-00047-of-00051.safetensors", "model.layers.82.mlp.up_proj.weight": "model-00048-of-00051.safetensors", "model.layers.82.post_attention_layernorm.weight": "model-00048-of-00051.safetensors", "model.layers.82.self_attn.k_proj.weight": "model-00048-of-00051.safetensors", "model.layers.82.self_attn.o_proj.weight": "model-00048-of-00051.safetensors", "model.layers.82.self_attn.q_proj.weight": "model-00048-of-00051.safetensors", "model.layers.82.self_attn.v_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.input_layernorm.weight": "model-00048-of-00051.safetensors", "model.layers.83.mlp.down_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.mlp.gate_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.mlp.up_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.post_attention_layernorm.weight": "model-00048-of-00051.safetensors", "model.layers.83.self_attn.k_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.self_attn.o_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.self_attn.q_proj.weight": "model-00048-of-00051.safetensors", "model.layers.83.self_attn.v_proj.weight": "model-00048-of-00051.safetensors", "model.layers.84.input_layernorm.weight": "model-00048-of-00051.safetensors", "model.layers.84.mlp.down_proj.weight": "model-00048-of-00051.safetensors", "model.layers.84.mlp.gate_proj.weight": "model-00049-of-00051.safetensors", "model.layers.84.mlp.up_proj.weight": "model-00049-of-00051.safetensors", "model.layers.84.post_attention_layernorm.weight": "model-00049-of-00051.safetensors", "model.layers.84.self_attn.k_proj.weight": "model-00049-of-00051.safetensors", "model.layers.84.self_attn.o_proj.weight": "model-00049-of-00051.safetensors", 
"model.layers.84.self_attn.q_proj.weight": "model-00049-of-00051.safetensors", "model.layers.84.self_attn.v_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.input_layernorm.weight": "model-00049-of-00051.safetensors", "model.layers.85.mlp.down_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.mlp.gate_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.mlp.up_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.post_attention_layernorm.weight": "model-00049-of-00051.safetensors", "model.layers.85.self_attn.k_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.self_attn.o_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.self_attn.q_proj.weight": "model-00049-of-00051.safetensors", "model.layers.85.self_attn.v_proj.weight": "model-00049-of-00051.safetensors", "model.layers.86.input_layernorm.weight": "model-00049-of-00051.safetensors", "model.layers.86.mlp.down_proj.weight": "model-00050-of-00051.safetensors", "model.layers.86.mlp.gate_proj.weight": "model-00050-of-00051.safetensors", "model.layers.86.mlp.up_proj.weight": "model-00050-of-00051.safetensors", "model.layers.86.post_attention_layernorm.weight": "model-00050-of-00051.safetensors", "model.layers.86.self_attn.k_proj.weight": "model-00050-of-00051.safetensors", "model.layers.86.self_attn.o_proj.weight": "model-00050-of-00051.safetensors", "model.layers.86.self_attn.q_proj.weight": "model-00050-of-00051.safetensors", "model.layers.86.self_attn.v_proj.weight": "model-00050-of-00051.safetensors", "model.layers.87.input_layernorm.weight": "model-00050-of-00051.safetensors", "model.layers.87.mlp.down_proj.weight": "model-00050-of-00051.safetensors", "model.layers.87.mlp.gate_proj.weight": "model-00050-of-00051.safetensors", "model.layers.87.mlp.up_proj.weight": "model-00050-of-00051.safetensors", "model.layers.87.post_attention_layernorm.weight": "model-00050-of-00051.safetensors", 
"model.layers.87.self_attn.k_proj.weight": "model-00050-of-00051.safetensors", "model.layers.87.self_attn.o_proj.weight": "model-00051-of-00051.safetensors", "model.layers.87.self_attn.q_proj.weight": "model-00051-of-00051.safetensors", "model.layers.87.self_attn.v_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.input_layernorm.weight": "model-00051-of-00051.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00051-of-00051.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00051-of-00051.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00051-of-00051.safetensors", "model.norm.weight": "model-00051-of-00051.safetensors"}}
output-00001-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5a5858b53c4f9eabc3e1d3583f9777375f0063e2a7aeb8fa0e2348adf8f340b7
3
+ size 8437552464
output-00002-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:18a15fa2e001cc8821655460cb1eb3f1f2329285d3ec504832f9d6acdcbc454e
3
+ size 8411776104
output-00003-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:db47e1bd5756a05094369fe2554d35073cab5cb9c7759dbff2df45086010487c
3
+ size 8556068248
output-00004-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a3b06fb19cb72b8117b6fe9b50070e1c5c00bdfeb51ed474e957b2404b0fa4a3
3
+ size 8575170804
output-00005-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8b954c4ac946bd9c65aa1fe85852fff675faf4a11bf2535123c9410bff2a7fa3
3
+ size 8517607144
output-00006-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f588c65d70888567b1ee601a5db3d54108b2b3be67c361d5280ca5ddbd8158a9
3
+ size 8424227932
output-00007-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:62dc30d0118ac6da1cc7e97433fa89c77ebe25c24eae0a54b5e852360299fdc2
3
+ size 8589300824
output-00008-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6e6d5f42e4f6d70291fdab33830ac10c663c0749e240f5d6ad6b4d621305d1ff
3
+ size 8380852568
output-00009-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:badccb51af508187a0ab46a082e6c59e008a9fd41e927377634beeca848be431
3
+ size 8307530744
output-00010-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1a71a777f546b8eb67b707cbcfe4f2d1199ef1039a01fcddf9324e47257f3a58
3
+ size 8456127568
output-00011-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6995aee89bca72c788212b64611eb09e915806ae61e6956cdfcb5c6755c85ee2
3
+ size 8580302664
output-00012-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9e7039090c92055985df791dea0618bfc4f2eb9a3d44de26698b9ea8913d43ed
3
+ size 8396402968
output-00013-of-00013.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:59792de0ea55374d507c4731eff6017c319b71efe4bb0487ea75da299fcea049
3
+ size 671687872
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "unk_token": {
17
+ "content": "<unk>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ }
23
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
3
+ size 587583
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff