Upload folder using huggingface_hub (#1)
- 29211901bcae80f27dd879898277069dd7deb3dc73b9d77f34395ddc26acf925 (2cd0d278da7fe472e964ff284a6f388dd2ad5a7a)
- 1c5e1be6cb814ad1043490ec5a75484433c711887da7b7eec39abd842e4f7ec8 (fb2b495b6fdc81e9f0c5edba5b06f3f6024cb702)
- 1890b38a1266b2e61624dd87e379448820e430ba8fe776c92b1cf03d17d0c738 (51ad32cdbe23f054471268cac270d8c8d4f86ffb)
- e33a4c24c8eafc1dfd30ee249af2c9bed621b405bd1f5457bb3154233b1a874e (51f5ef5bf277d733fd7d73c74f762806903f4d04)
- f65fe21def7992366d76779b4ef637500a58ebf30f45ebcb15dc113bc3ffa07e (6fa9fc1d08689011202d368864df66cb9ff99885)
- 8adf0c619058f12dff756ad25852be013be7e86877aee450443ef9301eab4aec (51813a1e914e5504bfb275f0db774be9deffb3e7)
- 56474eea38327268b47789fb85b640cd09c57fcddd2e4b1f5d80d7b2d0f4ff8b (25d4f4134080ed7d74d59856bb078ea41cf45e5c)
- ba55cbb795c8cb276ef0b6068b104248338609836d80eaa583c8db307286607f (36b096b363d66823a55dcb1def45f6091344472b)
- 59166d93618d5d6625656a5db9374adf432091ddf344ca7a47a1bea2eee966ee (ed50bfadf857e517b625f614afa3b4abc7ad54d1)
- c5be46c8346df922875087b039075f129e4a4869f1a7cb518d8abd813250fb12 (b2efa71be8b5440a006deb94b38c691ddcf8b1dc)
- e2e2604828c5b204f3bdd5bd4af38af9c269e9c81bdfb6b0d04cf601db8bc3bc (400d5009a06e555ae20c0c6ddbd6fb52080d99c3)
- c1a542c6258945130252cc217330f148e2956698c0dd08ec8a92b64182297f65 (2dd2dc2c48a909baf201f6065283c0e66728f797)
- 6e651c6b38e0a043baa6e9b7528e3a6fc6b464cdb963dd07b95aa12f6c348c83 (579f130f87e0c5f4956ca6ee5d597c62defeabaa)
- a614181b177c1c2e53105f0aabed96d73e6fc4da8356b4fc322ec47e945b8bb7 (47fdd43453b712ba0441aca58c0c2e203e5f4476)
- 8b6a4bcddb47527310dd1cc573a5417bf2e39c209cccead54809e531f0618355 (50a281a7ebe22b0bf2f635ded6bd6879e34656f8)
- a874c3e0532558d3bce20e956749262735eae155260fcaa1b2a09c68d13f1bc2 (cc97c62fc8cfaa970806dbe76d2f47714ceab1fc)
- 462a1b0935ad441373386bf65b90ac03c7d36fd7af590194e99685a1857f5211 (77ce0173a00c830514a455c95bdab4370ac93c18)
- 7427ca2002f6ac55799a1ccf6dbf639c049188d5223e2d29afc570dc31756574 (94a5302cff30ff7f81f31f073ef5c50748af011b)
- 1eaa0a198aac17c08895ce9dac55cd8fdfc27f9a5e729966b3f3b2d44289cebd (434deb01c60aade3cb9c913e90d7f2e40eefc950)
- 5a854824fd29d0292de6d29cd4e1ec1d9fc0f375cb92466db7609d667465a093 (5bb8aa8e05b7037df6ee7a5cd8eaf11a0d720774)
- README.md +59 -0
- config.json +29 -0
- model-00001-of-00020.safetensors +3 -0
- model-00002-of-00020.safetensors +3 -0
- model-00003-of-00020.safetensors +3 -0
- model-00004-of-00020.safetensors +3 -0
- model-00005-of-00020.safetensors +3 -0
- model-00006-of-00020.safetensors +3 -0
- model-00007-of-00020.safetensors +3 -0
- model-00008-of-00020.safetensors +3 -0
- model-00009-of-00020.safetensors +3 -0
- model-00010-of-00020.safetensors +3 -0
- model-00011-of-00020.safetensors +3 -0
- model-00012-of-00020.safetensors +3 -0
- model-00013-of-00020.safetensors +3 -0
- model-00014-of-00020.safetensors +3 -0
- model-00015-of-00020.safetensors +3 -0
- model-00016-of-00020.safetensors +3 -0
- model-00017-of-00020.safetensors +3 -0
- model-00018-of-00020.safetensors +3 -0
- model-00019-of-00020.safetensors +3 -0
- model-00020-of-00020.safetensors +3 -0
- model.safetensors.index.json +330 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
- tokenizer.model +3 -0
- tokenizer_config.json +44 -0
README.md
@@ -0,0 +1,59 @@
+---
+base_model: karakuri-ai/karakuri-lm-8x7b-chat-v0.1
+datasets:
+- OpenAssistant/oasst2
+- nvidia/HelpSteer
+language:
+- en
+- ja
+library_name: transformers
+license: apache-2.0
+tags:
+- mixtral
+- steerlm
+- mlx
+model-index:
+- name: karakuri-ai/karakuri-lm-8x7b-chat-v0.1
+  results:
+  - task:
+      type: text-generation
+      name: Text Generation
+    dataset:
+      name: MT-Bench
+      type: unknown
+    metrics:
+    - type: unknown
+      value: 7.39375
+      name: score
+    - type: unknown
+      value: 7.540625
+      name: score
+    source:
+      url: https://huggingface.co/spaces/lmsys/mt-bench
+---
+
+# mlx-community/karakuri-lm-8x7b-chat-v0.1
+
+The Model [mlx-community/karakuri-lm-8x7b-chat-v0.1](https://huggingface.co/mlx-community/karakuri-lm-8x7b-chat-v0.1) was converted to MLX format from [karakuri-ai/karakuri-lm-8x7b-chat-v0.1](https://huggingface.co/karakuri-ai/karakuri-lm-8x7b-chat-v0.1) using mlx-lm version **0.19.0**.
+
+## Use with mlx
+
+```bash
+pip install mlx-lm
+```
+
+```python
+from mlx_lm import load, generate
+
+model, tokenizer = load("mlx-community/karakuri-lm-8x7b-chat-v0.1")
+
+prompt = "hello"
+
+if hasattr(tokenizer, "apply_chat_template") and tokenizer.chat_template is not None:
+    messages = [{"role": "user", "content": prompt}]
+    prompt = tokenizer.apply_chat_template(
+        messages, tokenize=False, add_generation_prompt=True
+    )
+
+response = generate(model, tokenizer, prompt=prompt, verbose=True)
+```
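Because the repo's chat template (see tokenizer_config.json below) requires strictly alternating user/assistant roles, multi-turn prompts can be built the same way. A minimal sketch using the same mlx-lm API as the card above; the exact `generate` keyword arguments can vary between mlx-lm releases:

```python
from mlx_lm import load, generate

model, tokenizer = load("mlx-community/karakuri-lm-8x7b-chat-v0.1")

# Roles must alternate user/assistant/user/... or the chat template raises.
messages = [
    {"role": "user", "content": "List three sights in Kyoto."},
    {"role": "assistant", "content": "Kinkaku-ji, Fushimi Inari, and Arashiyama."},
    {"role": "user", "content": "Which one is best in autumn?"},
]

prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)

response = generate(model, tokenizer, prompt=prompt, max_tokens=256, verbose=True)
```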
config.json
@@ -0,0 +1,29 @@
+{
+  "architectures": [
+    "MixtralForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 32768,
+  "model_type": "mixtral",
+  "num_attention_heads": 32,
+  "num_experts_per_tok": 2,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "num_local_experts": 8,
+  "output_router_logits": false,
+  "rms_norm_eps": 1e-05,
+  "rope_theta": 1000000.0,
+  "router_aux_loss_coef": 0.02,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.39.3",
+  "use_cache": true,
+  "vocab_size": 32000
+}
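The fields above can be read back programmatically. A small sketch, assuming the repo id from the README and the standard transformers `AutoConfig` API:

```python
from transformers import AutoConfig

# Fetches config.json from the Hub and parses it into a MixtralConfig.
config = AutoConfig.from_pretrained("mlx-community/karakuri-lm-8x7b-chat-v0.1")

# Values below mirror the config.json shown here: a 32-layer Mixtral with
# 8 local experts, 2 of which are routed per token.
print(config.model_type)           # "mixtral"
print(config.num_hidden_layers)    # 32
print(config.num_local_experts)    # 8
print(config.num_experts_per_tok)  # 2
```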
model-00001-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5bc76500291403fb497ecfe7ff8f8b9d01edd6fd834dabd9e70897e9071b079
+size 5127686301

model-00002-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d26405ffd91097ba23e7df2b7d62281bff6b974c1a70e999c028d42a6bf5072d
+size 4865558824

model-00003-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d97396e4173c8082970662e569dee87873bb47b8c36dbebd2da4d3640a6cdaf9
+size 4865558806

model-00004-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cac42ab6e21e68f94dddb887fd1adfad17c6a34c0008a285235a6bb78275f077
+size 4781590017

model-00005-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0db06e855b4f35bd370e60e48cb84937bd9d73da9f4034b164d7fde9e091cf45
+size 4865558822

model-00006-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:72c19f5eeea4c31332254cfdf1a85a3f59e9b0cb53b4be057028163fc45cb6e3
+size 4865558817

model-00007-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:550726b0129df8ab9bacc0cc8319a7677e1a319c4f13d7a1b5956b6834614f9c
+size 4781590029

model-00008-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:266d752006d60748f937091e4ce215889fcb93c0fa0d823bbd02f23064ed7323
+size 4865558843

model-00009-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:64a2e58a505d6ab2635d24cca6e72135c9490d42f2fabfd3ed0575827519030c
+size 4865558815

model-00010-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ebb87dc4d4de4972e1df28793c5c19c21f15517334fcc086feec7182e1e393a
+size 4781590029

model-00011-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d1935e794675580b51b8decbb6295137869595c27c7fbc354be123876c7bd8e6
+size 4865558843

model-00012-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b55e4a95945cd2b9187d33ec92d7ac0a3fdd19a20bd1b94a8b63fda6fa0e2e2
+size 4865558823

model-00013-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac62f41b769c32ab0178999a7807e48b111b6b5b4faf919fae7b9a0e26f92838
+size 4781590029

model-00014-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe8b70b8a79faef7b405b9c74ca7c86e4e020b80b87b88d3308d9fdc66b8598e
+size 4865558839

model-00015-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:615aeb5aa51067604a1804023cb3a330cca3edbab320bd74dba12af2308b0903
+size 4865558837

model-00016-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:80231fff6ce876dc949e7ee4c73ba3661e13ba5bf18f481ebf4a262435a645bb
+size 4781590025

model-00017-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0af774c340504432ab90cc205642a6bb6b38371d8df4907ba161f391658f7dfd
+size 4865558841

model-00018-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7655f62dc1df923704f18b07d01a2d786d2a46d2850344a3c820dc30434f9140
+size 4865558821

model-00019-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ad28969d5e935360bbcae269dff9b85e68901ddda89ba076ef2dcf478ee1cc00
+size 4781590029

model-00020-of-00020.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0cade03a975108dd36018287c76d6708637cb342f6c07ebe728288a27fe1ba53
+size 1201693251
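Each entry above is a Git LFS pointer: `oid` is the SHA-256 of the actual shard and `size` its byte length, so a downloaded shard can be checked against its pointer. A minimal sketch using only the Python standard library; the local path is hypothetical:

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(shard_path: str, expected_oid: str, expected_size: int) -> bool:
    """Compare a local file against the oid/size fields of a Git LFS pointer."""
    path = Path(shard_path)
    if path.stat().st_size != expected_size:
        return False
    digest = hashlib.sha256()
    with path.open("rb") as f:
        # Stream in 1 MiB chunks; most shards here are ~5 GB.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Values copied from the model-00020-of-00020.safetensors pointer above.
ok = verify_lfs_pointer(
    "model-00020-of-00020.safetensors",
    "0cade03a975108dd36018287c76d6708637cb342f6c07ebe728288a27fe1ba53",
    1201693251,
)
print("shard intact:", ok)
```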
model.safetensors.index.json
@@ -0,0 +1,330 @@
+{
+  "metadata": {
+    "total_size": 93405585408
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00020-of-00020.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.block_sparse_moe.gate.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.block_sparse_moe.switch_mlp.down_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.block_sparse_moe.switch_mlp.up_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.block_sparse_moe.gate.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.block_sparse_moe.switch_mlp.down_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.1.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.block_sparse_moe.switch_mlp.up_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00002-of-00020.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00020.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00020.safetensors",
+    "model.layers.10.block_sparse_moe.gate.weight": "model-00006-of-00020.safetensors",
+    "model.layers.10.block_sparse_moe.switch_mlp.down_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.10.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.10.block_sparse_moe.switch_mlp.up_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00007-of-00020.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00007-of-00020.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.11.block_sparse_moe.gate.weight": "model-00007-of-00020.safetensors",
+    "model.layers.11.block_sparse_moe.switch_mlp.down_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.11.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.11.block_sparse_moe.switch_mlp.up_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00008-of-00020.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00008-of-00020.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00007-of-00020.safetensors",
+    "model.layers.12.block_sparse_moe.gate.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.block_sparse_moe.switch_mlp.down_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.block_sparse_moe.switch_mlp.up_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.13.block_sparse_moe.gate.weight": "model-00008-of-00020.safetensors",
+    "model.layers.13.block_sparse_moe.switch_mlp.down_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.13.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.13.block_sparse_moe.switch_mlp.up_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00009-of-00020.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00009-of-00020.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00008-of-00020.safetensors",
+    "model.layers.14.block_sparse_moe.gate.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.block_sparse_moe.switch_mlp.down_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.block_sparse_moe.switch_mlp.up_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.15.block_sparse_moe.gate.weight": "model-00009-of-00020.safetensors",
+    "model.layers.15.block_sparse_moe.switch_mlp.down_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.15.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.15.block_sparse_moe.switch_mlp.up_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00010-of-00020.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00010-of-00020.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00009-of-00020.safetensors",
+    "model.layers.16.block_sparse_moe.gate.weight": "model-00010-of-00020.safetensors",
+    "model.layers.16.block_sparse_moe.switch_mlp.down_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.16.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.16.block_sparse_moe.switch_mlp.up_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00011-of-00020.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00011-of-00020.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00010-of-00020.safetensors",
+    "model.layers.17.block_sparse_moe.gate.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.block_sparse_moe.switch_mlp.down_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.block_sparse_moe.switch_mlp.up_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.18.block_sparse_moe.gate.weight": "model-00011-of-00020.safetensors",
+    "model.layers.18.block_sparse_moe.switch_mlp.down_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.18.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.18.block_sparse_moe.switch_mlp.up_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00012-of-00020.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00012-of-00020.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00011-of-00020.safetensors",
+    "model.layers.19.block_sparse_moe.gate.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.block_sparse_moe.switch_mlp.down_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.block_sparse_moe.switch_mlp.up_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.2.block_sparse_moe.gate.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.block_sparse_moe.switch_mlp.down_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.block_sparse_moe.switch_mlp.up_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.20.block_sparse_moe.gate.weight": "model-00012-of-00020.safetensors",
+    "model.layers.20.block_sparse_moe.switch_mlp.down_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.20.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.20.block_sparse_moe.switch_mlp.up_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00013-of-00020.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00013-of-00020.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00012-of-00020.safetensors",
+    "model.layers.21.block_sparse_moe.gate.weight": "model-00013-of-00020.safetensors",
+    "model.layers.21.block_sparse_moe.switch_mlp.down_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.21.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.21.block_sparse_moe.switch_mlp.up_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00014-of-00020.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00014-of-00020.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00013-of-00020.safetensors",
+    "model.layers.22.block_sparse_moe.gate.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.block_sparse_moe.switch_mlp.down_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.block_sparse_moe.switch_mlp.up_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.23.block_sparse_moe.gate.weight": "model-00014-of-00020.safetensors",
+    "model.layers.23.block_sparse_moe.switch_mlp.down_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.23.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.23.block_sparse_moe.switch_mlp.up_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00015-of-00020.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00015-of-00020.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00014-of-00020.safetensors",
+    "model.layers.24.block_sparse_moe.gate.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.block_sparse_moe.switch_mlp.down_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.block_sparse_moe.switch_mlp.up_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.25.block_sparse_moe.gate.weight": "model-00015-of-00020.safetensors",
+    "model.layers.25.block_sparse_moe.switch_mlp.down_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.25.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.25.block_sparse_moe.switch_mlp.up_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00016-of-00020.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00016-of-00020.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00015-of-00020.safetensors",
+    "model.layers.26.block_sparse_moe.gate.weight": "model-00016-of-00020.safetensors",
+    "model.layers.26.block_sparse_moe.switch_mlp.down_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.26.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.26.block_sparse_moe.switch_mlp.up_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00017-of-00020.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00017-of-00020.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00016-of-00020.safetensors",
+    "model.layers.27.block_sparse_moe.gate.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.block_sparse_moe.switch_mlp.down_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.block_sparse_moe.switch_mlp.up_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.28.block_sparse_moe.gate.weight": "model-00017-of-00020.safetensors",
+    "model.layers.28.block_sparse_moe.switch_mlp.down_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.28.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.28.block_sparse_moe.switch_mlp.up_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00018-of-00020.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00018-of-00020.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00017-of-00020.safetensors",
+    "model.layers.29.block_sparse_moe.gate.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.block_sparse_moe.switch_mlp.down_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.block_sparse_moe.switch_mlp.up_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.3.block_sparse_moe.gate.weight": "model-00002-of-00020.safetensors",
+    "model.layers.3.block_sparse_moe.switch_mlp.down_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.3.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.3.block_sparse_moe.switch_mlp.up_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00003-of-00020.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00003-of-00020.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00020.safetensors",
+    "model.layers.30.block_sparse_moe.gate.weight": "model-00018-of-00020.safetensors",
+    "model.layers.30.block_sparse_moe.switch_mlp.down_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.30.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.30.block_sparse_moe.switch_mlp.up_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00019-of-00020.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00019-of-00020.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00018-of-00020.safetensors",
+    "model.layers.31.block_sparse_moe.gate.weight": "model-00019-of-00020.safetensors",
+    "model.layers.31.block_sparse_moe.switch_mlp.down_proj.weight": "model-00020-of-00020.safetensors",
+    "model.layers.31.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.31.block_sparse_moe.switch_mlp.up_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00020-of-00020.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00020-of-00020.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00019-of-00020.safetensors",
+    "model.layers.4.block_sparse_moe.gate.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.block_sparse_moe.switch_mlp.down_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.block_sparse_moe.switch_mlp.up_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.5.block_sparse_moe.gate.weight": "model-00003-of-00020.safetensors",
+    "model.layers.5.block_sparse_moe.switch_mlp.down_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.5.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.5.block_sparse_moe.switch_mlp.up_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00004-of-00020.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00004-of-00020.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00003-of-00020.safetensors",
+    "model.layers.6.block_sparse_moe.gate.weight": "model-00004-of-00020.safetensors",
+    "model.layers.6.block_sparse_moe.switch_mlp.down_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.6.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.6.block_sparse_moe.switch_mlp.up_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00005-of-00020.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00005-of-00020.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00004-of-00020.safetensors",
+    "model.layers.7.block_sparse_moe.gate.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.block_sparse_moe.switch_mlp.down_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.block_sparse_moe.switch_mlp.up_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.8.block_sparse_moe.gate.weight": "model-00005-of-00020.safetensors",
+    "model.layers.8.block_sparse_moe.switch_mlp.down_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.8.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.8.block_sparse_moe.switch_mlp.up_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00006-of-00020.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00006-of-00020.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00005-of-00020.safetensors",
+    "model.layers.9.block_sparse_moe.gate.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.block_sparse_moe.switch_mlp.down_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.block_sparse_moe.switch_mlp.gate_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.block_sparse_moe.switch_mlp.up_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00006-of-00020.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00006-of-00020.safetensors",
+    "model.norm.weight": "model-00020-of-00020.safetensors"
+  }
+}
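The `weight_map` is how loaders resolve a tensor name to the shard that stores it. A short sketch of that lookup using only the standard library, assuming the index file has been downloaded next to the shards:

```python
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]

# Which file holds a given tensor (per the map above, shard 20).
print(weight_map["lm_head.weight"])  # model-00020-of-00020.safetensors

# Group tensor names by shard, e.g. to load one file at a time.
by_shard: dict[str, list[str]] = {}
for name, shard in weight_map.items():
    by_shard.setdefault(shard, []).append(name)
print(len(by_shard))  # 20 shards
```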
special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
The diff for this file is too large to render. See raw diff.
tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
tokenizer_config.json
@@ -0,0 +1,44 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "add_prefix_space": null,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [],
+  "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% set system_message = false %}{% endif %}{% for message in loop_messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if loop.index0 == 0 and system_message != false %}{% set content = '<<SYS>>\\n' + system_message + '\\n<</SYS>>\\n\\n' + message['content'] %}{% else %}{% set content = message['content'] %}{% endif %}{% if message['role'] == 'user' %}{% set helpfulness = message['helpfulness']|string or '4' %}{% set correctness = message['correctness']|string or '4' %}{% set coherence = message['coherence']|string or '4' %}{% set complexity = message['complexity']|string or '4' %}{% set verbosity = message['verbosity']|string or '4' %}{% set quality = message['quality']|string or '4' %}{% set toxicity = message['toxicity']|string or '0' %}{% set humor = message['humor']|string or '0' %}{% set creativity = message['creativity']|string or '0' %}{{ '[INST] ' + content + ' [ATTR] helpfulness: ' + helpfulness + ' correctness: ' + correctness + ' coherence: ' + coherence + ' complexity: ' + complexity + ' verbosity: ' + verbosity + ' quality: ' + quality + ' toxicity: ' + toxicity + ' humor: ' + humor + ' creativity: ' + creativity + ' [/ATTR] [/INST]' }}{% elif message['role'] == 'assistant' %}{{ content + eos_token }}{% endif %}{% endfor %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": true,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": null,
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
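The `chat_template` above implements SteerLM-style attribute conditioning: each user turn may carry nine attribute scores that are serialized into an `[ATTR] ... [/ATTR]` block, with defaults of 4 (or 0 for toxicity, humor, and creativity) when a value is missing. A minimal sketch of supplying the scores explicitly through the Hugging Face tokenizer API; the same `apply_chat_template` call works on the tokenizer returned by mlx-lm's `load()`, as in the README:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("mlx-community/karakuri-lm-8x7b-chat-v0.1")

# Attribute keys are the ones the chat_template reads; values are 0-4 scores.
messages = [
    {
        "role": "user",
        "content": "Explain mixture-of-experts routing briefly.",
        "helpfulness": 4,
        "correctness": 4,
        "coherence": 4,
        "complexity": 2,
        "verbosity": 1,
        "quality": 4,
        "toxicity": 0,
        "humor": 0,
        "creativity": 0,
    }
]

prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# <s>[INST] Explain ... [ATTR] helpfulness: 4 correctness: 4 ... [/ATTR] [/INST]
```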