giraffe176 committed • Commit 2ca100e • Parent(s): c651b8a

Upload folder using huggingface_hub

Files changed:
- README.md +7 -8
- mergekit_config.yml +4 -4
- model-00001-of-00002.safetensors +1 -1
- model-00002-of-00002.safetensors +1 -1
README.md CHANGED

@@ -23,10 +23,10 @@ This model was merged using the [DARE](https://arxiv.org/abs/2311.03099) [TIES](
 ### Models Merged
 
 The following models were included in the merge:
-* [
+* [mlabonne/AlphaMonarch-7B](https://huggingface.co/mlabonne/AlphaMonarch-7B)
 * [NeverSleep/Noromaid-7B-0.4-DPO](https://huggingface.co/NeverSleep/Noromaid-7B-0.4-DPO)
 * [senseable/WestLake-7B-v2](https://huggingface.co/senseable/WestLake-7B-v2)
-* [
+* [argilla/distilabeled-OpenHermes-2.5-Mistral-7B](https://huggingface.co/argilla/distilabeled-OpenHermes-2.5-Mistral-7B)
 
 ### Configuration
 
@@ -39,19 +39,19 @@ models:
   - model: senseable/WestLake-7B-v2
     parameters:
       density: 0.58
-      weight: [0.
+      weight: [0.50, 0.40, 0.25, 0.05]
   - model: NeverSleep/Noromaid-7B-0.4-DPO
     parameters:
       density: 0.58
-      weight: [0.
+      weight: [0.05, 0.05, 0.25, 0.40]
   - model: argilla/distilabeled-OpenHermes-2.5-Mistral-7B
     parameters:
       density: 0.58
-      weight: [.
+      weight: [0.40, 0.50, 0.25, 0.05]
   - model: mlabonne/AlphaMonarch-7B
     parameters:
       density: 0.58
-      weight: [0.
+      weight: [0.05, 0.05, 0.25, 0.50]
 merge_method: dare_ties
 base_model: mistralai/Mistral-7B-v0.1
 parameters:
@@ -59,11 +59,10 @@ parameters:
 dtype: bfloat16
 
 ```
-
 ### Benchmark Testing
 
 | | MT-Bench | EQ-Bench v2.1 |
 |---------------------------------------------------------|---------------------------------------------|---------------------------------------------|
-| giraffe176/WestLake_Noromaid_OpenHermes_neural-chatv0.1 | 7.786932 | **
+| giraffe176/WestLake_Noromaid_OpenHermes_neural-chatv0.1 | 7.786932 | **77.41** (1 Shot, ooba) |
 | claude-v1 | **7.900000** | 76.83 [Leaderboard](https://eqbench.com/) |
 | | [(Paper)](https://arxiv.org/abs/2306.05685) | [(Paper)](https://arxiv.org/abs/2312.06281) |
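Editor's note: as a quick sanity check of the merged model documented above, the sketch below loads it with Hugging Face Transformers. It assumes the repo id shown in the benchmark table (giraffe176/WestLake_Noromaid_OpenHermes_neural-chatv0.1) and the bfloat16 dtype from the merge config; the prompt is illustrative only, since no prompt format is specified in this diff.

```python
# Minimal loading sketch (not part of this commit).
# Assumes the repo id from the benchmark table and the bfloat16 dtype from the config.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "giraffe176/WestLake_Noromaid_OpenHermes_neural-chatv0.1"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches dtype in the merge config
    device_map="auto",           # requires accelerate; places shards automatically
)

inputs = tokenizer("The DARE-TIES merge combines", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```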
mergekit_config.yml CHANGED

@@ -4,19 +4,19 @@ models:
   - model: senseable/WestLake-7B-v2
     parameters:
       density: 0.58
-      weight: [0.
+      weight: [0.50, 0.40, 0.25, 0.05]
   - model: NeverSleep/Noromaid-7B-0.4-DPO
     parameters:
       density: 0.58
-      weight: [0.
+      weight: [0.05, 0.05, 0.25, 0.40]
   - model: argilla/distilabeled-OpenHermes-2.5-Mistral-7B
     parameters:
       density: 0.58
-      weight: [.
+      weight: [0.40, 0.50, 0.25, 0.05]
   - model: mlabonne/AlphaMonarch-7B
     parameters:
       density: 0.58
-      weight: [0.
+      weight: [0.05, 0.05, 0.25, 0.50]
 merge_method: dare_ties
 base_model: mistralai/Mistral-7B-v0.1
 parameters:
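Editor's note: each source model's weight is now a list of four values, which mergekit appears to treat as a parameter gradient interpolated across layer ranges, so a model's contribution shifts from the bottom to the top of the network. A hypothetical sketch of re-running the merge from this file follows; the mergekit-yaml entry point and its arguments are assumptions based on mergekit's documented CLI, not something stated in this commit.

```python
# Hypothetical reproduction sketch (not part of this commit): re-run the merge
# from mergekit_config.yml using mergekit's command-line entry point.
# Command name and arguments are assumptions from mergekit's documented usage.
import subprocess

subprocess.run(
    ["mergekit-yaml", "mergekit_config.yml", "./merged-model"],
    check=True,  # raise CalledProcessError if the merge fails
)
```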
model-00001-of-00002.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:be74d486aae32ac846bad1160b5df8172d488a89f901d7cf6e00448e6e6cd388
 size 9976535752
model-00002-of-00002.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:488e6c68c831f35af21805f424498dc5edc3b49aa79232275bddd26dca08a262
 size 4506962288
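Editor's note: both safetensors entries are Git LFS pointer files, so the diff only swaps the oid sha256 while the shard sizes stay the same. A minimal verification sketch, assuming the two shards have been downloaded into the working directory:

```python
# Verify downloaded shards against the sha256 oids from the LFS pointers above.
import hashlib

expected = {
    "model-00001-of-00002.safetensors": "be74d486aae32ac846bad1160b5df8172d488a89f901d7cf6e00448e6e6cd388",
    "model-00002-of-00002.safetensors": "488e6c68c831f35af21805f424498dc5edc3b49aa79232275bddd26dca08a262",
}

for name, oid in expected.items():
    h = hashlib.sha256()
    with open(name, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    status = "OK" if h.hexdigest() == oid else "MISMATCH"
    print(f"{name}: {status}")
```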