# TIES merge: five 7B models are merged onto the Franken-Maid base model.
models:
  - model: ibm/merlinite-7b
    parameters:
      weight: 1      # relative contribution of this model's delta weights
      density: 1     # per-model override: keep all of this model's parameters
  - model: Undi95/Toppy-M-7B
    parameters:
      weight: 0.3
  - model: jondurbin/bagel-dpo-7b-v0.4
    parameters:
      weight: 0.2
  - model: senseable/WestLake-7B-v2
    parameters:
      weight: 0.2
  - model: l3utterfly/mistral-7b-v0.1-layla-v4
    parameters:
      weight: 0.2
merge_method: ties
base_model: Franken-Maid
parameters:
  density: 0.4       # default: retain the top 40% of each model's delta parameters
  int8_mask: true    # store sparsification masks in int8 to reduce memory use
  normalize: true    # normalize merge weights so they sum to 1
dtype: bfloat16
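
A minimal sketch of running this merge with mergekit's Python API, following the usage shown in mergekit's README; the config filename `ties-merge.yml` and the output path are placeholders, and exact option names may vary with the installed mergekit version. The `mergekit-yaml` CLI (`mergekit-yaml ties-merge.yml ./merged-model --cuda`) is the equivalent command-line route.

```python
# Sketch: run the TIES merge defined in the YAML above with mergekit.
# Assumes the config is saved as ties-merge.yml; paths are placeholders.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate the merge configuration.
with open("ties-merge.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the result to ./merged-model.
run_merge(
    merge_config,
    out_path="./merged-model",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```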