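# mergekit configuration: SLERP merge of nbeerbower/bruphin-theta into
# Epiculous/Mika-7B over the full layer range [0, 32].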
models:
  - model: nbeerbower/bruphin-theta
    layer_range: [0, 32]
  - model: Epiculous/Mika-7B
    layer_range: [0, 32]
merge_method: slerp
base_model: Epiculous/Mika-7B
parameters:
  t:
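    # t is the interpolation factor: t=0 keeps the base model's weights,
    # t=1 takes the other model's. A list of values is spread as a
    # gradient across the layer range.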
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
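    # Fallback t for all tensors not matched by a filter above.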
    - value: 0.5
dtype: bfloat16
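
# A minimal sketch of reproducing this merge (assumes mergekit is
# installed; the config filename and output path are illustrative):
#
#   pip install mergekit
#   mergekit-yaml mergekit_config.yml ./merged-model --cuda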