```yaml
models:
  - model: cstr/llama3-8b-spaetzle-v13
    # no parameters necessary for base model
  - model: nbeerbower/llama-3-wissenschaft-8B-v2
    parameters:
      density: 0.65
      weight: 0.4
merge_method: dare_ties
base_model: cstr/llama3-8b-spaetzle-v13
parameters:
  int8_mask: true
dtype: bfloat16
random_seed: 0
tokenizer_source: base
```
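This configuration describes a DARE-TIES merge of nbeerbower/llama-3-wissenschaft-8B-v2 onto the base model cstr/llama3-8b-spaetzle-v13. A minimal sketch of executing it through mergekit's Python interface follows; the import paths, `MergeOptions` fields, config filename, and output directory are assumptions based on mergekit's documented example and may differ between versions. The `mergekit-yaml` command-line tool accomplishes the same thing.

```python
# Minimal sketch: run the merge configuration above with mergekit's Python API.
# Import paths and option names follow mergekit's README example; they are
# assumptions here and may vary between mergekit versions.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "config.yml"        # path to the YAML shown above (assumed filename)
OUTPUT_PATH = "./merged-model"   # where the merged weights are written (illustrative)

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # write a tokenizer into the output directory
    ),
)
```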