models:
  - model: cstr/llama3-8b-spaetzle-v33
    # no parameters necessary for base model
  - model: HeshamHaroon/Arabic-llama3
    parameters:
      density: 0.65
      weight: 0.4
  - model: mlabonne/NeuralDaredevil-8B-abliterated
    parameters:
      density: 0.65
      weight: 0.2
merge_method: dare_ties
base_model: cstr/llama3-8b-spaetzle-v33
parameters:
  int8_mask: true
dtype: bfloat16
random_seed: 0
tokenizer_source: base
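
As a minimal sketch, the DARE-TIES merge defined above could be run through mergekit's Python entry points; the config path and output directory below are placeholders, and the option values are illustrative assumptions rather than part of this config.

```python
# Sketch: run the merge config above with mergekit (pip install mergekit).
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "merge-config.yaml"       # placeholder: the YAML shown above
OUTPUT_PATH = "./llama3-8b-dare-ties"   # placeholder: output model directory

# Parse and validate the YAML merge configuration.
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the result to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,             # copy a tokenizer into the output directory
        lazy_unpickle=True,              # lower peak memory while loading shards
    ),
)
```

Equivalently, the mergekit CLI can be pointed at the same file, e.g. `mergekit-yaml merge-config.yaml ./llama3-8b-dare-ties --cuda`.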