---
# Model Stock merge recipe producing ZeroXClem/LLama3.1-Hawkish-Theia-Fireball-8B.
# Blends the Theia and Fireball fine-tunes onto the Hawkish base via mergekit's
# model_stock method; bfloat16 weights, int8 masking, no weight normalization.
models:
  - model: Chainbase-Labs/Theia-Llama-3.1-8B-v1
  - model: EpistemeAI/Fireball-Meta-Llama-3.2-8B-Instruct-agent-003-128k-code-DPO
  - model: mukaj/Llama-3.1-Hawkish-8B
merge_method: model_stock
base_model: mukaj/Llama-3.1-Hawkish-8B
normalize: false
int8_mask: true
dtype: bfloat16