# phixtral-4x2_8 / mergekit_moe_config.yml
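# mergekit-moe configuration for phixtral-4x2_8: a mixture-of-experts model
# combining four fine-tunes of Microsoft's phi-2 as experts (hence "4x2_8":
# four experts of roughly 2.8B parameters each).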
base_model: cognitivecomputations/dolphin-2_6-phi-2
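# cheap_embed initializes each expert's router gates from the raw token
# embeddings of its positive_prompts (cheaper than the default "hidden" mode,
# which uses per-layer hidden states). With empty prompts, as below, the gates
# carry no task signal, presumably leaving the routing effectively untrained.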
gate_mode: cheap_embed
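# The four experts: a general chat fine-tune (dolphin-2_6, also the base
# model), two DPO-aligned phi-2 variants, and, judging by its name, a
# code-oriented fine-tune.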
experts:
  - source_model: cognitivecomputations/dolphin-2_6-phi-2
    positive_prompts: [""]
  - source_model: lxuechen/phi-2-dpo
    positive_prompts: [""]
  - source_model: Yhyu13/phi-2-sft-dpo-gpt4_en-ep1
    positive_prompts: [""]
  - source_model: mrm8488/phi-2-coder
    positive_prompts: [""]
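# Usage sketch, not part of the original file: assuming a mergekit build whose
# MoE tooling supports phi-2 (phixtral reportedly required a custom branch),
# the merge would be produced with something like:
#
#   mergekit-moe mergekit_moe_config.yml ./phixtral-4x2_8 --copy-tokenizer
#
# and the result loaded in Python (trust_remote_code is needed because
# phixtral ships custom modeling code):
#
#   from transformers import AutoModelForCausalLM
#   model = AutoModelForCausalLM.from_pretrained(
#       "./phixtral-4x2_8", trust_remote_code=True)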