# laser-dolphin-mixtral-2x7b-dpo / mergekit_moe_config.yml
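# A mergekit-moe configuration that combines two Dolphin-Mistral-7B checkpoints
# into a 2x7B Mixtral-style mixture-of-experts. As a rough sketch of how
# mergekit-moe treats this file: base_model supplies the shared (non-expert)
# weights such as attention and embeddings, while each entry under `experts:`
# contributes its MLP weights as one routed expert.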
base_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
gate_mode: hidden
dtype: bfloat16
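# gate_mode "hidden" initializes each expert's router weights from hidden-state
# representations of that expert's positive_prompts, so inputs resembling those
# prompts are routed to it (mergekit also offers "cheap_embed" and "random").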
experts:
  - source_model: cognitivecomputations/dolphin-2.1-mistral-7b
    positive_prompts:
      - "code"
      - "solutions"
      - "chat"
      - "questions"
  - source_model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
    positive_prompts:
      - "mathematics"
      - "optimization"
      - "python"
      - "instruction"