base_model: mistralai/Mistral-7B-Instruct-v0.2
gate_mode: hidden # one of "hidden", "cheap_embed", or "random"
dtype: bfloat16 # output dtype (float32, float16, or bfloat16)
experts:
  - source_model: SanjiWatsuki/Kunoichi-DPO-v2-7B
    positive_prompts:
      - "roleplay"
  - source_model: mistralai/Mistral-7B-Instruct-v0.2
    positive_prompts:
      - "chat"
# commented-out ChatML chat template, kept for reference (not used by this config):
#"{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"
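# Minimal usage sketch (assumptions noted): with mergekit installed, a config like
# this is typically passed to the mergekit-moe script, e.g.
#
#   mergekit-moe config.yaml ./moe-output-directory
#
# "config.yaml" and "./moe-output-directory" are hypothetical names for this file
# and the output path. With gate_mode "hidden", the routers are initialized from
# hidden-state representations of each expert's positive_prompts, so the base model
# is loaded during the merge. The resulting checkpoint follows the Mixtral MoE
# architecture and can be loaded from the output directory with transformers'
# AutoModelForCausalLM.from_pretrained.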