# mistral_16x7B / mergekit_moe_config.yml
base_model: mistralai/Mistral-7B-v0.1
dtype: float16
gate_mode: random
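# Note: with gate_mode set to random, mergekit initializes the MoE router
# (gate) weights randomly rather than deriving them from prompts, so the
# routing carries no signal until the merged model is further trained.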
experts:
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
- source_model: mistralai/Mistral-7B-Instruct-v0.2
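# The sixteen expert entries above are identical copies of
# Mistral-7B-Instruct-v0.2, so the merge yields a Mixtral-style 16x7B model
# whose experts all start as clones of one instruct model.
#
# Usage sketch (an assumption, not part of the uploaded file): a config like
# this is normally passed to mergekit's MoE entry point, roughly:
#   mergekit-moe mergekit_moe_config.yml ./mistral_16x7B
# Exact flags (device, memory, tokenizer options) depend on the installed
# mergekit version; check the mergekit documentation before running.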