# MN-Halide-12b-v1.0 / mergekit_config.yml
base_model: SillyTilly/mistralai_Mistral-Nemo-Base-2407
dtype: float32
merge_method: model_stock
slices:
  - sources:
      - layer_range: [0, 40]
        model: nbeerbower/Lyra4-Gutenberg-12B
      - layer_range: [0, 40]
        model: nbeerbower/mistral-nemo-gutenberg-12B-v4
      - layer_range: [0, 40]
        model: elinas/Chronos-Gold-12B-1.0
      - layer_range: [0, 40]
        model: UsernameJustAnother/Nemo-12B-Marlin-v8
      - layer_range: [0, 40]
        model: TheDrummer/Rocinante-12B-v1.1
      - layer_range: [0, 40]
        model: Epiculous/Azure_Dusk-v0.2
      - layer_range: [0, 40]
        model: Epiculous/Crimson_Dawn-v0.2
      - layer_range: [0, 40]
        model: TheDrummer/Rocinante-12B-v1+jtatman/mistral_nemo_12b_reasoning_psychology_lora
      - layer_range: [0, 40]
        model: nbeerbower/mistral-nemo-wissenschaft-12B
      - layer_range: [0, 40]
        model: nbeerbower/mistral-nemo-bophades-12B
      - layer_range: [0, 40]
        model: anthracite-org/magnum-v2.5-12b-kto+mpasila/Mistral-freeLiPPA-LoRA-12B
      - layer_range: [0, 40]
        model: nbeerbower/mistral-nemo-cc-12B
      - layer_range: [0, 40]
        model: anthracite-org/magnum-v2-12b
      - layer_range: [0, 40]
        model: anthracite-org/magnum-v2.5-12b-kto+jeiku/Aura-NeMo-12B
      - layer_range: [0, 40]
        model: SillyTilly/mistralai_Mistral-Nemo-Base-2407
tokenizer_source: unsloth/Mistral-Nemo-Base-2407
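
# Usage sketch (assumption, not part of the uploaded config): with mergekit
# installed, a config in this format is normally applied via the mergekit-yaml
# CLI; the output directory name below is a placeholder:
#
#   mergekit-yaml mergekit_config.yml ./MN-Halide-12b-v1.0 --cuda
#
# Entries of the form "model+lora_repo" (e.g. the Rocinante-12B-v1 and
# magnum-v2.5-12b-kto sources above) direct mergekit to apply the named LoRA
# adapter to that model before it enters the model_stock merge.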