MN-Chunky-Lotus-12B / mergekit_config.yml
models:
  - model: Epiculous/Violet_Twilight-v0.2
    parameters:
      weight: 1.0
      density: 1.0
  - model: nbeerbower/mistral-nemo-gutenberg-12B-v4
    parameters:
      weight: 1.0
      density: 0.54
  - model: flammenai/Mahou-1.5-mistral-nemo-12B
    parameters:
      weight: 1.0
      density: 0.26
merge_method: ties
base_model: TheDrummer/Rocinante-12B-v1.1
parameters:
  normalize: true
dtype: bfloat16
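
For context, the per-model density values above control how aggressively each model's task vector (its delta from the base model) is sparsified during the TIES "trim" step: density 1.0 keeps every delta, 0.26 keeps roughly the largest-magnitude 26%. The sketch below is a minimal NumPy illustration of that idea, not mergekit's actual implementation; the function name trim_delta is hypothetical.

import numpy as np

def trim_delta(delta: np.ndarray, density: float) -> np.ndarray:
    # Keep only the top `density` fraction of delta entries by magnitude,
    # zeroing the rest (the TIES trim step applied before sign election).
    if density >= 1.0:
        return delta
    k = int(round(density * delta.size))
    if k == 0:
        return np.zeros_like(delta)
    # Threshold at the k-th largest absolute value.
    threshold = np.partition(np.abs(delta).ravel(), -k)[-k]
    return np.where(np.abs(delta) >= threshold, delta, 0.0)

# Example: Violet_Twilight-v0.2 (density 1.0) keeps all deltas,
# Mahou-1.5-mistral-nemo-12B (density 0.26) keeps only ~26% of them.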