# MisTyr-ties / mergekit_config.yml
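# TIES merge of three Mistral-7B fine-tunes onto the AI-Sweden-Models/tyr
# base. TIES trims each model's parameter deltas against the base, elects a
# majority sign per parameter, and merges the surviving deltas.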
models:
  - model: AI-Sweden-Models/tyr
    # no parameters necessary for base model
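  # For the models below, `density` is the fraction of each model's delta
  # ("task vector") parameters retained after TIES trimming, and `weight`
  # scales that model's contribution to the merged result.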
  - model: timpal0l/Mistral-7B-v0.1-flashback-v2
    parameters:
      density: 0.5
      weight: 0.5
  - model: mlabonne/NeuralHermes-2.5-Mistral-7B
    parameters:
      density: 0.5
      weight: 0.3
  - model: RJuro/munin-neuralbeagle-7b
    parameters:
      density: 0.5
      weight: [0, 0.3, 0.7, 1] # weight gradient
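  # The bracketed weight above is a layer-wise gradient: mergekit interpolates
  # the anchor values across the model's layers, so this model contributes
  # roughly nothing at the earliest layers and full weight at the last ones.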
merge_method: ties
base_model: AI-Sweden-Models/tyr
parameters:
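  # rescale the per-model weights so they sum to 1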
  normalize: true
dtype: bfloat16
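
# A typical invocation for this config (the output directory name here is
# illustrative, and --cuda is optional GPU acceleration):
#   mergekit-yaml mergekit_config.yml ./MisTyr-ties --cuda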