Medmerge-tulu-70b / mergekit_config.yml
models:
  - model: NousResearch/Llama-2-70b-hf
    # no parameters necessary for base model
  - model: wanglab/ClinicalCamel-70B
    parameters:
      weight: 0.08
      density: 0.45
  - model: epfl-llm/meditron-70b
    parameters:
      weight: 0.08
      density: 0.45
  - model: allenai/tulu-2-dpo-70b
    parameters:
      weight: 0.08
      density: 0.45
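# dare_ties merges each non-base model through its delta from the base model:
# `weight` scales that model's contribution, and `density` is the fraction of
# delta parameters kept (the remainder are randomly dropped, DARE-style).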
merge_method: dare_ties
base_model: NousResearch/Llama-2-70b-hf
parameters:
  int8_mask: true
dtype: bfloat16
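# A typical invocation for this config (a sketch; the output directory name is
# illustrative, not part of this repository):
#   mergekit-yaml mergekit_config.yml ./Medmerge-tulu-70b --cuda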