Hermyale-stack-90B / mergekit_config.yml
slices:
  - sources:
      - model: ../Hermes-3-Llama-3.1-70B
        layer_range: [0, 21] # 21 layers
  - sources:
      - model: ../L3.1-70B-Euryale-v2.2
        layer_range: [16, 36] # 20 layers
  - sources:
      - model: ../Hermes-3-Llama-3.1-70B
        layer_range: [30, 50] # 20 layers
  - sources:
      - model: ../L3.1-70B-Euryale-v2.2
        layer_range: [40, 64] # 24 layers
  - sources:
      - model: ../Hermes-3-Llama-3.1-70B
        layer_range: [60, 80] # 20 layers
tokenizer_source: ../Hermes-3-Llama-3.1-70B
merge_method: passthrough
dtype: bfloat16
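
For reference, this config stacks 105 decoder layers (21 + 20 + 20 + 24 + 20) by alternating slices of Hermes-3-Llama-3.1-70B and L3.1-70B-Euryale-v2.2; the passthrough method concatenates the slices as-is rather than averaging weights. Below is a minimal sketch of running the config with mergekit's Python API, assuming the import paths and MergeOptions fields shown in mergekit's README (these may differ between versions) and a hypothetical output directory name:

```python
# Sketch: run the passthrough stack merge defined in mergekit_config.yml.
# Import paths and option names follow mergekit's README and are assumptions
# that may vary across mergekit versions.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"    # this file
OUTPUT_PATH = "./Hermyale-stack-90B"  # hypothetical output directory

with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=False,           # set True to run the merge on GPU
        copy_tokenizer=True,  # copy tokenizer files from tokenizer_source
        lazy_unpickle=True,   # reduce peak RAM while loading shards
    ),
)
```

The same merge can also be launched from the command line with mergekit's `mergekit-yaml` entry point, pointing it at this config file and an output directory.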