Venus-120b-v1.0 / mergekit-config.yml
slices:
  - sources:
      - model: Sao10K/Euryale-1.3-L2-70B
        layer_range: [0, 20]
  - sources:
      - model: NousResearch/Nous-Hermes-Llama2-70b
        layer_range: [10, 30]
  - sources:
      - model: migtissera/SynthIA-70B-v1.5
        layer_range: [20, 40]
  - sources:
      - model: Sao10K/Euryale-1.3-L2-70B
        layer_range: [30, 50]
  - sources:
      - model: migtissera/SynthIA-70B-v1.5
        layer_range: [40, 60]
  - sources:
      - model: NousResearch/Nous-Hermes-Llama2-70b
        layer_range: [50, 70]
  - sources:
      - model: Sao10K/Euryale-1.3-L2-70B
        layer_range: [60, 80]
merge_method: passthrough
dtype: float16
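
For a rough sanity check of the resulting model size, the sketch below (illustrative only, not part of this repo) tallies the layer ranges from the config above. Assuming mergekit treats layer_range as half-open [start, end) and that the Llama-2-70B donors each have 80 decoder layers and ~70B parameters, the seven 20-layer slices stack into 140 layers, or roughly 122B parameters, hence the "120b" label. A config like this is typically applied with mergekit's CLI, e.g. mergekit-yaml mergekit-config.yml ./output-dir, though the exact invocation used for this repo isn't stated here.

# Illustrative sketch: estimate the depth and size of the passthrough merge.
# The slice list mirrors the config; the 80-layer / ~70B figures for the
# Llama-2-70B donors are assumptions used only for a rough estimate.

slices = [
    ("Sao10K/Euryale-1.3-L2-70B", (0, 20)),
    ("NousResearch/Nous-Hermes-Llama2-70b", (10, 30)),
    ("migtissera/SynthIA-70B-v1.5", (20, 40)),
    ("Sao10K/Euryale-1.3-L2-70B", (30, 50)),
    ("migtissera/SynthIA-70B-v1.5", (40, 60)),
    ("NousResearch/Nous-Hermes-Llama2-70b", (50, 70)),
    ("Sao10K/Euryale-1.3-L2-70B", (60, 80)),
]

# A passthrough merge simply stacks the slices in order, so the merged
# depth is the sum of the slice widths (ranges assumed half-open).
total_layers = sum(end - start for _, (start, end) in slices)

# Scale the donor's parameter count by the ratio of layer depths.
base_layers, base_params_b = 80, 70
approx_params_b = base_params_b * total_layers / base_layers

print(f"merged layers: {total_layers}")          # 140
print(f"approx. size: ~{approx_params_b:.0f}B")  # ~122B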