models:
  - model: C:/Users/Jacoby/Downloads/text-generation-webui-main/models/BAAI_Infinity-Instruct-7M-Gen-mistral-7B
  - model: C:/Users/Jacoby/Downloads/text-generation-webui-main/models/SanjiWatsuki_Kunoichi-7B
merge_method: slerp
base_model: C:/Users/Jacoby/Downloads/text-generation-webui-main/models/SanjiWatsuki_Kunoichi-7B
parameters:
  t:
    - value: [0, 0, 0.2, 0.3, 0.4, 0.5, 0.4, 0.3, 0.2, 0, 0] # Keeping the first and last layers at t=0 (i.e. taken straight from the base model) is key for good results
  embed_slerp: true # This is super important; otherwise the merge will fail
dtype: float16
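# Usage sketch: with mergekit installed, a config like this is typically run with the
# mergekit-yaml CLI, assuming the file is saved as config.yml (placeholder name):
#   mergekit-yaml config.yml ./merged-model
# where ./merged-model is a placeholder output directory for the merged weights.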