---
# mergekit merge configuration: DARE-TIES merge of thirteen Mistral-7B
# derivative models onto the Mistral-7B-v0.1 base model.
#
# Each non-base model contributes with weight 0.08 and density 0.4
# (0.5 for Mistral-7B-Instruct-v0.2). Weights sum to ~1.04; mergekit's
# dare_ties method presumably normalizes weights — TODO confirm, or set
# `normalize:` explicitly if exact weights matter.
models:
  # Base model entry — no weight/density parameters (it is the reference
  # for the delta computation, also declared in `base_model` below).
  - model: /media/data5/hf_models/Mistral-7B-v0.1
  - model: /media/data5/hf_models/dolphin-2.2.1-mistral-7b
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/SciPhi-Mistral-7B-32k
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/samantha-1.2-mistral-7b
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/docsgpt-7b-mistral
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/Starling-LM-7B-alpha
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/MetaMath-Cybertron-Starling
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/Mistral-7B-OpenOrca
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/v1olet_marcoroni-go-bruins-merge-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/MistralHermes-CodePro-7B-v1
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/MAmmoTH-7B-Mistral
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/OpenHermes-2.5-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/OpenHermes-2.5-neural-chat-v3-3-Slerp
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/NeuralHermes-2.5-Mistral-7B
    parameters:
      weight: 0.08
      density: 0.4
  - model: /media/data5/hf_models/Mistral-7B-Instruct-v0.2
    parameters:
      weight: 0.08
      # Slightly higher density than the other donors — keeps more of the
      # instruct model's delta parameters.
      density: 0.5

merge_method: dare_ties
base_model: /media/data5/hf_models/Mistral-7B-v0.1
parameters:
  # Use int8 masks during merging to reduce memory usage.
  int8_mask: true
dtype: bfloat16