models:
  - model: Kaoeiri/Qwenwify-32B-v3
    parameters:
      weight: 1.0
      density: 0.85
  - model: EVA-UNIT-01/EVA-Qwen2.5-32B-v0.2
    parameters:
      weight: 0.50
      density: 0.42
  - model: Sao10K/32B-Qwen2.5-Kunou-v1
    parameters:
      weight: 0.30
      density: 0.75
  - model: Dans-DiscountModels/Qwen2.5-32B-ChatML
    parameters:
      weight: 0.10
      density: 0.85
  - model: OpenBuddy/openbuddy-qwq-32b-v24.2-200k
    parameters:
      weight: 0.25
      density: 0.85
  - model: Saxo/Linkbricks-Horizon-AI-Japanese-Base-32B
    parameters:
      weight: 0.20
      density: 0.82
  - model: allura-org/Qwen2.5-32b-RP-Ink
    parameters:
      weight: 0.28
      density: 0.78
  - model: AiCloser/Qwen2.5-32B-AGI
    parameters:
      weight: 0.12
      density: 0.68
  - model: huihui-ai/QwQ-32B-Preview-abliterated
    parameters:
      weight: 0.14
      density: 0.65
  - model: huihui-ai/Qwen2.5-32B-Instruct-abliterated
    parameters:
      weight: 0.23
      density: 0.75
merge_method: dare_ties
base_model: Qwen/QwQ-32B-Preview
parameters:
  density: 0.90
  epsilon: 0.07
  lambda: 1.35
random_seed: 42
dtype: bfloat16
tokenizer_source: union