```yaml
base_model: unsloth/Meta-Llama-3.1-8B-Instruct
dtype: bfloat16
merge_method: dare_ties
slices:
- sources:
  - layer_range: [0, 32]
    model: akjindal53244/Llama-3.1-Storm-8B
    parameters:
      density: 0.8
      weight: 0.13
  - layer_range: [0, 32]
    model: arcee-ai/Llama-3.1-SuperNova-Lite
    parameters:
      density: 1.0
      weight: 0.37
  - layer_range: [0, 32]
    model: Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2
    parameters:
      density: 1.0
      weight: 0.13
  - layer_range: [0, 32]
    model: NCSOFT/Llama-VARCO-8B-Instruct
    parameters:
      density: 0.8
      weight: 0.37
  - layer_range: [0, 32]
    model: unsloth/Meta-Llama-3.1-8B-Instruct
tokenizer_source: base
```
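A typical way to reproduce this merge is with mergekit's `mergekit-yaml` entry point; the config filename and output directory below are assumptions for illustration:

```sh
# Sketch: save the YAML above as config.yaml, then run the merge with mergekit.
# ./merged-model is an arbitrary output path; --cuda is optional if a GPU is available.
mergekit-yaml config.yaml ./merged-model --cuda
```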