```yaml
models:
  - model: Nitral-AI/Echidna-7b-128k
    # no parameters necessary for base model
  - model: gradientai/Llama-3-8B-Instruct-262k
    parameters:
      density: 0.5
      weight: 0.5
merge_method: dare_ties
base_model: Nitral-AI/Echidna-7b-128k
parameters:
  normalize: false
  int8_mask: true
dtype: float16
```
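
For context, a config like this is normally applied either with the `mergekit-yaml` CLI or through mergekit's Python API. The sketch below is a rough illustration, not the exact command used for this merge: it assumes the YAML above is saved as `config.yml`, that `./merged-model` is a placeholder output directory, and that the `MergeConfiguration`, `run_merge`, and `MergeOptions` entry points match the installed mergekit version (option names can differ between releases).

```python
# Minimal sketch: run the DARE-TIES merge described by config.yml with mergekit.
# Assumptions: mergekit is installed, "config.yml" holds the YAML above, and
# "./merged-model" is a placeholder output path.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate the merge configuration from the YAML file.
with open("config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Perform the merge and write the merged model to the output directory.
run_merge(
    merge_config,
    out_path="./merged-model",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer
        lazy_unpickle=True,              # reduce peak memory while loading shards
    ),
)
```

The equivalent one-line CLI call is `mergekit-yaml config.yml ./merged-model`; the Python form is only useful if you want to drive the merge from a larger script or notebook.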