models:
  - model: ChuGyouk/ko-med-gemma-2-9b-it-merge2
    layer_range: [0, 42]
  - model: valeriojob/MedGPT-Gemma2-9B-BA-v.1
    layer_range: [0, 42]
  - model: Gunulhona/Gemma-Ko-Merge
    layer_range: [0, 42]
  - model: TheDrummer/Tiger-Gemma-9B-v3
    layer_range: [0, 42]
  - model: anthracite-org/magnum-v3-9b-customgemma2
    layer_range: [0, 42]
  - model: princeton-nlp/gemma-2-9b-it-SimPO
    layer_range: [0, 42]
  - model: Metin/Gemma-2-9b-it-TR-DPO-V1
    layer_range: [0, 42]
merge_method: model_stock
base_model: Gunulhona/Gemma-Ko-Merge
dtype: float16
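
As a rough sketch (not part of the original card), a configuration like the one above can be applied with mergekit's Python interface; the config file name and output directory below are placeholder assumptions:

```python
# Minimal sketch: run the model_stock merge from a saved YAML config.
# "model_stock.yaml" and "./merged-gemma-2-9b" are hypothetical paths.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate the merge configuration shown above.
with open("model_stock.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the merged checkpoint to disk.
run_merge(
    merge_config,
    out_path="./merged-gemma-2-9b",      # placeholder output directory
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer files
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

With model_stock, the listed models are averaged with weights derived from their geometry relative to the declared base_model (Gunulhona/Gemma-Ko-Merge), so all constituent models must share the Gemma-2-9B architecture and the full 42-layer range given above.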