# Models merged on top of the base model, with per-model TIES parameters.
models:
  - model: prithivMLmods/QwQ-LCoT-7B-Instruct
    parameters:
      density: 0.5   # fraction of this model's delta parameters to retain
      weight: 0.5    # relative contribution of this model in the merge
  - model: AIDC-AI/Marco-o1
    parameters:
      density: 0.5
      weight: 0.5

merge_method: ties                         # TIES merging (trim, elect sign, merge)
base_model: happzy2633/qwen2.5-7b-ins-v3   # base the task vectors are computed against
parameters:
  normalize: false   # use the weights as given instead of normalizing them to sum to 1
  int8_mask: true    # store sparsification masks as int8 to reduce memory use
dtype: float16       # data type used for the merged tensors
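
# Usage sketch (assumptions: mergekit is installed and this config is saved as
# mergekit_config.yml; the output directory name is illustrative):
#   mergekit-yaml mergekit_config.yml ./merged-model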