models:
  - model: trashpanda-org/Qwen2.5-32B-Marigold-v0-exp
    parameters:
      weight: 1
      density: 1
  - model: trashpanda-org/Qwen2.5-32B-Marigold-v0
    parameters:
      weight: 1
      density: 1
  - model: Qwen/QwQ-32B
    parameters:
      weight: 0.9
      density: 0.9
merge_method: ties
base_model: Qwen/Qwen2.5-32B
parameters:
  weight: 0.9
  density: 0.9
  normalize: true
  int8_mask: true
tokenizer_source: Qwen/Qwen2.5-32B-Instruct
dtype: bfloat16
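
For reference, a minimal sketch of how a TIES config like the one above is typically executed with mergekit, either through the CLI (mergekit-yaml mergekit_config.yml ./merged-model) or through its Python API. The file name and output path below are placeholders, and the API names (MergeConfiguration, MergeOptions, run_merge) follow mergekit's documented interface; exact signatures may differ between versions.

# Sketch only: assumes mergekit is installed and the YAML above is saved
# as mergekit_config.yml in the working directory.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # the config shown above
OUTPUT_PATH = "./merged-model"       # hypothetical output directory

# Parse the YAML into mergekit's config model.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the TIES merge and write the merged weights plus tokenizer to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU if one is available
        copy_tokenizer=True,             # copy files from tokenizer_source into the output
    ),
)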