# File size: 413 Bytes
# f12479f
# mergekit merge configuration: combines three Gemma-2 9B fine-tunes into
# ifable/gemma-2-Ifable-9B using the TIES merge method.
# NOTE(review): parameter semantics below follow mergekit's TIES
# conventions — confirm against the mergekit documentation.
models:
  # Candidate models folded into the base. Per-model `density` is
  # presumably the fraction of each model's delta parameters kept before
  # merging, and `weight` its relative contribution — verify with mergekit docs.
  - model: nbeerbower/gemma2-gutenberg-9B
    parameters:
      density: 0.3
      weight: 0.4
  - model: lemon07r/Gemma-2-Ataraxy-9B
    parameters:
      density: 0.3
      weight: 0.3
  - model: UCLA-AGI/Gemma-2-9B-It-SPPO-Iter3
    parameters:
      density: 0.3
      weight: 0.3
# TIES-merging: resolves sign conflicts between task vectors before summing.
merge_method: ties
# Model whose weights serve as the reference point for the merge deltas.
base_model: ifable/gemma-2-Ifable-9B
parameters:
  # Rescale the merge weights (0.4 + 0.3 + 0.3 here) so they sum to 1.
  normalize: true
  int8_mask: false
# Numeric precision used for the merged output weights.
dtype: bfloat16