# Source snapshot: commit fba4e0d (524 bytes)
---
# mergekit configuration: DELLA merge of three preference-tuned
# Gemma-2-9B-IT variants (WPO-HB, SimPO, SPPO-Iter3) on top of the
# base google/gemma-2-9b-it instruct model.
#
# Per-model parameters (DELLA):
#   density — fraction of each model's delta parameters retained
#             after magnitude-based pruning (see mergekit docs).
#   weight  — scaling applied to that model's retained deltas.
models:
  - model: google/gemma-2-9b-it
    # no parameters necessary for base model
  - model: wzhouad/gemma-2-9b-it-WPO-HB
    parameters:
      density: 0.55
      weight: 0.6
  - model: princeton-nlp/gemma-2-9b-it-SimPO
    parameters:
      density: 0.35
      weight: 0.6
  - model: UCLA-AGI/Gemma-2-9B-It-SPPO-Iter3
    parameters:
      density: 0.25
      weight: 0.4
merge_method: della
base_model: google/gemma-2-9b-it
parameters:
  normalize: true   # renormalize merged weights
  int8_mask: true   # store pruning masks as int8 to save memory
  lambda: 1.0       # post-merge scaling factor on the summed deltas
  epsilon: 0.1      # half-width of the density sampling window (DELLA)
dtype: float16