---
slices:
  - sources:
      - model: HuggingFaceH4/mistral-7b-grok
        layer_range: [0, 32]
      - model: senseable/WestLake-7B-v2
        layer_range: [0, 32]
merge_method: slerp
base_model: HuggingFaceH4/mistral-7b-grok
parameters:
  t:
    - filter: lm_head
      value: [0.75]
    - filter: embed_tokens
      value: [0.75]
    - filter: self_attn
      value: [0.75, 0.25]
    - filter: mlp
      value: [0.25, 0.75]
    - filter: layernorm
      value: [0.5, 0.5]
    - filter: modelnorm
      value: [0.75]
    - value: 0.5
dtype: bfloat16