# File size: 265 Bytes
# Source revision: 5696e02
---
# mergekit configuration: passthrough ("frankenmerge") stack of two
# Llama-3-8B-Instruct variants. Slices are concatenated in order, so the
# result interleaves layer blocks from both models rather than averaging
# weights.
#
# NOTE(review): per mergekit convention, layer_range is half-open
# ([0, 16] = layers 0-15), so layers 8-15 of the two donors overlap in the
# stacked output — confirm this duplication is intended.
slices:
  - sources:
      # Long-context (1048k) Gradient variant supplies the early layers.
      - model: gradientai/Llama-3-8B-Instruct-Gradient-1048k
        layer_range: [0, 16]
  - sources:
      # MopeyMule variant supplies the mid-to-final layers.
      - model: failspy/Llama-3-8B-Instruct-MopeyMule
        layer_range: [8, 32]
# passthrough copies slice weights verbatim (no interpolation between models).
merge_method: passthrough
# Build the output tokenizer from the union of both source vocabularies.
tokenizer_source: union
dtype: float16