Experiment28T3q_ShadowOgno / mergekit_config.yml
slices:
  - sources:
      - model: 'automerger/M7Yamshadowexperiment28-7B'
        layer_range: [0, 32]
      - model: 'automerger/YamExperiment28-7B'
        layer_range: [0, 32]
merge_method: slerp
base_model: 'automerger/M7Yamshadowexperiment28-7B'
parameters:
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5
dtype: bfloat16
random_seed: 0
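
The config SLERP-merges the first 32 layers of the two automerger models, with automerger/M7Yamshadowexperiment28-7B as the interpolation base. The t block sets the blend weight: self_attn tensors follow the anchor list [0, 0.5, 0.3, 0.7, 1] across the layer stack, mlp tensors follow the mirrored list, and all remaining tensors use a flat 0.5; output tensors are cast to bfloat16. Below is a minimal NumPy sketch of that interpolation, assuming the anchors are spread piecewise-linearly over the 32 layers; slerp and t_for_layer are illustrative helpers, not mergekit's API.

import numpy as np

def slerp(t, v0, v1, eps=1e-8):
    # Spherical linear interpolation between two flattened weight tensors.
    v0_n = v0 / (np.linalg.norm(v0) + eps)
    v1_n = v1 / (np.linalg.norm(v1) + eps)
    dot = np.clip(np.dot(v0_n, v1_n), -1.0, 1.0)
    if abs(dot) > 0.9995:
        # Nearly parallel vectors: fall back to plain linear interpolation.
        return (1 - t) * v0 + t * v1
    omega = np.arccos(dot)
    so = np.sin(omega)
    return (np.sin((1 - t) * omega) / so) * v0 + (np.sin(t * omega) / so) * v1

def t_for_layer(anchors, layer, n_layers=32):
    # Map a layer index onto the anchor list (e.g. [0, 0.5, 0.3, 0.7, 1])
    # by piecewise-linear interpolation along the layer stack.
    pos = layer / (n_layers - 1) * (len(anchors) - 1)
    lo, hi = int(np.floor(pos)), int(np.ceil(pos))
    frac = pos - lo
    return (1 - frac) * anchors[lo] + frac * anchors[hi]

# Example: blend weight for the self_attn tensors of layer 10.
attn_anchors = [0, 0.5, 0.3, 0.7, 1]
t = t_for_layer(attn_anchors, layer=10)
# merged = slerp(t, base_weights.ravel(), other_weights.ravel())

A config like this is typically passed to mergekit's mergekit-yaml command together with an output directory to produce the merged checkpoint.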