L3-Umbral-Mind-RP-v1.0-15B / mergekit_config.yml
dtype: bfloat16
merge_method: passthrough  # stack layer slices verbatim; no weight averaging
slices:
# Base: the first 24 layers of the 8B source model.
- sources:
  - layer_range: [0, 24]
    model: Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
# Layers 8-24, duplicated twice. Scaling o_proj and down_proj to 0.0 zeroes
# each duplicated block's residual contribution, so the copies start out as
# identity layers; every other tensor keeps its original scale (1.0).
- sources:
  - layer_range: [8, 24]
    model: Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
    parameters:
      scale:
      - filter: o_proj
        value: 0.0
      - filter: down_proj
        value: 0.0
      - value: 1.0
- sources:
  - layer_range: [8, 24]
    model: Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
    parameters:
      scale:
      - filter: o_proj
        value: 0.0
      - filter: down_proj
        value: 0.0
      - value: 1.0
# Tail: the remaining layers 24-32, giving 24 + 16 + 16 + 8 = 64 layers total
# versus the source's 32, which is what upscales the 8B model to ~15B.
- sources:
  - layer_range: [24, 32]
    model: Casual-Autopsy/L3-Umbral-Mind-RP-v1.0-8B
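
To reproduce the merge, this file can be passed to mergekit. Below is a minimal sketch using mergekit's documented Python entry point (run_merge and MergeOptions from the project README); the output directory and option values are illustrative assumptions, not taken from this repo.

# Minimal sketch: run the passthrough merge above with mergekit's Python API.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML config into mergekit's validated configuration object.
with open("mergekit_config.yml", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./L3-Umbral-Mind-RP-v1.0-15B",  # hypothetical output directory
    options=MergeOptions(
        cuda=False,           # passthrough just copies/scales tensors; no GPU required
        copy_tokenizer=True,  # carry the source model's tokenizer into the output
        lazy_unpickle=True,   # load tensors lazily to keep peak RAM low
    ),
)

The CLI equivalent is: mergekit-yaml mergekit_config.yml ./L3-Umbral-Mind-RP-v1.0-15B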