MS-Meadowlark-22B / mergekit_config.yml
base_model: unsloth/Mistral-Small-Instruct-2409
merge_method: task_arithmetic
slices:
- sources:
  - layer_range: [0, 56]
    model: output/ms-creative
    parameters:
      weight: 0.3
  - layer_range: [0, 56]
    model: nbeerbower/Mistral-Small-Gutenberg-Doppel-22B
    parameters:
      weight: 0.6
  - layer_range: [0, 56]
    model: unsloth/Mistral-Small-Instruct-2409+Alfitaria/mistral-small-fujin-qlora
    parameters:
      weight: 0.4
  - layer_range: [0, 56]
    model: unsloth/Mistral-Small-Instruct-2409+ToastyPigeon/mistral-small-springdragon-qlora
    parameters:
      weight: 0.1
  - layer_range: [0, 56]
    model: unsloth/Mistral-Small-Instruct-2409
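
For reference: task_arithmetic builds the merge as base + sum_i(w_i * (model_i - base)), so each weighted source above contributes its difference from unsloth/Mistral-Small-Instruct-2409, and a model written as base+adapter is the base checkpoint with that (Q)LoRA applied before the task vector is taken. Below is a minimal sketch of applying this config through mergekit's Python entry points (MergeConfiguration and run_merge, as shown in the mergekit README); the output path and the MergeOptions flags are illustrative assumptions, not part of this repository.

import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate this repo's merge recipe.
with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the merge. out_path and the option values are placeholders;
# set cuda=True to merge on GPU if one is available.
run_merge(
    merge_config,
    out_path="./MS-Meadowlark-22B",
    options=MergeOptions(
        cuda=False,
        copy_tokenizer=True,
        lazy_unpickle=True,
    ),
)

The equivalent one-liner is the mergekit-yaml CLI pointed at this file and an output directory.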