# MS-Meadowlark-Alt-22B / mergekit_config.yml
# ToastyPigeon's picture
# Upload folder using huggingface_hub
# 2f69363 verified
# raw
# history blame contribute delete
# 551 Bytes
# mergekit task-arithmetic merge over all 56 layers of Mistral-Small-Instruct-2409.
# Each source contributes its task vector scaled by `weight`; the three weighted
# sources sum to 1.0 (0.4 + 0.5 + 0.1). The `model+lora` form applies a LoRA
# adapter on top of the named base before merging. The final source is the base
# model itself and carries no weight (it is the reference the task vectors are
# taken against).
base_model: unsloth/Mistral-Small-Instruct-2409
merge_method: task_arithmetic
slices:
- sources:
  - layer_range: [0, 56]
    model: output/tempered-rp
    parameters:
      weight: 0.4
  - layer_range: [0, 56]
    model: unsloth/Mistral-Small-Instruct-2409+Alfitaria/mistral-small-fujin-qlora
    parameters:
      weight: 0.5
  - layer_range: [0, 56]
    model: unsloth/Mistral-Small-Instruct-2409+ToastyPigeon/mistral-small-springdragon-qlora
    parameters:
      weight: 0.1
  - layer_range: [0, 56]
    model: unsloth/Mistral-Small-Instruct-2409