NeuralFusion-7b-Dare-Ties / mergekit_config.yml
models:
  - model: mlabonne/Monarch-7B
    # no parameters necessary for base model
  - model: Kukedlc/NeuralMaxime-7B-slerp
    parameters:
      density: 0.65
      weight: 0.36
  - model: Kukedlc/Fasciculus-Arcuatus-7B-slerp
    parameters:
      density: 0.6
      weight: 0.34
  - model: Kukedlc/NeoCortex-7B-slerp
    parameters:
      density: 0.6
      weight: 0.3
merge_method: dare_ties
base_model: mlabonne/Monarch-7B
parameters:
  int8_mask: true
dtype: bfloat16
random_seed: 0
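
The config merges three Kukedlc 7B SLERP models into the base model mlabonne/Monarch-7B with the DARE-TIES method: for each donor, density is the fraction of its delta weights that is kept and weight scales its contribution to the merged parameters. Below is a minimal sketch of running such a config through mergekit's Python API; the output path is illustrative, and the MergeOptions field names (cuda, copy_tokenizer, lazy_unpickle, low_cpu_memory) are assumptions that may differ between mergekit versions.

import yaml
import torch
from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML above (saved locally as mergekit_config.yml) into a merge configuration.
with open("mergekit_config.yml", "r", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Run the dare_ties merge and write the result to a local directory.
# Option names are assumptions based on common mergekit usage, not taken from this repo.
run_merge(
    merge_config,
    out_path="./NeuralFusion-7b-Dare-Ties",
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU when one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)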