models:
  - model: eren23/ogno-monarch-jaskier-merge-7b-OH-PREF-DPO
    # No parameters necessary for base model
  - model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo-laser
    parameters:
      density: 0.53
      weight: 0.4
  - model: NousResearch/Nous-Hermes-2-Mistral-7B-DPO
    parameters:
      density: 0.53
      weight: 0.3
  - model: Nondzu/Mistral-7B-Instruct-v0.2-code-ft
    parameters:
      density: 0.53
      weight: 0.3
merge_method: dare_ties
base_model: eren23/ogno-monarch-jaskier-merge-7b-OH-PREF-DPO
parameters:
  int8_mask: true
dtype: bfloat16
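
For reference, below is a minimal sketch of applying this configuration through mergekit's Python interface (the same YAML can also be passed to the `mergekit-yaml` CLI). The file paths and option values are illustrative assumptions, and the exact API details may vary by mergekit version.

import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "./merge-config.yml"  # the YAML above saved to disk (assumed path)
OUTPUT_PATH = "./merged-model"     # where the merged weights are written (assumed path)

# Parse the YAML into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the DARE-TIES merge; CUDA is used if available.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),
        copy_tokenizer=True,
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)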