---
# mergekit configuration: DARE-TIES merge of three Beagle fine-tunes
# onto a Mistral-7B-v0.1 base.
models:
  - model: mistralai/Mistral-7B-v0.1
    # no parameters necessary for base model
  - model: mlabonne/OmniBeagle-7B
    parameters:
      density: 0.65
      weight: 0.76
  - model: shadowml/BeagleX-7B
    parameters:
      density: 0.6
      weight: 0.12
  - model: shadowml/FoxBeagle-7B
    parameters:
      density: 0.6
      weight: 0.12
merge_method: dare_ties
base_model: mistralai/Mistral-7B-v0.1
parameters:
  # int8_mask reduces memory use during the merge without affecting output weights
  int8_mask: true
dtype: float16