models:
  - model: DopeyGay/L3-Lunaris-Clouded-Vulca-8B
  - model: cognitivecomputations/dolphin-2.9-llama3-8b
merge_method: slerp
base_model: DopeyGay/L3-Lunaris-Clouded-Vulca-8B
dtype: bfloat16
parameters:
  t: [0, 0.25, 0.5, 1, 0.5, 0.25, 0] # Clouded-Vulca for input & output, dolphin-llama in the middle layers
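With SLERP, `t: 0` keeps the base model's weights and `t: 1` keeps the other model's, so this gradient blends from Clouded-Vulca at the input/output layers into dolphin-2.9-llama3 toward the middle of the network. Below is a minimal sketch of running this config with mergekit's Python API; the config filename, output path, and option values are placeholders, and the exact API surface may vary between mergekit versions (the `mergekit-yaml` CLI is the equivalent one-liner).

```python
# Sketch: run the SLERP merge above with mergekit's Python API.
# Assumes the YAML config is saved as ./slerp-config.yml; output path and
# options are placeholders, not the values used for this model.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

with open("./slerp-config.yml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

run_merge(
    merge_config,
    out_path="./merged-model",           # where the merged weights are written
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # run the interpolation on GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer into the output
        lazy_unpickle=False,             # experimental low-memory loader, off by default
    ),
)
```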