4x7 / mergekit_moe_config.yml
base_model: NousResearch/Hermes-2-Pro-Mistral-7B
gate_mode: hidden
dtype: bfloat16
architecture: Mixtral
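# With gate_mode: hidden, mergekit-moe initializes each expert's routing gate from
# hidden-state representations of that expert's positive_prompts (its other gate
# modes are cheap_embed and random); base_model supplies the shared, non-expert
# weights, and architecture selects the output model family (Mixtral here).
# Exact behavior may vary by mergekit version; see the mergekit-moe docs.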
experts:
  - source_model: NousResearch/Hermes-2-Pro-Mistral-7B
    positive_prompts:
      - "thought"
      - "deep emotions"
      - "complex thoughts"
      - "comprehension"
      - "passion"
  - source_model: openchat/openchat-3.5-0106
    positive_prompts:
      - "sex"
      - "roleplay"
      - "flirty"
      - "cute"
  - source_model: Open-Orca/Mistral-7B-OpenOrca
    positive_prompts:
      - "logic"
      - "knowledge"
      - "reasoning"
  - source_model: cognitivecomputations/dolphin-2.2.1-mistral-7b
    positive_prompts:
      - "history"