---
# Mixture-of-experts merge configuration (mergekit-moe style): builds a
# Mixtral-architecture MoE from four Mistral-7B-class source models, routing
# tokens to experts based on the positive prompts below.
# NOTE(review): each expert's positive_prompts steer the router toward that
# expert for matching content — presumably mergekit "hidden" gate semantics;
# confirm against the mergekit-moe documentation.
base_model: NousResearch/Hermes-2-Pro-Mistral-7B
# Gate initialization mode: "hidden" derives router weights from hidden-state
# representations of the positive prompts (assumed — verify with mergekit docs).
gate_mode: hidden
# Precision for the merged output tensors.
dtype: bfloat16
# Target model architecture for the merged result.
architecture: Mixtral
experts:
  # Expert 1: same model as the base — reflective / emotional content.
  - source_model: NousResearch/Hermes-2-Pro-Mistral-7B
    positive_prompts:
      - "thought"
      - "deep emotions"
      - "complex thoughts"
      - "comprehension"
      - "passion"
  # Expert 2: conversational / roleplay content.
  - source_model: openchat/openchat-3.5-0106
    positive_prompts:
      - "sex"
      - "roleplay"
      - "flirty"
      - "cute"
  # Expert 3: logic and general knowledge / reasoning.
  - source_model: Open-Orca/Mistral-7B-OpenOrca
    positive_prompts:
      - "logic"
      - "knowledge"
      - "reasoning"
  # Expert 4: historical / factual content.
  - source_model: cognitivecomputations/dolphin-2.2.1-mistral-7b
    positive_prompts:
      - "history"