# aegolius-acadicus-34b-v3 / mergekit_moe_configfs.yml
# NOTE(review): the lines below are Hugging Face web-UI residue captured when
# this file was copied from the raw-file view; kept here as comments so the
# file parses as YAML.
# ibivibiv's picture
# Update mergekit_moe_configfs.yml — commit 18ac4d1 (verified)
# raw / history / blame — 979 Bytes
---
# mergekit Mixture-of-Experts (mergekit-moe) configuration.
# Builds a sparse MoE model from six 7B expert models sharing one base.
# NOTE(review): the source paste had lost all indentation, which made the
# experts list collapse and repeated `positive_prompts`/`negative_prompts`
# at top level; nesting restored below per the mergekit-moe schema.
base_model: ibivibiv/temp_tuned_mistral2
# "hidden": router gate vectors are derived from the base model's hidden-state
# representations of each expert's positive/negative prompts.
gate_mode: hidden
experts:
  # NOTE(review): the base model also appears as an expert (second entry) —
  # intentional in many mergekit configs, but worth confirming.
  - source_model: macadeliccc/WestLake-7B-v2-laser-truthy-dpo
    positive_prompts:
      - "logical reasoning"
    negative_prompts:
      - "commonsense reasoning"
  - source_model: ibivibiv/temp_tuned_mistral2
    positive_prompts:
      - "commonsense reasoning"
    negative_prompts:
      - "logical reasoning"
  - source_model: chanwit/flux-7b-v0.1
    positive_prompts:
      - "multidisciplinary knowledge"
    negative_prompts:
      - "natural language understanding"
  - source_model: ibivibiv/temp_tuned_mistral3
    positive_prompts:
      - "fact-checking"
    negative_prompts:
      - "logical reasoning"
  - source_model: senseable/WestLake-7B-v2
    positive_prompts:
      - "ambiguity resolution"
    negative_prompts:
      - "scientific knowledge"
  - source_model: PetroGPT/WestSeverus-7B-DPO
    positive_prompts:
      - "mathematical reasoning"
    negative_prompts:
      - "natural language understanding"