# mergekit-moe configuration: builds a 2-expert MoE from two Llama-3-8B fine-tunes
base_model: TheSkullery/llama-3-cat-8b-instruct-v1   # dense model supplying the shared (non-expert) weights
gate_mode: random        # router gates are initialized randomly (positive prompts are not used for routing)
dtype: bfloat16          # output tensors are saved in bfloat16
experts_per_token: 2     # top-2 routing: two experts are active per token
experts:
  - source_model: TheSkullery/llama-3-cat-8b-instruct-v1
    positive_prompts:
      - " "              # placeholder; not needed when gate_mode is random
  - source_model: NousResearch/Hermes-2-Theta-Llama-3-8B
    positive_prompts:
      - " "              # placeholder; not needed when gate_mode is random