# File size: 302 Bytes
# 912a831
 
 
 
 
 
 
 
 
 
 
 
 

# mergekit layer-slicing merge configuration.
# Each entry under `slices` copies the given layer_range from its source
# model; with merge_method: passthrough the slices are concatenated in
# listed order rather than interpolated.
slices:
  # Base of the stack: layers 0-31 (range is end-exclusive) of Fimbulvetr-11B-v2.
  - sources:
    - model: Sao10K/Fimbulvetr-11B-v2
      layer_range: [0, 32]
  # Followed by layers 16-31 of Tiefighter.
  # NOTE(review): LLaMA2-13B has a larger hidden size than the 7B/11B
  # sources — confirm mergekit accepts mixed-width slices in a
  # passthrough merge before running this config.
  - sources:
    - model: KoboldAI/LLaMA2-13B-Tiefighter
      layer_range: [16, 32]
  # Topped with layers 24-31 of Erosumika-7B.
  - sources:
    - model: localfultonextractor/Erosumika-7B
      layer_range: [24, 32]
# Stitch slices together directly; no weight averaging/interpolation.
merge_method: passthrough
# Precision of the written output checkpoint.
dtype: bfloat16