bcse committed on
Commit
351c907
1 Parent(s): afdf6dd

Upload folder using huggingface_hub

Files changed (1):
  1. mergekit_config.yml (+41, -0)
mergekit_config.yml ADDED
@@ -0,0 +1,41 @@
+ merge_method: linear
+ parameters:
+   weight: 1.0
+ slices:
+ - sources:
+   - model: Undi95/Miqu-70B-Alpaca-DPO
+     layer_range: [0, 1]
+   - model: Sao10K/Euryale-1.3-L2-70B
+     layer_range: [0, 1]
+     parameters:
+       weight: 0
+ - sources:
+   - model: Undi95/Miqu-70B-Alpaca-DPO
+     layer_range: [1, 20]
+ - sources:
+   - model: Sao10K/Euryale-1.3-L2-70B
+     layer_range: [10, 30]
+ - sources:
+   - model: Undi95/Miqu-70B-Alpaca-DPO
+     layer_range: [20, 40]
+ - sources:
+   - model: Sao10K/Euryale-1.3-L2-70B
+     layer_range: [30, 50]
+ - sources:
+   - model: Undi95/Miqu-70B-Alpaca-DPO
+     layer_range: [40, 60]
+ - sources:
+   - model: Sao10K/Euryale-1.3-L2-70B
+     layer_range: [50, 70]
+ - sources:
+   - model: Undi95/Miqu-70B-Alpaca-DPO
+     layer_range: [60, 79]
+ - sources:
+   - model: Undi95/Miqu-70B-Alpaca-DPO
+     layer_range: [79, 80]
+   - model: Sao10K/Euryale-1.3-L2-70B
+     layer_range: [79, 80]
+     parameters:
+       weight: 0
+ dtype: float16
+ tokenizer_source: model:Undi95/Miqu-70B-Alpaca-DPO
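
For reference, this configuration describes a linear merge over stacked layer slices: blocks of Undi95/Miqu-70B-Alpaca-DPO and Sao10K/Euryale-1.3-L2-70B are interleaved, the first and last layers effectively come from Miqu (the Euryale copies in those slices carry weight 0), the result is cast to float16, and the tokenizer is taken from Miqu. The sketch below shows one way to apply a config like this through mergekit's Python entry points. It follows mergekit's documented usage example, but the import paths, MergeOptions fields, and the output directory name are assumptions that may differ between mergekit versions; treat it as an illustrative sketch, not part of this commit.

# Illustrative sketch (not part of this commit): applying mergekit_config.yml
# via mergekit's Python API. Import paths and MergeOptions fields follow
# mergekit's documented usage example and may differ between versions.
import torch
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # the file added in this commit
OUTPUT_PATH = "./merged-model"       # hypothetical output directory

# Parse the YAML into mergekit's configuration object.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge: layer slices are stacked in the order given by `slices`,
# and the tokenizer is copied from the configured tokenizer_source.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,             # keep the tokenizer_source tokenizer
        lazy_unpickle=False,             # experimental low-memory loader, off here
    ),
)

A roughly equivalent command-line invocation should be `mergekit-yaml mergekit_config.yml ./merged-model --cuda`, with the output directory again being a hypothetical choice.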