models:
  - model: MediaTek-Research/Breeze-7B-Instruct-v0_1
    # No parameters necessary for the base model
  - model: cognitivecomputations/dolphin-2.6-mistral-7b-dpo
    parameters:
      density: 0.5   # fraction of delta (task-vector) weights retained by DARE
      weight: 0.5    # scaling applied to this model's contribution in the merge
merge_method: dare_ties
tokenizer_source: base
base_model: MediaTek-Research/Breeze-7B-Instruct-v0_1
parameters:
  # normalize: false
  int8_mask: true
dtype: bfloat16
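
# Usage sketch (kept as comments so this file remains a valid mergekit config):
# a config like the one above can be passed to the mergekit CLI to produce the
# merged model. The config filename and output directory below are illustrative
# assumptions, not names taken from this repository:
#
#   mergekit-yaml mergekit_config.yml ./merged-model --cuda
#
# Drop --cuda to run the merge on CPU.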