# cursa-o1-7b-v1.1 / mergekit_config.yml
model_name: "post-cursa-o1"
base_model: marcuscedricridia/post-cursa-o1
merge_method: slerp
dtype: bfloat16
tokenizer_source: "union"  # or "base" or a model path
chat_template: "auto"      # or a template name or Jinja2 template
slices:
  - sources:
      - model: marcuscedricridia/pre-cursa-o1-v1.2
        layer_range: [0, 28]
      - model: marcuscedricridia/post-cursa-o1
        layer_range: [0, 28]
parameters:
  t:
    - filter: self_attn
      value: [0.0, 0.3, 0.5, 0.7, 1.0]
    - filter: mlp
      value: [1.0, 0.7, 0.5, 0.3, 0.0]
    - filter: input_layernorm|post_attention_layernorm
      value: 0.5
    - value: 0.5
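
# Usage sketch: this file is consumed by mergekit, which interpolates the two
# source models layer by layer (SLERP), with the per-filter `t` gradients above
# spread across the 28 layers. A minimal invocation, assuming mergekit's
# standard CLI entry point; the output path is illustrative, not from this repo:
#
#   mergekit-yaml mergekit_config.yml ./cursa-o1-7b-v1.1 --cuda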