slices:
  - sources:
      - model: "Sao10K/L3-8B-Stheno-v3.2+hfl/llama-3-chinese-8b-instruct-v2-lora"
        layer_range: [0, 32]
merge_method: passthrough
base_model: "gradientai/Llama-3-8B-Instruct-Gradient-1048k"
dtype: bfloat16
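
For context, below is a minimal sketch of how a config like this could be run with mergekit's Python API (the `+` in the model field is mergekit's syntax for applying the hfl LoRA adapter to Sao10K/L3-8B-Stheno-v3.2 before merging). The config path, output path, and MergeOptions flags are illustrative, and the exact API surface may differ between mergekit versions.

import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_PATH = "mergekit_config.yml"   # the YAML shown above, saved to disk
OUTPUT_PATH = "./merged-model"        # illustrative output directory

# Parse the YAML into mergekit's config object
with open(CONFIG_PATH, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the passthrough merge onto the long-context Gradient base model
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,             # carry the tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)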