{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "meta-llama/Llama-2-7b-chat-hf",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 16,
  "rank_pattern": {},
  "revision": null,
"target_modules": [ |
|
"mlp.up_proj", |
|
"self_attn.o_proj", |
|
"input_layernorm.weight", |
|
"self_attn.rotary_emb.inv_freq", |
|
"lm_head.weight", |
|
"mlp.down_proj", |
|
"mlp.gate_proj", |
|
"post_attention_layernorm.weight", |
|
"self_attn.v_proj", |
|
"self_attn.k_proj", |
|
"model.norm.weight", |
|
"self_attn.q_proj" |
|
], |
  "task_type": "CAUSAL_LM",
  "use_rslora": false
}
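
For reference, below is a minimal sketch of how a config like this is consumed with the peft library. It is not the adapter author's script: the adapter directory path is hypothetical, and it assumes this file is saved as adapter_config.json next to the trained adapter weights.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

BASE_MODEL = "meta-llama/Llama-2-7b-chat-hf"  # matches base_model_name_or_path above
ADAPTER_DIR = "path/to/adapter"               # hypothetical: directory containing this config

# Load the frozen base model, then attach the LoRA adapter described by this config.
base = AutoModelForCausalLM.from_pretrained(BASE_MODEL, torch_dtype=torch.float16)
model = PeftModel.from_pretrained(base, ADAPTER_DIR)  # reads adapter_config.json
model.eval()  # the config sets inference_mode: true

tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)
inputs = tokenizer("Hello!", return_tensors="pt")
with torch.no_grad():
    output = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output[0], skip_special_tokens=True))

With r = 16, lora_alpha = 32, and use_rslora false, the adapter deltas are scaled by alpha / r = 2 at inference time.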