{
"adapter_layers": 1,
"adapter_len": 4,
"auto_mapping": null,
"base_model_name_or_path": "HuggingFaceH4/tiny-random-LlamaForCausalLM",
"inference_mode": true,
"peft_type": "ADAPTION_PROMPT",
"revision": null,
"target_modules": "self_attn",
"task_type": "CAUSAL_LM"
}
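
A minimal sketch of how this adapter configuration might be recreated in code and attached to the base model it names, assuming the `peft` and `transformers` Python libraries are installed. The parameter mapping mirrors the JSON fields above; note that `inference_mode` is true in the saved file because PEFT sets it when serializing a config, while for training it is left at its default of False.

from transformers import AutoModelForCausalLM
from peft import AdaptionPromptConfig, get_peft_model

# Base model named in base_model_name_or_path above.
base_model = AutoModelForCausalLM.from_pretrained(
    "HuggingFaceH4/tiny-random-LlamaForCausalLM"
)

# Mirror the JSON fields: 4 learnable prompt tokens (adapter_len)
# inserted into the top 1 transformer layer (adapter_layers),
# targeting the self-attention module of a causal LM.
config = AdaptionPromptConfig(
    adapter_len=4,
    adapter_layers=1,
    target_modules="self_attn",
    task_type="CAUSAL_LM",
)

model = get_peft_model(base_model, config)
model.print_trainable_parameters()

Calling model.save_pretrained(...) on the resulting PEFT model would write out an adapter_config.json equivalent to the one shown here, with inference_mode flipped to true.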