{
  "base_model_name_or_path": "t5-base",
  "inference_mode": true,
  "num_attention_heads": 12,
  "num_layers": 12,
  "num_transformer_submodules": 2,
  "num_virtual_tokens": 100,
  "peft_type": "PROMPT_TUNING",
  "prompt_tuning_init": "RANDOM",
  "task_type": "SEQ_2_SEQ_LM",
  "token_dim": 768
}