ru-openllama-lora-3b-v2 / finetune_args.json
{
  "micro_batch_size": 1,
  "gradient_accumulation_steps": 1,
  "num_train_epochs": 2,
  "learning_rate": 0.0003,
  "cutoff_len": 512,
  "val_set_size": 0,
  "lora_r": 4,
  "lora_alpha": 16,
  "lora_dropout": 0.05,
  "lora_target_modules": [
    "q_proj",
    "v_proj"
  ],
  "lora_modules_to_save": [],
  "train_on_inputs": true,
  "group_by_length": false,
  "load_in_8bit": false,
  "fp16": true,
  "bf16": false,
  "gradient_checkpointing": false,
  "save_steps": 500,
  "save_total_limit": 5,
  "logging_steps": 10,
  "additional_training_arguments": null,
  "additional_lora_config": null
}
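
These arguments map closely onto the Hugging Face peft and transformers APIs. The Python sketch below shows one plausible way to consume this file when setting up a LoRA fine-tuning run; the base model checkpoint, output directory, and the loading code itself are assumptions for illustration, not taken from this repository.

# Minimal sketch, assuming the peft and transformers libraries; the checkpoint
# name and output_dir are placeholders, not confirmed by this repo.
import json

from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM, TrainingArguments

with open("finetune_args.json") as f:
    args = json.load(f)

# LoRA adapter configuration built from the lora_* fields above.
lora_config = LoraConfig(
    r=args["lora_r"],                              # 4
    lora_alpha=args["lora_alpha"],                 # 16
    lora_dropout=args["lora_dropout"],             # 0.05
    target_modules=args["lora_target_modules"],    # ["q_proj", "v_proj"]
    modules_to_save=args["lora_modules_to_save"] or None,
    task_type="CAUSAL_LM",
)

# Placeholder base checkpoint; substitute the model actually fine-tuned here.
model = AutoModelForCausalLM.from_pretrained("openlm-research/open_llama_3b_v2")
model = get_peft_model(model, lora_config)

# Trainer settings drawn from the remaining fields of finetune_args.json.
training_args = TrainingArguments(
    output_dir="ru-openllama-lora-3b-v2",          # placeholder output path
    per_device_train_batch_size=args["micro_batch_size"],            # 1
    gradient_accumulation_steps=args["gradient_accumulation_steps"],  # 1
    num_train_epochs=args["num_train_epochs"],     # 2
    learning_rate=args["learning_rate"],           # 3e-4
    fp16=args["fp16"],                             # True
    bf16=args["bf16"],                             # False
    gradient_checkpointing=args["gradient_checkpointing"],  # False
    group_by_length=args["group_by_length"],       # False
    save_steps=args["save_steps"],                 # 500
    save_total_limit=args["save_total_limit"],     # 5
    logging_steps=args["logging_steps"],           # 10
)

Fields such as cutoff_len (tokenized sequence length), val_set_size, and train_on_inputs are consumed by the data-preparation side of the training script rather than by LoraConfig or TrainingArguments, so they are not mapped in the sketch above.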