falcon + qlora + xformer mbs 40 gas 2 on A6000
examples/falcon/config-7b-qlora.yml (changed)

@@ -18,7 +18,7 @@ val_set_size: 0.01
 adapter: qlora
 lora_model_dir:
 sequence_len: 2048
-max_packed_sequence_len:
+max_packed_sequence_len:
 lora_r: 64
 lora_alpha: 16
 lora_dropout: 0.05
@@ -30,8 +30,8 @@ wandb_watch:
 wandb_run_id:
 wandb_log_model:
 output_dir: ./qlora-out
-
-
+micro_batch_size: 40
+gradient_accumulation_steps: 2
 num_epochs: 3
 optimizer: paged_adamw_32bit
 torchdistx_path:
@@ -50,7 +50,7 @@ resume_from_checkpoint:
 auto_resume_from_checkpoints: true
 local_rank:
 logging_steps: 1
-xformers_attention:
+xformers_attention: true
 flash_attention:
 gptq_groupsize:
 gptq_model_v1:
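Taken together, the commit sets a micro batch size of 40 with 2 gradient accumulation steps, i.e. 40 × 2 = 80 sequences per optimizer step on the single A6000, and switches the attention implementation to xformers. A minimal sketch of the affected keys after the change (field names taken from the diff; surrounding keys are unchanged):

 # net effect on examples/falcon/config-7b-qlora.yml
 micro_batch_size: 40              # sequences per forward/backward pass
 gradient_accumulation_steps: 2    # optimizer step every 2 micro-batches -> effective batch of 80
 xformers_attention: true          # memory-efficient attention; flash_attention stays unset

Assuming the axolotl repository layout of that period (the exact entry point varies by version), the config would be launched with something like:

 accelerate launch scripts/finetune.py examples/falcon/config-7b-qlora.yml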