Commit · 51fc377
Parent(s): dd78ee5
Delete train_configs
train_configs/minigpt4_llama2_stage1_pretrain.yaml
DELETED
@@ -1,55 +0,0 @@
-model:
-  arch: minigpt4
-  model_type: pretrain_llama2
-
-
-datasets:
-  laion:
-    vis_processor:
-      train:
-        name: "blip2_image_train"
-        image_size: 224
-    text_processor:
-      train:
-        name: "blip_caption"
-    sample_ratio: 115
-  cc_sbu:
-    vis_processor:
-      train:
-        name: "blip2_image_train"
-        image_size: 224
-    text_processor:
-      train:
-        name: "blip_caption"
-    sample_ratio: 14
-
-
-run:
-  task: image_text_pretrain
-  # optimizer
-  lr_sched: "linear_warmup_cosine_lr"
-  init_lr: 1e-4
-  min_lr: 8e-5
-  warmup_lr: 1e-6
-
-  weight_decay: 0.05
-  max_epoch: 4
-  batch_size_train: 64
-  batch_size_eval: 64
-  num_workers: 4
-  warmup_steps: 5000
-  iters_per_epoch: 5000
-
-  seed: 42
-  output_dir: "output/minigpt4_stage1_pretrain"
-
-  amp: True
-  resume_ckpt_path: null
-
-  evaluate: False
-  train_splits: ["train"]
-
-  device: "cuda"
-  world_size: 1
-  dist_url: "env://"
-  distributed: True
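The deleted stage-1 file is plain YAML, so it can be inspected outside the training code. A minimal sketch, assuming the omegaconf package and a local copy saved as minigpt4_llama2_stage1_pretrain.yaml (the repo's own Config class adds more logic than this):

# Minimal sketch: load the (now deleted) stage-1 config and read a few
# fields. Assumes the `omegaconf` package and a local copy of the file.
from omegaconf import OmegaConf

cfg = OmegaConf.load("minigpt4_llama2_stage1_pretrain.yaml")

print(cfg.model.arch)              # minigpt4
print(cfg.model.model_type)        # pretrain_llama2
print(cfg.run.batch_size_train)    # 64

# Bare scientific notation like 1e-4 is parsed as a string by
# PyYAML-based loaders, so cast before handing it to an optimizer.
init_lr = float(cfg.run.init_lr)

# Command-line style overrides can be merged on top of the file:
cfg = OmegaConf.merge(cfg, OmegaConf.from_dotlist(["run.batch_size_train=16"]))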
train_configs/minigpt4_llama2_stage2_finetune.yaml
DELETED
@@ -1,50 +0,0 @@
-model:
-  arch: minigpt4
-  model_type: pretrain_llama2
-
-  max_txt_len: 160
-  end_sym: "</s>"
-  prompt_path: "prompts/alignment.txt"
-  prompt_template: '[INST] {} [/INST] '
-  ckpt: '/path/to/stage1/checkpoint/'
-
-
-datasets:
-  cc_sbu_align:
-    vis_processor:
-      train:
-        name: "blip2_image_train"
-        image_size: 224
-    text_processor:
-      train:
-        name: "blip_caption"
-
-run:
-  task: image_text_pretrain
-  # optimizer
-  lr_sched: "linear_warmup_cosine_lr"
-  init_lr: 3e-5
-  min_lr: 1e-5
-  warmup_lr: 1e-6
-
-  weight_decay: 0.05
-  max_epoch: 5
-  iters_per_epoch: 200
-  batch_size_train: 12
-  batch_size_eval: 12
-  num_workers: 4
-  warmup_steps: 200
-
-  seed: 42
-  output_dir: "output/minigpt4_stage2_finetune"
-
-  amp: True
-  resume_ckpt_path: null
-
-  evaluate: False
-  train_splits: ["train"]
-
-  device: "cuda"
-  world_size: 1
-  dist_url: "env://"
-  distributed: True
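The stage-2 fields max_txt_len, end_sym, prompt_path and prompt_template control how an instruction/answer pair is turned into a training string. A minimal sketch of that wrapping; the sampled instruction list and the <Img><ImageHere></Img> placeholder are illustrative assumptions, not code copied from the repo:

# Minimal sketch of how prompt_template / end_sym from the config above
# would wrap one instruction/answer pair into a training string.
import random

prompt_template = '[INST] {} [/INST] '   # LLaMA-2 chat format, from the config
end_sym = '</s>'                         # from the config

# prompts/alignment.txt holds several instruction variants; one is sampled.
instructions = [
    "<Img><ImageHere></Img> Describe this image in detail.",
    "<Img><ImageHere></Img> What do you see in this picture?",
]
answer = "A cat is sleeping on a red sofa."

text = prompt_template.format(random.choice(instructions)) + answer + end_sym
print(text)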
train_configs/minigpt4_stage1_pretrain.yaml
DELETED
@@ -1,55 +0,0 @@
-model:
-  arch: minigpt4
-  model_type: pretrain_vicuna0
-
-
-datasets:
-  laion:
-    vis_processor:
-      train:
-        name: "blip2_image_train"
-        image_size: 224
-    text_processor:
-      train:
-        name: "blip_caption"
-    sample_ratio: 115
-  cc_sbu:
-    vis_processor:
-      train:
-        name: "blip2_image_train"
-        image_size: 224
-    text_processor:
-      train:
-        name: "blip_caption"
-    sample_ratio: 14
-
-
-run:
-  task: image_text_pretrain
-  # optimizer
-  lr_sched: "linear_warmup_cosine_lr"
-  init_lr: 1e-4
-  min_lr: 8e-5
-  warmup_lr: 1e-6
-
-  weight_decay: 0.05
-  max_epoch: 4
-  batch_size_train: 64
-  batch_size_eval: 64
-  num_workers: 4
-  warmup_steps: 5000
-  iters_per_epoch: 5000
-
-  seed: 42
-  output_dir: "output/minigpt4_stage1_pretrain"
-
-  amp: True
-  resume_ckpt_path: null
-
-  evaluate: False
-  train_splits: ["train"]
-
-  device: "cuda"
-  world_size: 1
-  dist_url: "env://"
-  distributed: True
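For a rough sense of scale, the stage-1 and stage-2 schedules above imply very different amounts of data per process; a quick back-of-the-envelope check (assuming a single GPU, so world_size is ignored):

# Back-of-the-envelope scale of the two stages, per GPU, using the
# numbers from the configs above.
stage1_updates = 4 * 5000             # max_epoch * iters_per_epoch
stage1_samples = stage1_updates * 64  # * batch_size_train -> 1,280,000

stage2_updates = 5 * 200              # max_epoch * iters_per_epoch
stage2_samples = stage2_updates * 12  # * batch_size_train -> 12,000

print(stage1_updates, stage1_samples)   # 20000 1280000
print(stage2_updates, stage2_samples)   # 1000 12000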
train_configs/minigpt4_stage2_finetune.yaml
DELETED
@@ -1,50 +0,0 @@
-model:
-  arch: minigpt4
-  model_type: pretrain_vicuna0
-
-  max_txt_len: 160
-  end_sym: "###"
-  prompt_path: "prompts/alignment.txt"
-  prompt_template: '###Human: {} ###Assistant: '
-  ckpt: '/path/to/stage1/checkpoint/'
-
-
-datasets:
-  cc_sbu_align:
-    vis_processor:
-      train:
-        name: "blip2_image_train"
-        image_size: 224
-    text_processor:
-      train:
-        name: "blip_caption"
-
-run:
-  task: image_text_pretrain
-  # optimizer
-  lr_sched: "linear_warmup_cosine_lr"
-  init_lr: 3e-5
-  min_lr: 1e-5
-  warmup_lr: 1e-6
-
-  weight_decay: 0.05
-  max_epoch: 5
-  iters_per_epoch: 200
-  batch_size_train: 12
-  batch_size_eval: 12
-  num_workers: 4
-  warmup_steps: 200
-
-  seed: 42
-  output_dir: "output/minigpt4_stage2_finetune"
-
-  amp: True
-  resume_ckpt_path: null
-
-  evaluate: False
-  train_splits: ["train"]
-
-  device: "cuda"
-  world_size: 1
-  dist_url: "env://"
-  distributed: True
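All four deleted configs select lr_sched: "linear_warmup_cosine_lr", i.e. a linear warmup from warmup_lr to init_lr over warmup_steps followed by a cosine decay toward min_lr. A plain per-step re-implementation for illustration; the repo's own scheduler class may differ in details (e.g. stepping the cosine per epoch rather than per iteration):

# Illustrative per-step version of a linear-warmup + cosine-decay schedule
# matching the fields warmup_lr, init_lr, min_lr, warmup_steps above.
import math

def lr_at(step, total_steps, init_lr, min_lr, warmup_lr, warmup_steps):
    if step < warmup_steps:
        # linear ramp from warmup_lr up to init_lr
        return warmup_lr + (init_lr - warmup_lr) * step / warmup_steps
    # cosine decay from init_lr down to min_lr over the remaining steps
    progress = (step - warmup_steps) / max(1, total_steps - warmup_steps)
    return min_lr + 0.5 * (init_lr - min_lr) * (1 + math.cos(math.pi * progress))

# Stage-2 finetune numbers: 5 epochs * 200 iters, 200 warmup steps.
total = 5 * 200
for s in (0, 100, 200, 600, 1000):
    print(s, f"{lr_at(s, total, 3e-5, 1e-5, 1e-6, 200):.2e}")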