#### Model Settings
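# Config for a Glyph-SDXL-style text-rendering setup: an SDXL base model is
# paired with a ByT5 glyph encoder whose features are mapped into selected
# UNet cross-attention blocks (see attn_block_to_modify below).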
pretrained_model_name_or_path = 'stablediffusionapi/albedobase-xl-20'  # SDXL base checkpoint
pretrained_vae_model_name_or_path = 'madebyollin/sdxl-vae-fp16-fix'  # VAE patched for stable fp16 inference
revision = None
byt5_max_length = 512  # max number of byte tokens for the glyph-text prompt
byt5_mapper_type = 'T5EncoderBlockByT5Mapper'
byt5_mapper_config = dict(
    num_layers=4,
    sdxl_channels=2048,  # width of the SDXL cross-attention context
)
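# A minimal sketch of what the mapper does dimensionally, assuming ByT5-small's
# hidden size of 1472; the actual T5EncoderBlockByT5Mapper presumably stacks
# num_layers ByT5 encoder blocks before this projection.
def _mapper_projection_sketch(byt5_dim=1472, sdxl_dim=2048):
    import torch.nn as nn
    # Project ByT5 token features to the width SDXL cross-attention expects.
    return nn.Linear(byt5_dim, sdxl_dim)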
byt5_config = dict(
    byt5_name='google/byt5-small',
    special_token=True,  # extend the byte vocabulary with glyph special tokens
    color_special_token=True,
    font_special_token=True,
    color_ann_path='assets/color_idx.json',  # color special-token annotations
    font_ann_path='assets/multilingual_cn-en_font_idx.json',  # font special-token annotations
    multilingual=True,
)
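# A minimal loading sketch, assuming byt5_name is consumed through Hugging Face
# transformers; the special color/font tokens listed in the annotation files
# would still have to be added to the tokenizer, which this sketch omits.
def _load_byt5_sketch(cfg=None):
    from transformers import AutoTokenizer, T5EncoderModel
    cfg = cfg or byt5_config
    tokenizer = AutoTokenizer.from_pretrained(cfg['byt5_name'])
    encoder = T5EncoderModel.from_pretrained(cfg['byt5_name'])
    return tokenizer, encoder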
# Dotted module paths of the SDXL UNet transformer blocks whose cross-attention
# is modified to attend to the mapped ByT5 glyph embeddings.
attn_block_to_modify = [
    "down_blocks.1.attentions.0.transformer_blocks.0",
    "down_blocks.1.attentions.0.transformer_blocks.1",
    "down_blocks.1.attentions.1.transformer_blocks.0",
    "down_blocks.1.attentions.1.transformer_blocks.1",
    "down_blocks.2.attentions.0.transformer_blocks.0",
    "down_blocks.2.attentions.0.transformer_blocks.1",
    "down_blocks.2.attentions.0.transformer_blocks.2",
    "down_blocks.2.attentions.0.transformer_blocks.3",
    "down_blocks.2.attentions.0.transformer_blocks.4",
    "down_blocks.2.attentions.0.transformer_blocks.5",
    "down_blocks.2.attentions.0.transformer_blocks.6",
    "down_blocks.2.attentions.0.transformer_blocks.7",
    "down_blocks.2.attentions.0.transformer_blocks.8",
    "down_blocks.2.attentions.0.transformer_blocks.9",
    "down_blocks.2.attentions.1.transformer_blocks.0",
    "down_blocks.2.attentions.1.transformer_blocks.1",
    "down_blocks.2.attentions.1.transformer_blocks.2",
    "down_blocks.2.attentions.1.transformer_blocks.3",
    "down_blocks.2.attentions.1.transformer_blocks.4",
    "down_blocks.2.attentions.1.transformer_blocks.5",
    "down_blocks.2.attentions.1.transformer_blocks.6",
    "down_blocks.2.attentions.1.transformer_blocks.7",
    "down_blocks.2.attentions.1.transformer_blocks.8",
    "down_blocks.2.attentions.1.transformer_blocks.9",
    "up_blocks.0.attentions.0.transformer_blocks.0",
    "up_blocks.0.attentions.0.transformer_blocks.1",
    "up_blocks.0.attentions.0.transformer_blocks.2",
    "up_blocks.0.attentions.0.transformer_blocks.3",
    "up_blocks.0.attentions.0.transformer_blocks.4",
    "up_blocks.0.attentions.0.transformer_blocks.5",
    "up_blocks.0.attentions.0.transformer_blocks.6",
    "up_blocks.0.attentions.0.transformer_blocks.7",
    "up_blocks.0.attentions.0.transformer_blocks.8",
    "up_blocks.0.attentions.0.transformer_blocks.9",
    "up_blocks.0.attentions.1.transformer_blocks.0",
    "up_blocks.0.attentions.1.transformer_blocks.1",
    "up_blocks.0.attentions.1.transformer_blocks.2",
    "up_blocks.0.attentions.1.transformer_blocks.3",
    "up_blocks.0.attentions.1.transformer_blocks.4",
    "up_blocks.0.attentions.1.transformer_blocks.5",
    "up_blocks.0.attentions.1.transformer_blocks.6",
    "up_blocks.0.attentions.1.transformer_blocks.7",
    "up_blocks.0.attentions.1.transformer_blocks.8",
    "up_blocks.0.attentions.1.transformer_blocks.9",
    "up_blocks.0.attentions.2.transformer_blocks.0",
    "up_blocks.0.attentions.2.transformer_blocks.1",
    "up_blocks.0.attentions.2.transformer_blocks.2",
    "up_blocks.0.attentions.2.transformer_blocks.3",
    "up_blocks.0.attentions.2.transformer_blocks.4",
    "up_blocks.0.attentions.2.transformer_blocks.5",
    "up_blocks.0.attentions.2.transformer_blocks.6",
    "up_blocks.0.attentions.2.transformer_blocks.7",
    "up_blocks.0.attentions.2.transformer_blocks.8",
    "up_blocks.0.attentions.2.transformer_blocks.9",
    "up_blocks.1.attentions.0.transformer_blocks.0",
    "up_blocks.1.attentions.0.transformer_blocks.1",
    "up_blocks.1.attentions.1.transformer_blocks.0",
    "up_blocks.1.attentions.1.transformer_blocks.1",
    "up_blocks.1.attentions.2.transformer_blocks.0",
    "up_blocks.1.attentions.2.transformer_blocks.1",
    "mid_block.attentions.0.transformer_blocks.0",
    "mid_block.attentions.0.transformer_blocks.1",
    "mid_block.attentions.0.transformer_blocks.2",
    "mid_block.attentions.0.transformer_blocks.3",
    "mid_block.attentions.0.transformer_blocks.4",
    "mid_block.attentions.0.transformer_blocks.5",
    "mid_block.attentions.0.transformer_blocks.6",
    "mid_block.attentions.0.transformer_blocks.7",
    "mid_block.attentions.0.transformer_blocks.8",
    "mid_block.attentions.0.transformer_blocks.9",
]
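# A minimal sketch, assuming a diffusers-style SDXL UNet, of resolving these
# dotted paths to the actual modules (torch.nn.Module.get_submodule accepts
# them directly):
def _resolve_attn_blocks_sketch(unet):
    # e.g. unet = UNet2DConditionModel.from_pretrained(
    #          pretrained_model_name_or_path, subfolder='unet')
    return {name: unet.get_submodule(name) for name in attn_block_to_modify}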
unet_lora_rank = 128  # rank of the LoRA layers added to the UNet
inference_dtype = 'fp16'
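# A minimal end-to-end loading sketch, assuming standard diffusers/PEFT usage;
# the glyph mapper and the trained LoRA weights would still need to be
# attached on top of the bare pipeline.
def _load_pipeline_sketch():
    import torch
    from diffusers import AutoencoderKL, StableDiffusionXLPipeline
    dtype = torch.float16 if inference_dtype == 'fp16' else torch.float32
    vae = AutoencoderKL.from_pretrained(pretrained_vae_model_name_or_path,
                                        torch_dtype=dtype)
    return StableDiffusionXLPipeline.from_pretrained(
        pretrained_model_name_or_path, vae=vae, torch_dtype=dtype)


def _lora_config_sketch():
    # Assumption: PEFT-style LoRA on the UNet attention projections; the
    # target module names follow diffusers' attention naming convention.
    from peft import LoraConfig
    return LoraConfig(r=unet_lora_rank, lora_alpha=unet_lora_rank,
                      target_modules=['to_q', 'to_k', 'to_v', 'to_out.0'])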