# Glyph-SDXL-v2/configs/glyph_sdxl_multilingual_albedo.py
# Multilingual (10-language) Glyph-SDXL-v2 config built on the AlbedoBase-XL base model.
#### Model Setting
pretrained_model_name_or_path = 'stablediffusionapi/albedobase-xl-20'
pretrained_vae_model_name_or_path = 'madebyollin/sdxl-vae-fp16-fix'
revision = None
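
# Illustrative sketch (not part of the original config): how the two
# checkpoints above are typically loaded with diffusers. Both ids are
# public Hugging Face repos; wiring them into this repo's own pipeline
# class is an assumption.
def _example_load_base():
    import torch
    from diffusers import AutoencoderKL, StableDiffusionXLPipeline
    vae = AutoencoderKL.from_pretrained(
        pretrained_vae_model_name_or_path, torch_dtype=torch.float16)
    return StableDiffusionXLPipeline.from_pretrained(
        pretrained_model_name_or_path,
        vae=vae, revision=revision, torch_dtype=torch.float16)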
byt5_max_length = 512
byt5_mapper_type = 'T5EncoderBlockByT5Mapper'
byt5_mapper_config = dict(
    num_layers=4,
    sdxl_channels=2048,
)
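
# Illustrative sketch (not part of the original config): judging by its
# name, the mapper above plausibly stacks `num_layers` T5 encoder blocks
# and projects the byt5-small hidden size (d_model=1472) into SDXL's
# 2048-dim cross-attention space. The real T5EncoderBlockByT5Mapper lives
# in this repo; this is an assumption about its shape, not its code.
def _example_mapper(t5_config, num_layers=4, sdxl_channels=2048):
    import torch.nn as nn
    from transformers.models.t5.modeling_t5 import T5Block
    return nn.ModuleDict({
        'encoder_blocks': nn.ModuleList(
            T5Block(t5_config) for _ in range(num_layers)),
        'proj': nn.Linear(t5_config.d_model, sdxl_channels),
    })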
byt5_config = dict(
    byt5_name='google/byt5-small',
    special_token=True,
    color_special_token=True,
    font_special_token=True,
    color_ann_path='assets/color_idx.json',
    font_ann_path='assets/multilingual_10-lang_idx.json',
    multilingual=True,
)
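
# Illustrative sketch (not part of the original config): how `byt5_config`
# and `byt5_max_length` might be consumed. The Hugging Face ids and the
# tokenizer/encoder APIs are real; reading the color/font JSON files into
# extra tokens is this repo's own logic and only assumed here.
def _example_load_byt5(cfg=None):
    import json
    from transformers import AutoTokenizer, T5EncoderModel
    cfg = cfg or byt5_config
    tokenizer = AutoTokenizer.from_pretrained(cfg['byt5_name'])
    encoder = T5EncoderModel.from_pretrained(cfg['byt5_name'])
    if cfg.get('special_token'):
        # Assumed format: the JSON files map color / font names to indices.
        colors = json.load(open(cfg['color_ann_path']))
        fonts = json.load(open(cfg['font_ann_path']))
        tokenizer.add_tokens(
            [f'<color-{i}>' for i in range(len(colors))]
            + [f'<font-{i}>' for i in range(len(fonts))])
        encoder.resize_token_embeddings(len(tokenizer))
    # Glyph text would then be tokenized with, e.g.:
    #   tokenizer(text, max_length=byt5_max_length, truncation=True,
    #             padding='max_length', return_tensors='pt')
    return tokenizer, encoder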
attn_block_to_modify = [
    # down_blocks.1 (CrossAttnDownBlock2D): 2 attentions x 2 transformer blocks
    "down_blocks.1.attentions.0.transformer_blocks.0",
    "down_blocks.1.attentions.0.transformer_blocks.1",
    "down_blocks.1.attentions.1.transformer_blocks.0",
    "down_blocks.1.attentions.1.transformer_blocks.1",
    # down_blocks.2 (CrossAttnDownBlock2D): 2 attentions x 10 transformer blocks
    "down_blocks.2.attentions.0.transformer_blocks.0",
    "down_blocks.2.attentions.0.transformer_blocks.1",
    "down_blocks.2.attentions.0.transformer_blocks.2",
    "down_blocks.2.attentions.0.transformer_blocks.3",
    "down_blocks.2.attentions.0.transformer_blocks.4",
    "down_blocks.2.attentions.0.transformer_blocks.5",
    "down_blocks.2.attentions.0.transformer_blocks.6",
    "down_blocks.2.attentions.0.transformer_blocks.7",
    "down_blocks.2.attentions.0.transformer_blocks.8",
    "down_blocks.2.attentions.0.transformer_blocks.9",
    "down_blocks.2.attentions.1.transformer_blocks.0",
    "down_blocks.2.attentions.1.transformer_blocks.1",
    "down_blocks.2.attentions.1.transformer_blocks.2",
    "down_blocks.2.attentions.1.transformer_blocks.3",
    "down_blocks.2.attentions.1.transformer_blocks.4",
    "down_blocks.2.attentions.1.transformer_blocks.5",
    "down_blocks.2.attentions.1.transformer_blocks.6",
    "down_blocks.2.attentions.1.transformer_blocks.7",
    "down_blocks.2.attentions.1.transformer_blocks.8",
    "down_blocks.2.attentions.1.transformer_blocks.9",
    # up_blocks.0 (CrossAttnUpBlock2D): 3 attentions x 10 transformer blocks
    "up_blocks.0.attentions.0.transformer_blocks.0",
    "up_blocks.0.attentions.0.transformer_blocks.1",
    "up_blocks.0.attentions.0.transformer_blocks.2",
    "up_blocks.0.attentions.0.transformer_blocks.3",
    "up_blocks.0.attentions.0.transformer_blocks.4",
    "up_blocks.0.attentions.0.transformer_blocks.5",
    "up_blocks.0.attentions.0.transformer_blocks.6",
    "up_blocks.0.attentions.0.transformer_blocks.7",
    "up_blocks.0.attentions.0.transformer_blocks.8",
    "up_blocks.0.attentions.0.transformer_blocks.9",
    "up_blocks.0.attentions.1.transformer_blocks.0",
    "up_blocks.0.attentions.1.transformer_blocks.1",
    "up_blocks.0.attentions.1.transformer_blocks.2",
    "up_blocks.0.attentions.1.transformer_blocks.3",
    "up_blocks.0.attentions.1.transformer_blocks.4",
    "up_blocks.0.attentions.1.transformer_blocks.5",
    "up_blocks.0.attentions.1.transformer_blocks.6",
    "up_blocks.0.attentions.1.transformer_blocks.7",
    "up_blocks.0.attentions.1.transformer_blocks.8",
    "up_blocks.0.attentions.1.transformer_blocks.9",
    "up_blocks.0.attentions.2.transformer_blocks.0",
    "up_blocks.0.attentions.2.transformer_blocks.1",
    "up_blocks.0.attentions.2.transformer_blocks.2",
    "up_blocks.0.attentions.2.transformer_blocks.3",
    "up_blocks.0.attentions.2.transformer_blocks.4",
    "up_blocks.0.attentions.2.transformer_blocks.5",
    "up_blocks.0.attentions.2.transformer_blocks.6",
    "up_blocks.0.attentions.2.transformer_blocks.7",
    "up_blocks.0.attentions.2.transformer_blocks.8",
    "up_blocks.0.attentions.2.transformer_blocks.9",
    # up_blocks.1 (CrossAttnUpBlock2D): 3 attentions x 2 transformer blocks
    "up_blocks.1.attentions.0.transformer_blocks.0",
    "up_blocks.1.attentions.0.transformer_blocks.1",
    "up_blocks.1.attentions.1.transformer_blocks.0",
    "up_blocks.1.attentions.1.transformer_blocks.1",
    "up_blocks.1.attentions.2.transformer_blocks.0",
    "up_blocks.1.attentions.2.transformer_blocks.1",
    # mid_block (UNetMidBlock2DCrossAttn): 10 transformer blocks
    "mid_block.attentions.0.transformer_blocks.0",
    "mid_block.attentions.0.transformer_blocks.1",
    "mid_block.attentions.0.transformer_blocks.2",
    "mid_block.attentions.0.transformer_blocks.3",
    "mid_block.attentions.0.transformer_blocks.4",
    "mid_block.attentions.0.transformer_blocks.5",
    "mid_block.attentions.0.transformer_blocks.6",
    "mid_block.attentions.0.transformer_blocks.7",
    "mid_block.attentions.0.transformer_blocks.8",
    "mid_block.attentions.0.transformer_blocks.9",
]
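
# Illustrative sketch (not part of the original config): the 70 names
# above appear to cover every cross-attention transformer block in the
# SDXL UNet. They resolve to modules with the standard
# torch.nn.Module.get_submodule API.
def _example_resolve_attn_blocks(unet):
    # `unet` is e.g. a diffusers.UNet2DConditionModel loaded from
    # pretrained_model_name_or_path (subfolder='unet').
    return {name: unet.get_submodule(name) for name in attn_block_to_modify}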
unet_lora_rank = 128
inference_dtype = 'fp16'
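
# Illustrative sketch (not part of the original config): one way to turn
# `unet_lora_rank` and `attn_block_to_modify` into a peft LoraConfig.
# to_q/to_k/to_v/to_out.0 are the standard diffusers attention projection
# names; that this repo targets exactly these, and uses alpha == rank,
# are assumptions.
def _example_lora_config():
    from peft import LoraConfig
    targets = [
        f'{block}.{attn}.{proj}'
        for block in attn_block_to_modify
        for attn in ('attn1', 'attn2')  # self- and cross-attention
        for proj in ('to_q', 'to_k', 'to_v', 'to_out.0')
    ]
    return LoraConfig(r=unet_lora_rank, lora_alpha=unet_lora_rank,
                      target_modules=targets)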