---
dataset_info:
  config_name: high_32
  features:
    - name: label
      dtype: int64
    - name: name
      dtype: int64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_k.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_k.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out.0.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_out.0.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q.lora.up.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v.lora.down.weight
      sequence: float64
    - name: >-
        unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v.lora.up.weight
      sequence: float64
  splits:
    - name: train
      num_bytes: 12755204000
      num_examples: 250
  download_size: 10785191542
  dataset_size: 12755204000
configs:
  - config_name: high_32
    data_files:
      - split: train
        path: high/32/train-*