---
dataset_info:
features:
- name: category_label
dtype: string
- name: category_name
dtype: string
- name: num_images
dtype: int64
- name: leaf_id
dtype: int64
- name: num_batches
dtype: int64
- name: num_epochs
dtype: int64
- name: batch_size
dtype: int64
- name: gradient_accumulation_steps
dtype: int64
- name: max_train_steps
dtype: int64
- name: rank
dtype: int64
- name: step_loss
sequence:
sequence: float64
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.0.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.down_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.mid_block.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.1.attentions.2.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.2.attentions.2.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.0.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.1.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn1.to_v.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_q.lora.up.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v.lora.down.weight
sequence:
sequence: float32
- name: >-
unet.up_blocks.3.attentions.2.transformer_blocks.0.attn2.to_v.lora.up.weight
sequence:
sequence: float32
splits:
- name: train
num_bytes: 6015416603
num_examples: 10000
download_size: 5009703366
dataset_size: 6015416603
configs:
- config_name: default
data_files:
- split: train
path: data/train-*