# z-image / checkpoints / config.yaml
# Uploaded by bimabk — task output 1c93dd95-2e89-48d9-813d-e0f521599cfd
# commit 6260ac8 (verified)
---
# ai-toolkit style LoRA training job configuration for Z-Image Turbo.
# NOTE(review): the scraped copy had all indentation stripped (flat keys,
# duplicate `name`/`type`/`dtype` at top level); nesting reconstructed per the
# ai-toolkit config convention — confirm against the original upload.
config:
  name: last
  process:
    # One trainer job per list entry; this file defines a single job.
    - datasets:
        - cache_latents_to_disk: true
          caption_dropout_rate: 0.0
          caption_ext: txt
          folder_path: /dataset/images/1c93dd95-2e89-48d9-813d-e0f521599cfd/img
          is_reg: false
          # Multi-resolution bucket sizes (pixels).
          resolution:
            - 512
            - 768
            - 1024
      device: cuda
      model:
        # Quoted: the value contains ':' — keep it an unambiguous plain string.
        arch: 'zimage:turbo'
        assistant_lora_path: /cache/hf_cache/zimage_turbo_training_adapter_v2.safetensors
        name_or_path: /cache/models/gradients-io-tournaments--Z-Image-Turbo
        # qfloat8 quantization; `_te` presumably targets the text encoder — verify.
        qtype: qfloat8
        qtype_te: qfloat8
        quantize: true
        quantize_te: true
      network:
        # LoRA rank/alpha for conv and linear layers.
        conv: 16
        conv_alpha: 16
        linear: 32
        linear_alpha: 32
        type: lora
      save:
        dtype: bf16
        max_step_saves_to_keep: 4
        save_every: 50
        save_format: diffusers
      train:
        batch_size: 4
        dtype: bf16
        ema_config:
          ema_decay: 0.99
          use_ema: true
        gradient_accumulation_steps: 1
        gradient_checkpointing: true
        loss_type: l2
        lr: 0.0001
        lr_scheduler: cosine
        lr_scheduler_args:
          num_cycles: 1
          power: 1.0
        lr_warmup_steps: 25
        max_train_epochs: 1
        noise_scheduler: flowmatch
        optimizer: adamw8bit
        optimizer_args:
          betas:
            - 0.9
            - 0.999
          eps: 1.0e-08
          weight_decay: 0.0001
        timestep_type: weighted
        training_folder: /app/checkpoints/1c93dd95-2e89-48d9-813d-e0f521599cfd/z-image
        type: diffusion_trainer
job: extension
meta:
  name: zimage_lora
  # Quoted so '1.0' stays a string, not the float 1.0.
  version: '1.0'