# Where checkpoints and training artifacts are written.
output_dir = '/diffusion_pipe_working_folder/output_folder/z_image_lora'

# 20-epoch run, checkpointing every 10 epochs.
save_every_n_epochs = 10
epochs = 20

# Single pipeline stage, micro-batch of 1, no gradient accumulation
# (effective batch size per optimizer step is 1 per GPU).
pipeline_stages = 1
micro_batch_size_per_gpu = 1
gradient_accumulation_steps = 1

# Recompute activations during backward to reduce memory use.
activation_checkpointing = true

# Dataset definition lives in its own TOML file.
# NOTE(review): relative path — presumably resolved against the training
# tool's working directory; confirm against how training is launched.
dataset = 'examples/dataset.toml'
|
|
[model]
type = 'z_image'
diffusion_model = '/diffusion_pipe_working_folder/models/z_image/z_image_turbo_bf16.safetensors'
vae = '/diffusion_pipe_working_folder/models/z_image/ae.safetensors'
# NOTE(review): the text encoder entry uses type = 'lumina2' even though the
# model type is 'z_image' — presumably the loader reuses the Lumina 2 text
# encoder code path for this checkpoint; confirm against the tool's docs.
text_encoders = [
    { path = '/diffusion_pipe_working_folder/models/z_image/qwen_3_4b.safetensors', type = 'lumina2' },
]
# Adapter(s) merged into the base weights before training starts.
merge_adapters = ['/diffusion_pipe_working_folder/models/z_image/zimage_turbo_training_adapter_v2.safetensors']
dtype = 'bfloat16'
|
|
|
|
[adapter]
# Train a LoRA adapter rather than full fine-tuning.
type = 'lora'
# LoRA rank: higher rank = more trainable parameters and a larger adapter.
rank = 32
dtype = 'bfloat16'
|
|
|
|
|
|
[optimizer]
# AdamW from the optimi package (memory-efficient AdamW variant).
type = 'adamw_optimi'
lr = 2e-4
betas = [0.9, 0.99]
weight_decay = 0.01
eps = 1e-8
|
|