LoRa121 committed on
Commit
9226e2a
·
verified ·
1 Parent(s): 5219f12

Upload z_image_toml.toml

Browse files
Files changed (1) hide show
  1. z_image_toml.toml +36 -0
z_image_toml.toml ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Output path for training runs. Each training run makes a new directory in here.
output_dir = '/diffusion_pipe_working_folder/output_folder/z_image_lora'

# Checkpoint cadence and total training length, both measured in epochs.
save_every_n_epochs = 10
epochs = 20
# Single-pipeline-stage, effective batch size 1 (micro batch 1, no accumulation).
pipeline_stages = 1
micro_batch_size_per_gpu = 1
gradient_accumulation_steps = 1
# NOTE(review): presumably trades compute for memory by recomputing activations
# in the backward pass — confirm against diffusion-pipe docs.
activation_checkpointing = true
dataset = 'examples/dataset.toml'

[model]
type = 'z_image'
diffusion_model = '/diffusion_pipe_working_folder/models/z_image/z_image_turbo_bf16.safetensors'
vae = '/diffusion_pipe_working_folder/models/z_image/ae.safetensors'
# NOTE(review): the Qwen3-4B text encoder is loaded via the 'lumina2' encoder
# type — verify this is the loader diffusion-pipe expects for Z-Image.
text_encoders = [
    {path = '/diffusion_pipe_working_folder/models/z_image/qwen_3_4b.safetensors', type = 'lumina2'},
]
# Use if training Z-Image-Turbo
merge_adapters = ['/diffusion_pipe_working_folder/models/z_image/zimage_turbo_training_adapter_v2.safetensors']
dtype = 'bfloat16'

# LoRA adapter to train; the base model weights stay frozen.
[adapter]
type = 'lora'
rank = 32
dtype = 'bfloat16'

[optimizer]
# NOTE(review): name suggests AdamW from the `optimi` package — confirm
# against diffusion-pipe's supported optimizer types.
type = 'adamw_optimi'
lr = 2e-4
betas = [0.9, 0.99]
weight_decay = 0.01
eps = 1e-8