---
# LoRA fine-tuning job configuration (ai-toolkit style) for a Qwen-Image model.
# NOTE(review): the original file was garbled by extraction — a "File size"
# header, a line-number gutter fused onto the first key, a trailing "|", and
# stripped indentation. Structure reconstructed to the conventional ai-toolkit
# layout (consistent with the file's alphabetical key order); confirm the
# placement of `training_folder`/`type` at process level against a known-good
# config before relying on it.
config:
  name: last
  process:
    - datasets:
        - cache_latents_to_disk: true
          caption_ext: txt
          # quoted: path contains a space ("1_lora style")
          folder_path: '/cache/images/61f0135e-5d7a-47b2-95f8-5e22b4eae61b/1_lora style'
          is_reg: false
          # multi-resolution bucketing
          resolution:
            - 512
            - 768
            - 1024
      device: cuda
      model:
        arch: qwen_image
        low_vram: true
        name_or_path: /cache/models/gradients-io-tournaments--Qwen-Image-Jib-Mix
        # float8 quantization for both the diffusion model and text encoder
        qtype: float8
        qtype_te: float8
        quantize: true
        quantize_te: true
      network:
        conv_alpha: 32
        conv_rank: 32
        linear: 64
        linear_alpha: 64
        type: lora
      save:
        dtype: bf16
        max_step_saves_to_keep: 4
        save_every: 250
        save_format: diffusers
      train:
        batch_size: 1
        cache_text_embeddings: false
        caption_dropout_rate: 0.15
        cfg_scale: 3.5
        do_cfg: true
        dtype: bf16
        ema_config:
          ema_decay: 0.999
          use_ema: true
        gradient_checkpointing: true
        # huber loss with delta 0.1
        huber_c: 0.1
        loss_type: huber
        lr: 0.00012
        model_name: Qwen2-VL-7B-Instruct-Jib
        noise_scheduler: flowmatch
        optimizer: adamw8bit
        optimizer_params:
          weight_decay: 0.1
        save_every_n_epochs: 60
        steps: 3000
        timestep_type: weighted
        # LoRA on the diffusion model only; text encoder frozen
        train_text_encoder: false
        train_unet: true
      training_folder: /app/checkpoints/61f0135e-5d7a-47b2-95f8-5e22b4eae61b/ipunktest-9
      type: diffusion_trainer
job: extension
meta:
  name: qwen_image_lora_jordansky
  # quoted so the version is not parsed as the float 1.6
  version: '1.6'