Commit caabd58
Parent(s): bc82fa6
Upload folder using huggingface_hub
- jw1_2_q2/config.yaml +77 -0
- jw1_2_q2/jw1_2_q2.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000004522.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000004760.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000004998.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000005236.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000005474.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000005712.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000005950.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000006188.safetensors +3 -0
- jw1_2_q2/jw1_2_q2_000006426.safetensors +3 -0
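The commit message says the folder was pushed with huggingface_hub. A minimal sketch of that call, assuming a hypothetical repo_id (the actual repository name is not shown in this diff):

# Sketch: push a local training-output folder to the Hub, mirroring the
# "Upload folder using huggingface_hub" commit message above.
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` or HF_TOKEN
api.upload_folder(
    folder_path="jw1_2_q2",             # local folder with config.yaml + checkpoints
    path_in_repo="jw1_2_q2",            # keep the same subfolder layout in the repo
    repo_id="your-username/your-repo",  # assumption: real repo name not shown here
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)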
jw1_2_q2/config.yaml
ADDED
@@ -0,0 +1,77 @@
+job: extension
+config:
+  name: jw1_2_q2
+  process:
+  - type: sd_trainer
+    training_folder: /root/ai-toolkit/modal_output
+    device: cuda:0
+    trigger_word: jw1_2_q2
+    network:
+      type: lora
+      linear: 32
+      linear_alpha: 32
+      lokr_full_rank: true
+      lokr_factor: -1
+    save:
+      dtype: bf16
+      save_every: 238
+      max_step_saves_to_keep: 9
+      push_to_hub: false
+    datasets:
+    - folder_path: /root/ai-toolkit/jw1_2_q2
+      caption_ext: txt
+      caption_dropout_rate: 0.1
+      shuffle_tokens: false
+      cache_latents_to_disk: true
+      resolution:
+      - 1024
+    train:
+      batch_size: 1
+      steps: 6664
+      gradient_accumulation: 1
+      train_unet: true
+      train_text_encoder: false
+      gradient_checkpointing: true
+      noise_scheduler: flowmatch
+      optimizer: adamw
+      timestep_type: shift
+      optimizer_params:
+        weight_decay: 0.0001
+      lr: 0.0001
+      lr_scheduler: constant_with_warmup
+      lr_scheduler_params:
+        num_warmup_steps: 119
+      skip_first_sample: true
+      disable_sampling: true
+      ema_config:
+        use_ema: true
+        ema_decay: 0.99
+      dtype: bf16
+      do_differential_guidance: true
+      differential_guidance_scale: 3
+    model:
+      name_or_path: /root/Qwen-Image-2512
+      arch: qwen_image:2512
+      quantize: false
+      quantize_te: false
+      low_vram: false
+    sample:
+      sampler: flowmatch
+      sample_every: 170
+      width: 1024
+      height: 1024
+      prompts:
+      - Photo of a young woman with long black hair, wearing a blue and white dress
+        with a white lace collar. She has a fair complexion and is looking directly
+        at the camera with a neutral expression. Her hair is adorned with small pink
+        flowers. The background is a gradient of blue to white, creating a soft and
+        dreamy atmosphere. The lighting is soft and even, highlighting her features.
+        The image has a high-quality, professional feel.
+      neg: ''
+      seed: 777
+      walk_seed: true
+      guidance_scale: 4
+      sample_steps: 30
+meta:
+  name: jw1_2_q2
+  version: '1.0'
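For reference, the step-numbered checkpoints in this commit follow from steps: 6664, save_every: 238, and max_step_saves_to_keep: 9 in the config above, assuming the trainer prunes older step saves. An illustrative sketch of that arithmetic (not part of the repo):

# Sketch: which intermediate checkpoints survive, given the save settings
# in config.yaml above (steps=6664, save_every=238, max_step_saves_to_keep=9).
steps = 6664        # train.steps
save_every = 238    # save.save_every
keep = 9            # save.max_step_saves_to_keep

intermediate = list(range(save_every, steps, save_every))   # 238, 476, ..., 6426
kept = intermediate[-keep:]                                  # only the newest 9 are retained
print([f"jw1_2_q2_{s:09d}.safetensors" for s in kept])
# ['jw1_2_q2_000004522.safetensors', ..., 'jw1_2_q2_000006426.safetensors']
# The final save at step 6664 is written without a step suffix: jw1_2_q2.safetensors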
jw1_2_q2/jw1_2_q2.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e3425171f73d1fc29a97e79478b26c974ebac791dc1c7cd1bbb4c91ea5df7308
+size 590058872
jw1_2_q2/jw1_2_q2_000004522.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be8e8a8389995419022797c224b97263f53f13ae440d2c93d6a900b59e1e1fa3
+size 590058872
jw1_2_q2/jw1_2_q2_000004760.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:862fb873acca2471e310e3a6da2d7fa55cb44de1a845dafdeddaffac39845017
+size 590058872
jw1_2_q2/jw1_2_q2_000004998.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a018c48c98ca64797b8371d6c31c25f7180ada3f0d24599b60ba09a97eff8a9a
+size 590058872
jw1_2_q2/jw1_2_q2_000005236.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1efa6adb517d280ecef93da627b30030ee8f4fd8b4ce75ce7ba726d8876b8152
+size 590058872
jw1_2_q2/jw1_2_q2_000005474.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7bfc60a44928217f89b70f1cf50af280a90ef0b0ca3368ac98a8b6a42e7591e1
+size 590058872
jw1_2_q2/jw1_2_q2_000005712.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b83e402ea5034e7ef1b9dc3c9dfacff965d2bb61bd4c94f883479e7fc1bae8d1
+size 590058872
jw1_2_q2/jw1_2_q2_000005950.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b16edf98f11feb0d0f57819e2d101d9a2f1c578ff401a13ede285bffb2cdf2d
+size 590058872
jw1_2_q2/jw1_2_q2_000006188.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87ce87fdc496eb8a53e253a301067f08d6f3822d729b9b5513c56ad41d73774a
+size 590058872
jw1_2_q2/jw1_2_q2_000006426.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84609f7c26e2fe8858b1541b7e7fbd20c629b4ca8f60673df29fb95fdff35639
+size 590058872
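The .safetensors entries above are Git LFS pointers (oid sha256 + size), not the weights themselves. A minimal sketch of fetching one checkpoint and checking it against the recorded pointer, assuming a hypothetical repo_id:

# Sketch: download one LoRA checkpoint from the Hub and verify it against the
# LFS pointer recorded above (oid sha256 and size). repo_id is an assumption.
import hashlib
import os
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

path = hf_hub_download(
    repo_id="your-username/your-repo",       # assumption: real repo name not shown in this diff
    filename="jw1_2_q2/jw1_2_q2.safetensors",
)

print(os.path.getsize(path))                 # should match the recorded size, 590058872
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
print(digest)                                # should match the pointer's oid, e3425171...

state_dict = load_file(path)                 # LoRA weights as a dict of tensors
print(len(state_dict), "tensors")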