Commit 0eb2285
Parent(s): 3a5860f
Upload folder using huggingface_hub
Files changed:
- jw1_1_q2/config.yaml +77 -0
- jw1_1_q2/jw1_1_q2.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000004572.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000004826.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000005080.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000005334.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000005588.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000005842.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000006096.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000006350.safetensors +3 -0
- jw1_1_q2/jw1_1_q2_000006604.safetensors +3 -0
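The commit message indicates these files were pushed with huggingface_hub's upload_folder API. A minimal sketch of how a commit like this is typically produced, assuming the training output lives under the folder named in the config below; the repo_id is a hypothetical placeholder, since the repository name is not shown on this page:

from huggingface_hub import HfApi

api = HfApi()

# Push a local training-output folder as a single commit.
# repo_id and folder_path are placeholders/assumptions, not
# values taken from this commit page.
api.upload_folder(
    folder_path="/root/ai-toolkit/modal_output/jw1_1_q2",
    path_in_repo="jw1_1_q2",
    repo_id="user/jw1_1_q2",
    commit_message="Upload folder using huggingface_hub",
)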
jw1_1_q2/config.yaml
ADDED
@@ -0,0 +1,77 @@
+job: extension
+config:
+  name: jw1_1_q2
+  process:
+  - type: sd_trainer
+    training_folder: /root/ai-toolkit/modal_output
+    device: cuda:0
+    trigger_word: jw1_1_q2
+    network:
+      type: lora
+      linear: 32
+      linear_alpha: 32
+      lokr_full_rank: true
+      lokr_factor: -1
+    save:
+      dtype: bf16
+      save_every: 254
+      max_step_saves_to_keep: 9
+      push_to_hub: false
+    datasets:
+    - folder_path: /root/ai-toolkit/jw1_1_q2
+      caption_ext: txt
+      caption_dropout_rate: 0.1
+      shuffle_tokens: false
+      cache_latents_to_disk: true
+      resolution:
+      - 1024
+    train:
+      batch_size: 1
+      steps: 6858
+      gradient_accumulation: 1
+      train_unet: true
+      train_text_encoder: false
+      gradient_checkpointing: true
+      noise_scheduler: flowmatch
+      optimizer: adamw
+      timestep_type: lognorm_blend
+      optimizer_params:
+        weight_decay: 0.0001
+      lr: 0.0001
+      lr_scheduler: cosine_with_restarts
+      lr_scheduler_params:
+        eta_min: 5.0e-05
+      skip_first_sample: true
+      disable_sampling: true
+      ema_config:
+        use_ema: true
+        ema_decay: 0.99
+      dtype: bf16
+      do_differential_guidance: true
+      differential_guidance_scale: 3
+    model:
+      name_or_path: /root/Qwen-Image-2512
+      arch: qwen_image:2512
+      quantize: false
+      quantize_te: false
+      low_vram: false
+    sample:
+      sampler: flowmatch
+      sample_every: 170
+      width: 1024
+      height: 1024
+      prompts:
+      - Photo of a young woman with long black hair, wearing a blue and white dress
+        with a white lace collar. She has a fair complexion and is looking directly
+        at the camera with a neutral expression. Her hair is adorned with small pink
+        flowers. The background is a gradient of blue to white, creating a soft and
+        dreamy atmosphere. The lighting is soft and even, highlighting her features.
+        The image has a high-quality, professional feel.
+      neg: ''
+      seed: 777
+      walk_seed: true
+      guidance_scale: 4
+      sample_steps: 30
+meta:
+  name: jw1_1_q2
+  version: '1.0'
jw1_1_q2/jw1_1_q2.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:43efc8ebdff7e447106b5ea457b83fa130e37a95b3696c0273ea73813ec63dca
+size 590058872
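Each .safetensors entry in this commit is a Git LFS pointer file: the oid is the SHA-256 of the full file contents and size is its length in bytes. A minimal sketch for verifying a downloaded artifact against its pointer; the local path here is an assumed example:

import hashlib
import os

path = "jw1_1_q2/jw1_1_q2.safetensors"  # assumed local download path
expected_oid = "43efc8ebdff7e447106b5ea457b83fa130e37a95b3696c0273ea73813ec63dca"
expected_size = 590058872

# Hash the file in chunks to avoid loading ~590 MB at once.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("pointer verified")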
jw1_1_q2/jw1_1_q2_000004572.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:78a3b0c8442d9ea01e6f9f303ecec233814410d761b04ecb6086b06162feabfb
+size 590058872
jw1_1_q2/jw1_1_q2_000004826.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba2c32ba8321e7b0ef25e5d98bd3d36222eeaaf856157a51577183d472e6ef59
+size 590058872
jw1_1_q2/jw1_1_q2_000005080.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b0abdece5645642c5f6c5613451477da6b8e3faedfeddc585160b97686dd94d
+size 590058872
jw1_1_q2/jw1_1_q2_000005334.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b65954003cc35707f68c1dded86e13885cc0950bd9245c893ae84027c4a92edb
+size 590058872
jw1_1_q2/jw1_1_q2_000005588.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cace6c92ab5921f11df5776e65f28e01340a29a310c423dc378cb890cd13287a
+size 590058872
jw1_1_q2/jw1_1_q2_000005842.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:19a9cd6e8a1dbc38c151743e2fd422e02c096a1048424734e8b639fa426ae35a
+size 590058872
jw1_1_q2/jw1_1_q2_000006096.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:65717a2c0e117c3ebe61c04abcf62aa570ecf3770a2fc7037d91b6e1dfddc70f
+size 590058872
jw1_1_q2/jw1_1_q2_000006350.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eb79210bb3ce4b9c52ef1c6ab6d59c3d9342b860b0b09b5aa5749ff32ad46984
+size 590058872
jw1_1_q2/jw1_1_q2_000006604.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe7f29ebcc4febc83e93d294bf3e773c61829b00bd16fc9c716d964df640d1bf
+size 590058872
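A minimal sketch for fetching and inspecting the final LoRA weights from this repository; the repo_id is again a hypothetical placeholder, and the in-repo path is taken from the file list above:

from huggingface_hub import hf_hub_download
from safetensors.torch import load_file

# Download the final checkpoint and list its tensors.
lora_path = hf_hub_download(
    repo_id="user/jw1_1_q2",  # placeholder; actual repo not shown here
    filename="jw1_1_q2/jw1_1_q2.safetensors",
)
state_dict = load_file(lora_path)
print(f"{len(state_dict)} tensors loaded")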