waveydaveygravy committed on
Commit
05fdc03
·
1 Parent(s): d660507

Upload m0bpyscho125steps2e6.yaml with huggingface_hub

Browse files
Files changed (1) hide show
  1. m0bpyscho125steps2e6.yaml +36 -0
m0bpyscho125steps2e6.yaml ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ command:
2
+ - accelerate
3
+ - launch
4
+ - --config_file=/content/kohya-trainer/accelerate_config/config.yaml
5
+ - --num_cpu_threads_per_process=8
6
+ - train_network.py
7
+ - --output_name=mobpsycho
8
+ - --pretrained_model_name_or_path=/content/pretrained_model/Animefull-final-pruned.ckpt
9
+ - --vae=/content/vae/anime.vae.pt
10
+ - --train_data_dir=/content/LoRA/train_data
11
+ - --in_json=/content/LoRA/meta_lat.json
12
+ - --output_dir=/content/LoRA/output
13
+ - --network_dim=128
14
+ - --network_alpha=128
15
+ - --network_module=networks.lora
16
+ - --optimizer_type=AdamW8bit
17
+ - --learning_rate=2e-06
18
+ - --unet_lr=2e-06
19
+ - --text_encoder_lr=5e-05
20
+ - --lr_scheduler=constant
21
+ - --dataset_repeats=125
22
+ - --resolution=512
23
+ - --noise_offset=0.1
24
+ - --train_batch_size=1
25
+ - --max_train_epochs=1
26
+ - --mixed_precision=fp16
27
+ - --save_precision=fp16
28
+ - --save_every_n_epochs=1
29
+ - --save_model_as=safetensors
30
+ - --max_token_length=225
31
+ - --gradient_checkpointing
32
+ - --gradient_accumulation_steps=1
33
+ - --clip_skip=2
34
+ - --logging_dir=/content/LoRA/logs
35
+ - --log_prefix=mobpsycho
36
+ - --shuffle_caption