{
  "LoRA_type": "Kohya LoCon",
  "adaptive_noise_scale": 0,
  "additional_parameters": "",
  "block_alphas": "",
  "block_dims": "",
  "block_lr_zero_threshold": "",
  "bucket_no_upscale": true,
  "bucket_reso_steps": 1,
  "cache_latents": true,
  "cache_latents_to_disk": false,
  "caption_dropout_every_n_epochs": 0.0,
  "caption_dropout_rate": 0.1,
  "caption_extension": ".txt",
  "clip_skip": 1,
  "color_aug": false,
  "conv_alpha": 4,
  "conv_alphas": "",
  "conv_dim": 8,
  "conv_dims": "",
  "decompose_both": false,
  "dim_from_weights": false,
  "down_lr_weight": "",
  "enable_bucket": true,
  "epoch": 20,
  "factor": -1,
  "flip_aug": false,
  "full_fp16": false,
  "gradient_accumulation_steps": 4.0,
  "gradient_checkpointing": false,
  "keep_tokens": "0",
  "learning_rate": 0.0001,
  "logging_dir": "",
  "lora_network_weights": "",
  "lr_scheduler": "constant",
  "lr_scheduler_num_cycles": "",
  "lr_scheduler_power": "",
  "lr_warmup": 0,
  "max_data_loader_n_workers": "0",
  "max_resolution": "512,512",
  "max_timestep": 1000,
  "max_token_length": "75",
  "max_train_epochs": "",
  "mem_eff_attn": false,
  "mid_lr_weight": "",
  "min_snr_gamma": 10,
  "min_timestep": 0,
  "mixed_precision": "fp16",
  "model_list": "custom",
  "module_dropout": 0,
  "multires_noise_discount": 0.2,
  "multires_noise_iterations": 8,
  "network_alpha": 16,
  "network_dim": 32,
  "network_dropout": 0,
  "no_token_padding": false,
  "noise_offset": 0.03,
  "noise_offset_type": "Original",
  "num_cpu_threads_per_process": 2,
  "optimizer": "AdamW",
  "optimizer_args": "",
  "output_dir": "/content/train/model",
  "output_name": "zed-lora",
  "persistent_data_loader_workers": false,
  "pretrained_model_name_or_path": "/content/train/model/lolsplashart.safetensors",
  "prior_loss_weight": 1.0,
  "random_crop": false,
  "rank_dropout": 0,
  "reg_data_dir": "",
  "resume": "",
  "sample_every_n_epochs": 0,
  "sample_every_n_steps": 0,
  "sample_prompts": "",
  "sample_sampler": "euler_a",
  "save_every_n_epochs": 5,
  "save_every_n_steps": 0,
  "save_last_n_steps": 0,
  "save_last_n_steps_state": 0,
  "save_model_as": "safetensors",
  "save_precision": "fp16",
  "save_state": false,
  "scale_v_pred_loss_like_noise_pred": false,
  "scale_weight_norms": 0,
  "sdxl": false,
  "sdxl_cache_text_encoder_outputs": false,
  "sdxl_no_half_vae": true,
  "seed": "123",
  "shuffle_caption": false,
  "stop_text_encoder_training": 0,
  "text_encoder_lr": 0.0,
  "train_batch_size": 2,
  "train_data_dir": "/content/train/image",
  "train_on_input": false,
  "training_comment": "",
  "unet_lr": 0.0,
  "unit": 1,
  "up_lr_weight": "",
  "use_cp": false,
  "use_wandb": false,
  "v2": false,
  "v_parameterization": false,
  "vae_batch_size": 0,
  "wandb_api_key": "",
  "weighted_captions": false,
  "xformers": true
}