{
"script": "train_flux_lora_ui_with_mask_timestep_range.py",
"seed": 4321,
"mixed_precision": "bf16",
"report_to": "wandb",
"lr_warmup_steps": 0,
"output_dir": "/home/Ubuntu/Pictures/Flux_Fill_LoRA",
"save_name": "Flux-Fill-LoRA-dlay",
"train_data_dir": "/home/Ubuntu/Videos/Flux-Fill-Dataset",
"optimizer": "adamw",
"lr_scheduler": "constant",
"learning_rate": 0.0001,
"train_batch_size": 2,
"repeats": 1,
"gradient_accumulation_steps": 1,
"num_train_epochs": 35,
"save_model_epochs": 1,
"validation_epochs": 1,
"rank": 32,
"skip_epoch": 0,
"skip_step": 0,
"gradient_checkpointing": true,
"validation_ratio": 0.1,
"pretrained_model_name_or_path": "/home/Ubuntu/apps/StableSwarmUI/Models/diffusion_models/Flux-Fill-Dev",
"model_path": "/home/Ubuntu/apps/StableSwarmUI/Models/diffusion_models/Flux-Fill-Dev/flux1-fill-dev.safetensors",
"resume_from_checkpoint": "",
"recreate_cache": false,
"config_path": "/home/Ubuntu/Pictures/Flux_Fill_LoRA/config.json",
"resolution": "1024",
"caption_dropout": 0.1,
"cosine_restarts": 1,
"max_time_steps": 0,
"blocks_to_swap": 0,
"mask_dropout": 0.1,
"reg_ratio": 1,
"reg_timestep": 700
}