{
"models": {
"wan21_1b": {
"hf_repo": "Wan-AI/Wan2.1-T2V-1.3B-Diffusers",
"config": "train_lora_wan21_1b_24gb.yaml"
},
"wan21_14b": {
"hf_repo": "Wan-AI/Wan2.1-T2V-14B-Diffusers",
"config": "train_lora_wan21_14b_24gb.yaml"
},
"wan22_14b": {
"hf_repo": "ai-toolkit/Wan2.2-T2V-A14B-Diffusers-bf16",
"config": "train_lora_wan22_14b_24gb.yaml"
},
"qwen_image": {
"hf_repo": "Qwen/Qwen-Image",
"config": "train_lora_qwen_image_24gb.yaml"
},
"qwen_image_edit": {
"hf_repo": "Qwen/Qwen-Image-Edit",
"config": "train_lora_qwen_image_edit_32gb.yaml"
},
"flux_dev": {
"hf_repo": "black-forest-labs/FLUX.1-dev",
"config": "train_lora_flux_24gb.yaml"
},
"flux_schnell": {
"hf_repo": "black-forest-labs/FLUX.1-schnell",
"config": "train_lora_flux_schnell_24gb.yaml"
}
},
"ara_adapters": {
"wan22_14b": "ostris/accuracy_recovery_adapters/wan22_14b_t2i_torchao_uint4.safetensors",
"qwen_image": "ostris/accuracy_recovery_adapters/qwen_image_torchao_uint3.safetensors"
}
}