{
  "components": {
    "python": {
      "3.10": { "ver": "3.10.9", "label": "Python 3.10.9 (Legacy)" },
      "3.11": { "ver": "3.11.14", "label": "Python 3.11.14 (Recommended)" }
    },
    "torch": {
      "cu126": {
        "label": "Torch 2.6.0 + CUDA 12.6",
        "cmd": "torch==2.6.0+cu126 torchvision==0.21.0+cu126 torchaudio==2.6.0+cu126 --index-url https://download.pytorch.org/whl/cu126"
      },
      "cu128": {
        "label": "Torch 2.7.1 + CUDA 12.8",
        "cmd": "torch==2.7.1 torchvision==0.22.1 torchaudio==2.7.1 --index-url https://download.pytorch.org/whl/cu128"
      },
      "cu130": {
        "label": "Torch 2.10.0 + CUDA 13.0",
        "cmd": "torch==2.10.0 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu130"
      },
      "rocm65": {
        "label": "ROCm 6.5 (TheRock)",
        "cmd": {
          "win": "https://github.com/scottt/rocm-TheRock/releases/download/v6.5.0rc-pytorch-gfx110x/torch-2.7.0a0+rocm_git3f903c3-cp311-cp311-win_amd64.whl https://github.com/scottt/rocm-TheRock/releases/download/v6.5.0rc-pytorch-gfx110x/torchvision-0.22.0+9eb57cd-cp311-cp311-win_amd64.whl https://github.com/scottt/rocm-TheRock/releases/download/v6.5.0rc-pytorch-gfx110x/torchaudio-2.7.0a0+52638ef-cp311-cp311-win_amd64.whl",
          "linux": "torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.2"
        }
      }
    },
    "triton": {
      "v33": {
        "label": "Triton < 3.3",
        "cmd": {
          "win": "-U \"triton-windows<3.3\"",
          "linux": "-U \"triton<3.3\""
        }
      },
      "v34": {
        "label": "Triton < 3.4",
        "cmd": {
          "win": "-U \"triton-windows<3.4\"",
          "linux": "-U \"triton<3.4\""
        }
      },
      "latest": {
        "label": "Triton Latest",
        "cmd": {
          "win": "-U triton-windows",
          "linux": "-U triton"
        }
      }
    },
    "sage": {
      "v1": {
        "label": "Sage Attention 1.0.6",
        "cmd": "sageattention==1.0.6"
      },
      "v211": {
        "label": "Sage Attention 2.1.1",
        "cmd": {
          "win": "https://github.com/woct0rdho/SageAttention/releases/download/v2.1.1-windows/sageattention-2.1.1+cu126torch2.6.0-cp310-cp310-win_amd64.whl",
          "linux": "pip install \"setuptools<=75.8.2\" && git clone https://github.com/thu-ml/SageAttention && pip install -e SageAttention"
        }
      },
      "v220": {
        "label": "Sage Attention 2.2.0",
        "cmd": {
          "win": "https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows/sageattention-2.2.0+cu128torch2.7.1-cp310-cp310-win_amd64.whl",
          "linux": "pip install \"setuptools<=75.8.2\" && git clone https://github.com/thu-ml/SageAttention && pip install -e SageAttention"
        }
      },
      "v220_cu13": {
        "label": "Sage Attention 2.2.0 (CUDA 13)",
        "cmd": {
          "win": "https://github.com/woct0rdho/SageAttention/releases/download/v2.2.0-windows.post4/sageattention-2.2.0+cu130torch2.9.0andhigher.post4-cp39-abi3-win_amd64.whl",
          "linux": "pip install \"setuptools<=75.8.2\" && git clone https://github.com/thu-ml/SageAttention && pip install -e SageAttention"
        }
      }
    },
    "flash": {
      "v27": {
        "label": "Flash Attention 2.7.x",
        "cmd": {
          "win": "https://github.com/Redtash1/Flash_Attention_2_Windows/releases/download/v2.7.0-v2.7.4/flash_attn-2.7.4.post1+cu128torch2.7.0cxx11abiFALSE-cp310-cp310-win_amd64.whl",
          "linux": "flash-attn==2.7.2.post1"
        }
      },
      "v210": {
        "label": "Flash Attention 2.8.x",
        "cmd": {
          "win": "https://github.com/deepbeepmeep/kernels/releases/download/Flash2/flash_attn-2.8.3-cp311-cp311-win_amd64.whl",
          "linux": "flash-attn"
        }
      }
    },
    "kernels": {
      "nunchaku": {
        "label": "Nunchaku INT4/FP4",
        "cmd": {
          "win": "https://github.com/deepbeepmeep/kernels/releases/download/v1.2.0_Nunchaku/nunchaku-1.2.0+torch2.7-cp310-cp310-win_amd64.whl",
          "linux": "https://github.com/deepbeepmeep/kernels/releases/download/v1.2.0_Nunchaku/nunchaku-1.2.0+torch2.7-cp310-cp310-linux_x86_64.whl"
        }
      },
      "nunchaku_cu13": {
        "label": "Nunchaku INT4/FP4 (CUDA 13)",
        "cmd": {
          "win": "https://github.com/nunchaku-ai/nunchaku/releases/download/v1.2.1/nunchaku-1.2.1+cu13.0torch2.10-cp310-cp310-win_amd64.whl",
          "linux": "https://github.com/nunchaku-ai/nunchaku/releases/download/v1.2.1/nunchaku-1.2.1+cu13.0torch2.10-cp310-cp310-linux_x86_64.whl"
        }
      },
      "light2xv": {
        "label": "Light2xv NVFP4 (RTX 50xx)",
        "cmd": {
          "win": "https://github.com/deepbeepmeep/kernels/releases/download/Light2xv/lightx2v_kernel-0.0.2+torch2.10.0-cp311-abi3-win_amd64.whl",
          "linux": "https://github.com/deepbeepmeep/kernels/releases/download/Light2xv/lightx2v_kernel-0.0.2+torch2.10.0-cp311-abi3-linux_x86_64.whl"
        }
      }
    }
  },
  "gpu_profiles": {
    "GTX_10": { "python": "3.10", "torch": "cu126", "triton": null, "sage": null, "flash": null, "kernels": [] },
    "RTX_20": { "python": "3.10", "torch": "cu126", "triton": "v33", "sage": "v1", "flash": null, "kernels": [] },
    "RTX_30": { "python": "3.10", "torch": "cu126", "triton": "v33", "sage": "v211", "flash": null, "kernels": [] },
    "RTX_40": { "python": "3.10", "torch": "cu128", "triton": "v34", "sage": "v220", "flash": "v27", "kernels": ["nunchaku"] },
    "RTX_50": { "python": "3.11", "torch": "cu130", "triton": "latest", "sage": "v220_cu13", "flash": "v210", "kernels": ["light2xv"] },
    "AMD_GFX110X": { "python": "3.11", "torch": "rocm65", "triton": null, "sage": null, "flash": null, "kernels": [], "env": { "HSA_OVERRIDE_GFX_VERSION": "11.0.0" } },
    "AMD_GFX1151": { "python": "3.11", "torch": "rocm65", "triton": null, "sage": null, "flash": null, "kernels": [], "env": { "HSA_OVERRIDE_GFX_VERSION": "11.5.1" } },
    "AMD_GFX1201": { "python": "3.11", "torch": "rocm65", "triton": null, "sage": null, "flash": null, "kernels": [], "env": { "HSA_OVERRIDE_GFX_VERSION": "12.0.1" } }
  }
}