fix: add linux_aarch64 flash-attn wheel to enable Arm64 builds

#14
Files changed (1) hide show
  1. requirements.txt +1 -0
requirements.txt CHANGED
@@ -30,6 +30,7 @@ triton-windows>=3.0.0,<3.4; sys_platform == 'win32'
30
  triton>=3.0.0; sys_platform != 'win32'
31
  flash-attn @ https://github.com/sdbds/flash-attention-for-windows/releases/download/2.8.2/flash_attn-2.8.2+cu128torch2.7.1cxx11abiFALSEfullbackward-cp311-cp311-win_amd64.whl ; sys_platform == 'win32' and python_version == '3.11' and platform_machine == 'AMD64'
32
  flash-attn @ https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.12/flash_attn-2.8.3+cu128torch2.10-cp311-cp311-linux_x86_64.whl ; sys_platform == 'linux' and python_version == '3.11'
 
33
  # Kernels library for flash-attn3 (preferred over flash-attn when available)
34
  kernels
35
  xxhash
 
30
  triton>=3.0.0; sys_platform != 'win32'
31
  flash-attn @ https://github.com/sdbds/flash-attention-for-windows/releases/download/2.8.2/flash_attn-2.8.2+cu128torch2.7.1cxx11abiFALSEfullbackward-cp311-cp311-win_amd64.whl ; sys_platform == 'win32' and python_version == '3.11' and platform_machine == 'AMD64'
32
  flash-attn @ https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.12/flash_attn-2.8.3+cu128torch2.10-cp311-cp311-linux_x86_64.whl ; sys_platform == 'linux' and python_version == '3.11' and platform_machine == 'x86_64'
33
+ flash-attn @ https://github.com/mjun0812/flash-attention-prebuild-wheels/releases/download/v0.7.12/flash_attn-2.8.3+cu128torch2.10-cp311-cp311-linux_aarch64.whl ; sys_platform == 'linux' and python_version == '3.11' and platform_machine == 'aarch64'
34
  # Kernels library for flash-attn3 (preferred over flash-attn when available)
35
  kernels
36
  xxhash