hypnopump committed on
Commit
6052421
·
verified ·
1 Parent(s): 81d95e1

Upload folder using huggingface_hub

Browse files
build/torch29-cxx11-cu129-x86_64-linux/flash_attention_3/flash_attn_interface.py CHANGED
@@ -22,7 +22,7 @@ def round_multiple(x, m):
22
 
23
 
24
  def round_up_headdim(head_size: int) -> int:
25
- from flash_attn_config import CONFIG
26
 
27
  if not CONFIG["build_flags"]["FLASHATTENTION_DISABLE_HDIM64"]:
28
  if head_size <= 64:
 
22
 
23
 
24
  def round_up_headdim(head_size: int) -> int:
25
+ from .flash_attn_config import CONFIG
26
 
27
  if not CONFIG["build_flags"]["FLASHATTENTION_DISABLE_HDIM64"]:
28
  if head_size <= 64: