Maxtimer97 committed on
Commit
a27b55c
·
1 Parent(s): 8a2bc5d

Relative import first

Browse files
ops/__init__.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # ops/__init__.py
2
+ from .pooling import mean_pooling
3
+ from .compressed_attention import compressed_attention
4
+ from .topk_sparse_attention import topk_sparse_attention
5
+
6
+ __all__ = [
7
+ "mean_pooling",
8
+ "compressed_attention",
9
+ "topk_sparse_attention",
10
+ ]
ops/compressed_attention.py CHANGED
@@ -20,9 +20,9 @@ import triton
20
  import triton.language as tl
21
 
22
  try:
23
- from ops.utils import get_num_warps_stages, is_hopper_gpu
24
- except ImportError:
25
  from .ops.utils import get_num_warps_stages, is_hopper_gpu
 
 
26
 
27
  IS_HOPPER_GPU = is_hopper_gpu()
28
 
 
20
  import triton.language as tl
21
 
22
# Import the shared Triton helpers. This file lives at ops/compressed_attention.py,
# so the sibling module is ops/utils.py and the correct relative import is
# `.utils`. The original `.ops.utils` would resolve to the nonexistent
# `ops.ops.utils`, always raise ImportError, and silently force the absolute
# fallback — defeating the "relative import first" intent of this commit.
try:
    # Preferred: relative import when `ops` is imported as a package.
    from .utils import get_num_warps_stages, is_hopper_gpu
except ImportError:
    # Fallback: absolute import when the module is run with the repo root
    # on sys.path (e.g. executed as a script).
    from ops.utils import get_num_warps_stages, is_hopper_gpu

# Evaluated once at module import; cached for use by the kernels below.
IS_HOPPER_GPU = is_hopper_gpu()
28
 
ops/topk_sparse_attention.py CHANGED
@@ -19,9 +19,9 @@ import triton
19
  import triton.language as tl
20
 
21
  try:
22
- from ops.utils import get_num_warps_stages, is_hopper_gpu
23
- except ImportError:
24
  from .ops.utils import get_num_warps_stages, is_hopper_gpu
 
 
25
 
26
  IS_HOPPER_GPU = is_hopper_gpu()
27
 
 
19
  import triton.language as tl
20
 
21
# Import the shared Triton helpers. This file lives at ops/topk_sparse_attention.py,
# so the sibling module is ops/utils.py and the correct relative import is
# `.utils`. The original `.ops.utils` would resolve to the nonexistent
# `ops.ops.utils`, always raise ImportError, and silently force the absolute
# fallback — defeating the "relative import first" intent of this commit.
try:
    # Preferred: relative import when `ops` is imported as a package.
    from .utils import get_num_warps_stages, is_hopper_gpu
except ImportError:
    # Fallback: absolute import when the module is run with the repo root
    # on sys.path (e.g. executed as a script).
    from ops.utils import get_num_warps_stages, is_hopper_gpu

# Evaluated once at module import; cached for use by the kernels below.
IS_HOPPER_GPU = is_hopper_gpu()
27