Commit: bd9b6e5
Parent(s): a251936
Refactor import paths and update requirements to include timm
Files changed:
- models/infinity.py +7 -7
- requirements.txt +2 -1
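The refactor appears to flatten the import paths to repository-root-relative modules (utils.*, models.*), which is the layout that resolves when a Space's entrypoint runs from the repo root. A minimal sketch of a shim that tolerates both layouts during such a rename; the pre-refactor "infinity.*" prefix is a hypothetical example, since the commit only shows the new paths:

# A sketch, not from the commit: tolerate both module layouts.
# The "infinity.*" prefix below is hypothetical; the diff shows
# only the new root-relative paths.
try:
    from models.flex_attn import FlexAttn            # layout this commit adopts
except ImportError:
    from infinity.models.flex_attn import FlexAttn   # hypothetical old layout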
models/infinity.py CHANGED

@@ -18,15 +18,15 @@ from PIL import Image
 import numpy as np
 from torch.nn.attention.flex_attention import flex_attention
 
-import …
-from …
-from …
-from …
-from …
-from …
+import utils.dist as dist
+from utils.dist import for_visualize
+from models.basic import flash_attn_func, flash_fused_op_installed, AdaLNBeforeHead, CrossAttnBlock, SelfAttnBlock, CrossAttention, FastRMSNorm, precompute_rope2d_freqs_grid
+from utils import misc
+from models.flex_attn import FlexAttn
+from utils.dynamic_resolution import dynamic_resolution_h_w, h_div_w_templates
 
 try:
-    from …
+    from models.fused_op import fused_ada_layer_norm, fused_ada_rms_norm
 except:
     fused_ada_layer_norm, fused_ada_rms_norm = None, None
 
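The try/except at the end of the hunk is an optional-dependency guard: if the fused kernels in models.fused_op fail to import, both names fall back to None and callers must dispatch to an eager path. A minimal sketch of that dispatch, assuming an (x, scale, eps) signature for the fused kernel; the wrapper below is illustrative, not code from this repository:

import torch

try:
    from models.fused_op import fused_ada_rms_norm
except ImportError:  # the commit uses a bare "except:"; ImportError is narrower
    fused_ada_rms_norm = None

def ada_rms_norm(x: torch.Tensor, scale: torch.Tensor, eps: float = 1e-6) -> torch.Tensor:
    # Assumed (x, scale, eps) signature; only the import fallback comes from the diff.
    if fused_ada_rms_norm is not None:
        return fused_ada_rms_norm(x, scale, eps)
    # Eager fallback: RMS-normalize over the last dim, then apply the adaptive scale.
    rms = x.pow(2).mean(dim=-1, keepdim=True).add(eps).rsqrt()
    return x * rms * scale

Catching ImportError, as above, avoids masking unrelated errors raised while importing models.fused_op, which the hunk's bare except: would silently swallow.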
requirements.txt CHANGED

@@ -5,4 +5,5 @@ huggingface-hub
 transformers
 argparse
 spaces
-torchvision
+torchvision
+timm
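With timm joining torchvision in requirements.txt, a quick import check, a sketch rather than anything in the commit, confirms the Space's runtime resolves the new dependencies:

import importlib

# Not part of the commit: verify the listed packages import and report versions.
for name in ("transformers", "torchvision", "timm"):
    module = importlib.import_module(name)
    print(name, getattr(module, "__version__", "unknown"))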