Spaces:
Running on Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -142,6 +142,23 @@ try:
|
|
| 142 |
from hyvideo.commons.infer_state import initialize_infer_state
|
| 143 |
# Import the specific I2V System Prompt from the repo
|
| 144 |
from hyvideo.utils.rewrite.i2v_prompt import i2v_rewrite_system_prompt
|
|
| 145 |
except ImportError as e:
|
| 146 |
print(f"CRITICAL ERROR: {e}")
|
| 147 |
sys.exit(1)
|
|
|
|
| 142 |
from hyvideo.commons.infer_state import initialize_infer_state
|
| 143 |
# Import the specific I2V System Prompt from the repo
|
| 144 |
from hyvideo.utils.rewrite.i2v_prompt import i2v_rewrite_system_prompt
|
| 145 |
+
|
| 146 |
+
# --- FIX: Force Disable Flash Attention Patch ---
|
| 147 |
+
import hyvideo.models.transformers.modules.attention
|
| 148 |
+
|
| 149 |
+
print("🛠️ Patching Attention Mode to 'torch' (SDPA) to bypass Flash Attn check...")
|
| 150 |
+
|
| 151 |
+
def patched_fallback(attn_mode, infer_state=None, block_idx=None):
|
| 152 |
+
# Always return 'torch' to bypass the flash-attn check
|
| 153 |
+
return "torch"
|
| 154 |
+
|
| 155 |
+
# Patch the source definition in commons
|
| 156 |
+
hyvideo.commons.maybe_fallback_attn_mode = patched_fallback
|
| 157 |
+
|
| 158 |
+
# Patch the reference inside the attention module (crucial for TokenRefiner which imports it)
|
| 159 |
+
hyvideo.models.transformers.modules.attention.maybe_fallback_attn_mode = patched_fallback
|
| 160 |
+
# ------------------------------------------------
|
| 161 |
+
|
| 162 |
except ImportError as e:
|
| 163 |
print(f"CRITICAL ERROR: {e}")
|
| 164 |
sys.exit(1)
|