Spaces:
Sleeping
Sleeping
fix: Add a fallback definition for `is_torch_fx_available` in `transformers.utils.import_utils`.
Browse files
app.py
CHANGED
|
@@ -17,6 +17,20 @@ try:
|
|
| 17 |
except Exception:
|
| 18 |
pass
|
| 19 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
# Suppress annoying warnings
|
| 21 |
warnings.filterwarnings("ignore", message="The parameters have been moved from the Blocks constructor to the launch()")
|
| 22 |
warnings.filterwarnings("ignore", message="CUDA is not available or torch_xla is imported")
|
|
|
|
| 17 |
except Exception:
|
| 18 |
pass
|
| 19 |
|
| 20 |
# Backfill `is_torch_fx_available` on transformers.utils.import_utils for
# transformers builds that no longer (or do not yet) export it.  Best-effort:
# every failure is swallowed on purpose so app startup never breaks.
try:
    from transformers.utils import import_utils as _import_utils

    if not hasattr(_import_utils, "is_torch_fx_available"):
        def is_torch_fx_available():
            """Fallback probe: True iff torch is importable and exposes torch.fx."""
            try:
                import torch as _torch
                return hasattr(_torch, "fx")
            except Exception:
                return False

        # Monkey-patch the shim so downstream transformers code can call it.
        _import_utils.is_torch_fx_available = is_torch_fx_available
except Exception:
    # transformers missing or import failed -- nothing to patch.
    pass
|
| 33 |
+
|
| 34 |
# Suppress annoying warnings
|
| 35 |
warnings.filterwarnings("ignore", message="The parameters have been moved from the Blocks constructor to the launch()")
|
| 36 |
warnings.filterwarnings("ignore", message="CUDA is not available or torch_xla is imported")
|
app_hf.py
CHANGED
|
@@ -27,6 +27,20 @@ try:
|
|
| 27 |
except Exception:
|
| 28 |
pass
|
| 29 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
# Suppress annoying warnings
|
| 31 |
warnings.filterwarnings("ignore", message="The parameters have been moved from the Blocks constructor to the launch()")
|
| 32 |
warnings.filterwarnings("ignore", message="CUDA is not available or torch_xla is imported")
|
|
|
|
| 27 |
except Exception:
|
| 28 |
pass
|
| 29 |
|
| 30 |
# Shim for older/newer transformers: guarantee that
# transformers.utils.import_utils.is_torch_fx_available exists.  This is a
# deliberate best-effort patch; any import problem is silently ignored.
try:
    from transformers.utils import import_utils as _import_utils

    if not hasattr(_import_utils, "is_torch_fx_available"):
        def is_torch_fx_available():
            """Report whether torch.fx is usable (fallback implementation)."""
            try:
                import torch
            except Exception:
                # torch not installed / failed to import -> fx unavailable.
                return False
            return hasattr(torch, "fx")

        # Expose the shim where transformers expects to find it.
        _import_utils.is_torch_fx_available = is_torch_fx_available
except Exception:
    # transformers itself is absent; there is nothing to patch.
    pass
|
| 43 |
+
|
| 44 |
# Suppress annoying warnings
|
| 45 |
warnings.filterwarnings("ignore", message="The parameters have been moved from the Blocks constructor to the launch()")
|
| 46 |
warnings.filterwarnings("ignore", message="CUDA is not available or torch_xla is imported")
|