File size: 709 Bytes
ad5f26a |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 |
# torch.ao is a package with a lot of interdependencies.
# We will use lazy import to avoid cyclic dependencies here.
from typing import TYPE_CHECKING as _TYPE_CHECKING
if _TYPE_CHECKING:
from types import ModuleType
from torch.ao import ( # noqa: TC004
nn as nn,
ns as ns,
pruning as pruning,
quantization as quantization,
)
__all__ = [
"nn",
"ns",
"pruning",
"quantization",
]
def __getattr__(name: str) -> "ModuleType":
    """Lazily import the submodules listed in ``__all__`` (PEP 562).

    Importing happens on first attribute access so that the cyclic
    dependencies inside ``torch.ao`` are only resolved when needed.

    Raises:
        AttributeError: if ``name`` is not one of the public submodules.
    """
    if name not in __all__:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    # Local import keeps module import time free of the importlib cost.
    import importlib

    return importlib.import_module("." + name, __name__)
|