| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
import importlib
import importlib.metadata as importlib_metadata
import importlib.util
from functools import lru_cache

import packaging.version
|
|
|
|
@lru_cache
def is_bnb_available() -> bool:
    """Return whether the `bitsandbytes` package is installed (cached)."""
    spec = importlib.util.find_spec("bitsandbytes")
    return spec is not None
|
|
|
|
@lru_cache
def is_bnb_4bit_available() -> bool:
    """Return whether `bitsandbytes` is installed and provides 4-bit support (`Linear4bit`)."""
    if is_bnb_available():
        import bitsandbytes as bnb

        # Older bitsandbytes releases lack the 4-bit linear layer.
        return hasattr(bnb.nn, "Linear4bit")
    return False
|
|
|
|
@lru_cache
def is_auto_gptq_available() -> bool:
    """Return whether a supported version of `auto_gptq` is installed.

    Returns:
        `False` if auto-gptq is not installed, `True` if an installed version satisfies the
        minimum supported version (0.5.0).

    Raises:
        ImportError: If auto-gptq is installed but older than the minimum supported version.
    """
    if importlib.util.find_spec("auto_gptq") is None:
        # Fix: the original fell through here and implicitly returned `None`; every sibling
        # predicate in this module returns a real bool, and callers may use `is False`.
        return False

    AUTOGPTQ_MINIMUM_VERSION = packaging.version.parse("0.5.0")
    version_autogptq = packaging.version.parse(importlib_metadata.version("auto_gptq"))
    if AUTOGPTQ_MINIMUM_VERSION <= version_autogptq:
        return True
    # Message fixed: the check accepts versions equal to the minimum, so "above" was wrong.
    raise ImportError(
        f"Found an incompatible version of auto-gptq. Found version {version_autogptq}, "
        f"but only versions at or above {AUTOGPTQ_MINIMUM_VERSION} are supported"
    )
|
|
|
|
@lru_cache
def is_optimum_available() -> bool:
    """Return whether the `optimum` package is installed (cached)."""
    # find_spec returns None when the package is absent; coerce to bool.
    return bool(importlib.util.find_spec("optimum"))
|
|
|
|
@lru_cache
def is_torch_tpu_available(check_device=True):
    "Checks if `torch_xla` is installed and potentially if a TPU is in the environment"
    # Guard clauses: bail out early when torch_xla is absent, and skip the device
    # probe entirely when the caller only cares about the package being installed.
    if importlib.util.find_spec("torch_xla") is None:
        return False
    if not check_device:
        return True
    try:
        import torch_xla.core.xla_model as xm

        # Will raise RuntimeError if no XLA/TPU device can be acquired.
        _ = xm.xla_device()
    except RuntimeError:
        return False
    return True
|
|
|
|
@lru_cache
def is_aqlm_available():
    """Return whether the `aqlm` package is installed (cached)."""
    spec = importlib.util.find_spec("aqlm")
    return spec is not None
|
|
|
|
@lru_cache
def is_auto_awq_available():
    """Return whether the `awq` (AutoAWQ) package is installed (cached)."""
    # find_spec is None when the package is absent.
    return bool(importlib.util.find_spec("awq"))
|
|
|
|
@lru_cache
def is_eetq_available():
    """Return whether a usable `eetq` package is installed (cached).

    An installed eetq that fails to import because of the known `shard_checkpoint`
    incompatibility with newer transformers versions is reported as unavailable;
    any other import failure is deliberately ignored (best-effort).
    """
    if importlib.util.find_spec("eetq") is None:
        return False
    try:
        from eetq import EetqLinear  # noqa: F401
    except ImportError as exc:
        # eetq is currently broken with newer transformers versions because it
        # tries to import shard_checkpoint.
        if "shard_checkpoint" in str(exc):
            return False
    return True
|
|
|
|
@lru_cache
def is_hqq_available():
    """Return whether the `hqq` package is installed (cached)."""
    spec = importlib.util.find_spec("hqq")
    return spec is not None
|
|
|
|
@lru_cache
def is_torchao_available() -> bool:
    """Return whether a supported version of `torchao` is installed.

    Returns:
        `False` if torchao is not installed, `True` if an installed version satisfies the
        minimum supported version (0.4.0).

    Raises:
        ImportError: If torchao is installed but older than the minimum supported version.
    """
    if importlib.util.find_spec("torchao") is None:
        return False

    TORCHAO_MINIMUM_VERSION = packaging.version.parse("0.4.0")
    torchao_version = packaging.version.parse(importlib_metadata.version("torchao"))

    if torchao_version < TORCHAO_MINIMUM_VERSION:
        # Message fixed: the check accepts versions equal to the minimum, so "above" was wrong.
        raise ImportError(
            f"Found an incompatible version of torchao. Found version {torchao_version}, "
            f"but only versions at or above {TORCHAO_MINIMUM_VERSION} are supported"
        )
    return True
|
|