| """Runtime compatibility patch for HF Inference Toolkit + Transformers 5.""" |
|
|
| from __future__ import annotations |
|
|
|
|
| def _patch_transformers_file_utils() -> None: |
| try: |
| import transformers.file_utils as file_utils |
| except Exception: |
| return |
|
|
| if hasattr(file_utils, "is_tf_available"): |
| return |
|
|
| try: |
| from transformers.utils import is_tf_available as _is_tf_available |
| except Exception: |
| def _is_tf_available() -> bool: |
| return False |
|
|
| try: |
| setattr(file_utils, "is_tf_available", _is_tf_available) |
| except Exception: |
| pass |
|
|
|
|
| def _patch_transformers_utils_constants() -> None: |
| try: |
| import transformers.utils as tutils |
| except Exception: |
| return |
|
|
| if not hasattr(tutils, "FLAX_WEIGHTS_NAME"): |
| try: |
| setattr(tutils, "FLAX_WEIGHTS_NAME", "flax_model.msgpack") |
| except Exception: |
| pass |
|
|
|
|
| def _patch_transformers_tokenizer_aliases() -> None: |
| try: |
| import transformers |
| except Exception: |
| return |
|
|
| if hasattr(transformers, "MT5Tokenizer"): |
| return |
|
|
| try: |
| if hasattr(transformers, "T5Tokenizer"): |
| setattr(transformers, "MT5Tokenizer", getattr(transformers, "T5Tokenizer")) |
| except Exception: |
| pass |
|
|
|
|
# Apply every compatibility patch at import time, so merely importing this
# module prepares the transformers namespace before the toolkit touches it.
# Each patch is independently best-effort and never raises.
_patch_transformers_file_utils()
_patch_transformers_utils_constants()
_patch_transformers_tokenizer_aliases()
|
|