import sys

# AutoTokenizer resolves the correct tokenizer class for a checkpoint,
# which is what the T5Tokenizer shim below delegates to.
from transformers import AutoTokenizer
|
|
class T5TokenizerReplacement:
    """Drop-in stand-in for ``T5Tokenizer`` that delegates to ``AutoTokenizer``.

    Installed over ``transformers.models.t5.tokenization_t5.T5Tokenizer`` so
    code that loads tokenizers through the T5 class gets whatever tokenizer
    ``AutoTokenizer`` resolves for the checkpoint (ByT5-compatible, per the
    patch message printed at install time).
    """

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):
        """Load a tokenizer via ``AutoTokenizer.from_pretrained``.

        All positional and keyword arguments are forwarded verbatim, so the
        call signature matches ``T5Tokenizer.from_pretrained`` for callers.
        """
        return AutoTokenizer.from_pretrained(pretrained_model_name_or_path, *args, **kwargs)
|
|
| |
# Monkey-patch the transformers T5 tokenizer module so anything that imports
# T5Tokenizer from it gets the AutoTokenizer-backed replacement instead.
# Guarded best-effort, matching the style of patch_imagen_project() below.
try:
    import transformers.models.t5.tokenization_t5 as t5_module

    t5_module.T5Tokenizer = T5TokenizerReplacement
    print("[PATCH] Replaced T5Tokenizer with AutoTokenizer (ByT5 compatible)")
except ImportError:
    # transformers (or a dependency of its t5 submodule) is unavailable --
    # nothing to patch.
    pass
|
|
| |
def patch_imagen_project():
    """Replace imagen_pytorch's ``project`` with an MPS-compatible version.

    The replacement computes in float32 on Apple ``mps`` devices (which do
    not support float64) and float64 elsewhere, restoring the input dtype on
    return.  Silently does nothing if ``imagen_pytorch`` is not installed.
    """
    try:
        import imagen_pytorch.imagen_pytorch as imagen_module
    except ImportError:
        # imagen_pytorch absent -- nothing to patch.
        return

    def project_mps_compatible(x, y):
        """Split ``y`` into components parallel and orthogonal to ``x``
        along the last dimension; both results keep ``x``'s dtype."""
        import torch

        original_dtype = x.dtype
        # float64 is unsupported on the MPS backend; fall back to float32
        # there, keep the higher-precision float64 path everywhere else.
        x_compute = x.float() if x.device.type == "mps" else x.double()
        y_compute = y.float() if y.device.type == "mps" else y.double()

        x_compute = torch.nn.functional.normalize(x_compute, dim=-1)
        parallel = (x_compute * y_compute).sum(dim=-1, keepdim=True) * x_compute
        orthogonal = y_compute - parallel

        return parallel.to(original_dtype), orthogonal.to(original_dtype)

    imagen_module.project = project_mps_compatible
    print("[PATCH] Applied MPS compatibility patch for project function")
|
|
|
|
# Apply the imagen-pytorch patch immediately when this module is imported.
patch_imagen_project()