# Hugging Face Spaces page header (scrape residue, not code): "Spaces: Running on Zero"
# Patch for lazy loading issues
import sys
import importlib
def patch_t5_tokenizer():
    """Force proper loading of T5 tokenizer"""
    try:
        # Importing the concrete submodule defeats transformers' lazy
        # module machinery and materializes the real tokenizer module.
        import transformers.models.t5.tokenization_t5 as t5_mod

        # Confirm the class actually landed on the loaded module.
        if hasattr(t5_mod, 'T5Tokenizer'):
            print("T5Tokenizer successfully loaded")
            return True
        print("T5Tokenizer not found in module")
        return False
    except Exception as e:
        # Best-effort: report the problem and signal failure to the caller.
        print(f"Failed to patch T5 tokenizer: {e}")
        return False
def patch_transformers():
    """Apply patches to fix lazy loading.

    Eagerly imports the T5 tokenizer classes so the transformers lazy
    module machinery resolves them, and re-attaches ``T5Tokenizer`` to the
    concrete module if the lazy loader failed to expose it.

    Returns:
        bool: True when the patch was applied successfully, False when
        transformers (or the tokenizer classes) could not be imported.
    """
    try:
        # Import transformers first
        import transformers
        # Importing the classes eagerly forces lazy resolution.
        from transformers import T5Tokenizer, T5TokenizerFast
        # Monkey patch the lazy module if needed
        import transformers.models.t5.tokenization_t5 as t5_tokenizer_module
        # Ensure classes are properly exposed
        if not hasattr(t5_tokenizer_module, 'T5Tokenizer'):
            t5_tokenizer_module.T5Tokenizer = T5Tokenizer
        print("Transformers patching complete")
        return True
    except Exception as e:
        # Mirror patch_t5_tokenizer(): an import failure here is exactly
        # the lazy-loading breakage this script works around — report it
        # and signal failure instead of crashing the caller.
        print(f"Failed to patch transformers: {e}")
        return False
if __name__ == "__main__":
    # Apply both patches when executed as a script.
    for apply_patch in (patch_transformers, patch_t5_tokenizer):
        apply_patch()