"""
Version Compatibility Test Script

Tests that all dependencies are installed, mutually compatible, and can be
imported successfully.
"""
|
|
| import sys |
| import subprocess |
| import importlib.util |
|
|
def check_package_version(package_name, min_version=None, max_version=None):
    """Check that *package_name* imports and (optionally) lies in a version range.

    Args:
        package_name: Importable module name, e.g. ``"torch"``.
        min_version: Optional inclusive lower bound, e.g. ``"2.0.0"``.
        max_version: Optional inclusive upper bound.

    Returns:
        True if the package imports and, when bounds are given and the
        version string is numerically parseable, satisfies them; False
        on import failure, out-of-range version, or any probe error.
    """

    def _parse(version_string):
        # Best-effort numeric parse: "2.1.0+cu118" -> (2, 1, 0).
        # Stops at the first non-numeric component; local tags after "+"
        # are ignored. NOTE(review): not full PEP 440 — pre-releases like
        # "2.1.0rc1" compare as (2, 1).
        parts = []
        for token in version_string.split("+")[0].split("."):
            if not token.isdigit():
                break
            parts.append(int(token))
        return tuple(parts)

    try:
        package = importlib.import_module(package_name)
        version = getattr(package, '__version__', 'unknown')
        # Only enforce bounds when the package actually reports a version.
        if version != 'unknown' and (min_version or max_version):
            parsed = _parse(version)
            if min_version and parsed and parsed < _parse(min_version):
                print(f"❌ {package_name}: {version} is older than required {min_version}")
                return False
            if max_version and parsed and parsed > _parse(max_version):
                print(f"❌ {package_name}: {version} is newer than allowed {max_version}")
                return False
        print(f"✅ {package_name}: {version}")
        return True
    except ImportError as e:
        print(f"❌ {package_name}: Not installed ({e})")
        return False
    except Exception as e:
        print(f"⚠️ {package_name}: Error checking version ({e})")
        return False
|
|
def test_torch_device_mesh():
    """Probe for ``torch.distributed.device_mesh`` (source of a prior error).

    Returns:
        True when the submodule is present, and also when it is absent
        (its absence is expected for torch < 2.2.0, so it is not a failure);
        False only if probing torch itself raises.
    """
    try:
        import torch
        if hasattr(torch, 'distributed') and hasattr(torch.distributed, 'device_mesh'):
            print("✅ torch.distributed.device_mesh: Available")
        else:
            # Missing on older torch builds — informational, not a failure.
            print("⚠️ torch.distributed.device_mesh: Not available (expected for torch < 2.2.0)")
        return True
    except Exception as e:
        print(f"❌ torch.distributed.device_mesh: Error ({e})")
        return False
|
|
def test_transformers_mistral():
    """Check transformers imports and the Mistral model module specifically.

    The inner import distinguishes the known ``device_mesh`` incompatibility
    (treated as a hard failure) from any other Mistral import problem
    (reported as a warning only).

    Returns:
        True if the core transformers imports succeed and Mistral does not
        hit the device_mesh issue; False otherwise.
    """
    try:
        # Import smoke test only — the names are intentionally unused.
        from transformers import AutoTokenizer, AutoModelForCausalLM
        print("✅ transformers.AutoTokenizer: OK")
        print("✅ transformers.AutoModelForCausalLM: OK")

        try:
            from transformers.models.mistral import modeling_mistral
            print("✅ transformers.models.mistral.modeling_mistral: OK")
        except ImportError as e:
            if "device_mesh" in str(e):
                # The specific torch/transformers mismatch this script exists to catch.
                print("❌ transformers.models.mistral: Still has device_mesh issue")
                return False
            print(f"⚠️ transformers.models.mistral: Other import issue ({e})")

        return True
    except Exception as e:
        print(f"❌ transformers imports: Error ({e})")
        return False
|
|
def test_tokenizer_compatibility():
    """Smoke-test tokenizer construction (regression check for the enum error).

    NOTE(review): ``from_pretrained`` fetches tokenizer files from the
    Hugging Face Hub, so this test needs network access (or a warm cache).
    A Mistral tokenizer failure is reported as a warning, not a failure —
    presumably because the repo may be gated; verify against usage.

    Returns:
        True if the DialoGPT tokenizer loads; False on import or load error.
    """
    try:
        from transformers import AutoTokenizer

        tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
        print("✅ DialoGPT tokenizer: OK")

        try:
            tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
            print("✅ Mistral tokenizer: OK")
        except Exception as e:
            print(f"⚠️ Mistral tokenizer: {e}")

        return True
    except Exception as e:
        print(f"❌ Tokenizer test: {e}")
        return False
|
|
def main():
    """Run all version and compatibility checks and print a summary banner.

    Results are printed to stdout only; individual check return values are
    intentionally ignored so every check runs even after a failure.
    """
    print("🧪 Version Compatibility Test")
    print("=" * 50)

    print("\n📦 Package Versions:")
    check_package_version("torch")
    check_package_version("transformers")
    check_package_version("accelerate")
    check_package_version("bitsandbytes")
    check_package_version("gradio")

    print("\n🔍 Specific Compatibility Tests:")

    test_torch_device_mesh()

    test_transformers_mistral()

    test_tokenizer_compatibility()

    print("\n" + "=" * 50)
    print("✅ If all tests passed, version compatibility is good!")
    print("❌ If tests failed, there may still be version conflicts")
|
|
# Script entry point: run checks only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|