#!/usr/bin/env python3
"""
Version Compatibility Test Script
Tests that all dependencies are compatible and can import successfully
"""
import importlib
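
# Context: some transformers releases import torch.distributed.device_mesh,
# which only exists in newer torch builds (2.2.0+); with an older torch the
# import fails. The checks below probe for that mismatch and for a tokenizer
# error seen previously.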

def check_package_version(package_name, min_version=None, max_version=None):
    """Check that a package is installed and, optionally, within a version range."""
    try:
        package = importlib.import_module(package_name)
        version = getattr(package, '__version__', 'unknown')
        if version != 'unknown' and (min_version or max_version):
            # packaging is assumed available (it ships with pip/setuptools)
            from packaging.version import parse
            if min_version and parse(version) < parse(min_version):
                print(f"⚠️ {package_name}: {version} is older than required {min_version}")
                return False
            if max_version and parse(version) > parse(max_version):
                print(f"⚠️ {package_name}: {version} is newer than allowed {max_version}")
                return False
        print(f"✅ {package_name}: {version}")
        return True
    except ImportError as e:
        print(f"❌ {package_name}: Not installed ({e})")
        return False
    except Exception as e:
        print(f"⚠️ {package_name}: Error checking version ({e})")
        return False
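
# Example with explicit bounds (illustrative values, not pinned requirements):
#   check_package_version("torch", min_version="2.0.0", max_version="2.1.2")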

def test_torch_device_mesh():
    """Test the specific issue that caused the previous error"""
    try:
        import torch
        if hasattr(torch, 'distributed') and hasattr(torch.distributed, 'device_mesh'):
            print("✅ torch.distributed.device_mesh: Available")
        else:
            print("⚠️ torch.distributed.device_mesh: Not available (expected for torch < 2.2.0)")
        return True  # Either outcome is acceptable here
    except Exception as e:
        print(f"❌ torch.distributed.device_mesh: Error ({e})")
        return False

def test_transformers_mistral():
    """Test that transformers can import Mistral models without device_mesh"""
    try:
        from transformers import AutoTokenizer, AutoModelForCausalLM
        print("✅ transformers.AutoTokenizer: OK")
        print("✅ transformers.AutoModelForCausalLM: OK")
        # Test the specific model import that failed before
        try:
            # This should not fail with compatible versions
            from transformers.models.mistral import modeling_mistral
            print("✅ transformers.models.mistral.modeling_mistral: OK")
        except ImportError as e:
            if "device_mesh" in str(e):
                print("❌ transformers.models.mistral: Still has device_mesh issue")
                return False
            print(f"⚠️ transformers.models.mistral: Other import issue ({e})")
        return True
    except Exception as e:
        print(f"❌ transformers imports: Error ({e})")
        return False

def test_tokenizer_compatibility():
    """Test tokenizer creation (the enum error)"""
    try:
        from transformers import AutoTokenizer
        # Start with a simple, reliable model
        tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
        print("✅ DialoGPT tokenizer: OK")
        # Then check that Mistral tokenizers load as well
        try:
            tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
            print("✅ Mistral tokenizer: OK")
        except Exception as e:
            print(f"⚠️ Mistral tokenizer: {e}")
        return True
    except Exception as e:
        print(f"❌ Tokenizer test: {e}")
        return False
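
# Note: test_tokenizer_compatibility() downloads tokenizer files from the
# Hugging Face Hub on first run, so it needs network access or a warm local cache.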

def main():
    print("🧪 Version Compatibility Test")
    print("=" * 50)
    # Test core packages
    print("\n📦 Package Versions:")
    check_package_version("torch")
    check_package_version("transformers")
    check_package_version("accelerate")
    check_package_version("bitsandbytes")
    check_package_version("gradio")
    print("\n🔍 Specific Compatibility Tests:")
    # Test the device_mesh issue
    test_torch_device_mesh()
    # Test transformers imports
    test_transformers_mistral()
    # Test the tokenizer enum issue
    test_tokenizer_compatibility()
    print("\n" + "=" * 50)
    print("✅ If all tests passed, version compatibility is good!")
    print("❌ If tests failed, there may still be version conflicts")


if __name__ == "__main__":
    main()