# chatbot/test_versions.py
# Commit fd5eb19 ("Resolving issues") by Deva1211
#!/usr/bin/env python3
"""
Version Compatibility Test Script
Tests that all dependencies are compatible and can import successfully
"""
import sys
import subprocess
import importlib.util
def _parse_version(version_string):
    """Best-effort parse of a dotted version string into a comparable tuple.

    Takes the leading numeric run of each dot-separated piece ("2.1.0rc1"
    -> (2, 1, 0)).  Returns None when no numeric component can be found
    (e.g. for the "unknown" placeholder), so callers can skip comparison.
    """
    components = []
    for piece in str(version_string).split("."):
        digits = ""
        for ch in piece:
            if not ch.isdigit():
                break
            digits += ch
        if not digits:
            break
        components.append(int(digits))
    return tuple(components) if components else None

def check_package_version(package_name, min_version=None, max_version=None):
    """Check if a package is installed and within version range.

    Args:
        package_name: Importable module name (e.g. "torch").
        min_version: Optional inclusive lower bound, e.g. "2.0.0".
        max_version: Optional inclusive upper bound.

    Returns:
        True when the package imports and satisfies any given bounds;
        False on import failure or an out-of-range version.  Bounds are
        skipped when the package version (or the bound) is unparseable.
    """
    try:
        package = importlib.import_module(package_name)
        version = getattr(package, '__version__', 'unknown')
    except ImportError as e:
        print(f"❌ {package_name}: Not installed ({e})")
        return False
    except Exception as e:
        print(f"⚠️ {package_name}: Error checking version ({e})")
        return False

    # Bug fix: min_version/max_version were previously accepted but never
    # used, so the "version range" check silently passed everything.
    installed = _parse_version(version)
    if installed is not None:
        lower = _parse_version(min_version) if min_version else None
        if lower is not None and installed < lower:
            print(f"❌ {package_name}: {version} is below required minimum {min_version}")
            return False
        upper = _parse_version(max_version) if max_version else None
        if upper is not None and installed > upper:
            print(f"❌ {package_name}: {version} is above allowed maximum {max_version}")
            return False
    print(f"βœ… {package_name}: {version}")
    return True
def test_torch_device_mesh():
    """Probe for torch.distributed.device_mesh (source of a past failure).

    Both presence and absence of the attribute count as success, since
    older torch builds legitimately lack it; only an unexpected exception
    while probing yields False.
    """
    try:
        import torch
        mesh_present = hasattr(torch, 'distributed') and hasattr(
            torch.distributed, 'device_mesh')
    except Exception as e:
        print(f"❌ torch.distributed.device_mesh: Error ({e})")
        return False

    if mesh_present:
        print("βœ… torch.distributed.device_mesh: Available")
    else:
        print("⚠️ torch.distributed.device_mesh: Not available (expected for torch < 2.2.0)")
    return True
def test_transformers_mistral():
"""Test if transformers can import mistral models without device_mesh"""
try:
from transformers import AutoTokenizer, AutoModelForCausalLM
print("βœ… transformers.AutoTokenizer: OK")
print("βœ… transformers.AutoModelForCausalLM: OK")
# Test specific model imports that failed before
try:
# This should not fail with compatible versions
from transformers.models.mistral import modeling_mistral
print("βœ… transformers.models.mistral.modeling_mistral: OK")
except ImportError as e:
if "device_mesh" in str(e):
print("❌ transformers.models.mistral: Still has device_mesh issue")
return False
else:
print(f"⚠️ transformers.models.mistral: Other import issue ({e})")
return True
except Exception as e:
print(f"❌ transformers imports: Error ({e})")
return False
def test_tokenizer_compatibility():
    """Exercise tokenizer creation (guards against the earlier enum error).

    The DialoGPT tokenizer must load for the test to pass; the Mistral
    tokenizer is best-effort and only emits a warning when unavailable.
    NOTE: downloads models from the Hugging Face hub, so network access
    is required for the success path.
    """
    try:
        from transformers import AutoTokenizer
        # Start with a small, dependable model to exercise the basic path.
        tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
        print("βœ… DialoGPT tokenizer: OK")
    except Exception as e:
        print(f"❌ Tokenizer test: {e}")
        return False

    try:
        tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-Instruct-v0.1")
        print("βœ… Mistral tokenizer: OK")
    except Exception as e:
        print(f"⚠️ Mistral tokenizer: {e}")
    return True
def main():
    """Run every compatibility probe and report the overall outcome.

    Returns:
        True when every probe succeeded, False otherwise (callers may
        ignore the value; the summary is also printed).
    """
    print("πŸ§ͺ Version Compatibility Test")
    print("=" * 50)

    print("\nπŸ“¦ Package Versions:")
    results = []
    # Core stack: runtime (torch), modeling (transformers/accelerate/
    # bitsandbytes), and UI (gradio).
    for pkg in ("torch", "transformers", "accelerate", "bitsandbytes", "gradio"):
        results.append(check_package_version(pkg))

    print("\nπŸ” Specific Compatibility Tests:")
    results.append(test_torch_device_mesh())   # device_mesh availability
    results.append(test_transformers_mistral())  # mistral import path
    results.append(test_tokenizer_compatibility())  # tokenizer enum issue

    print("\n" + "=" * 50)
    # Bug fix: the script previously ignored every probe's result and
    # printed BOTH verdict lines unconditionally; now only the verdict
    # that actually applies is shown.
    if all(results):
        print("βœ… If all tests passed, version compatibility is good!")
        return True
    print("❌ If tests failed, there may still be version conflicts")
    return False
# Entry point when executed directly (python test_versions.py).
if __name__ == "__main__":
    main()