# simple-text-analyzer / test_current_gpu.py
# Author: egumasa
# Commit: "Enhance GPU support with stronger enforcement" (bb65e54)
#!/usr/bin/env python3
"""Quick GPU diagnostic for current environment."""
import os
import sys

# Basic interpreter/platform context, printed before any optional imports
# so the report is useful even if torch/spacy are missing.
print("=== GPU Diagnostic ===")
print(f"Python: {sys.version}")
print(f"Platform: {sys.platform}")
print(f"Current directory: {os.getcwd()}")
# --- PyTorch / CUDA check ---------------------------------------------
# Report the torch version and, when CUDA is usable, each device's name
# and total memory. Import failures are reported, not fatal.
try:
    import torch

    print(f"\nPyTorch: {torch.__version__}")
    print(f"CUDA available: {torch.cuda.is_available()}")
    if torch.cuda.is_available():
        print(f"CUDA version: {torch.version.cuda}")
        print(f"GPU count: {torch.cuda.device_count()}")
        for i in range(torch.cuda.device_count()):
            print(f"GPU {i}: {torch.cuda.get_device_name(i)}")
            # total_memory is in bytes; convert to GiB for readability.
            print(f" Memory: {torch.cuda.get_device_properties(i).total_memory / 1024**3:.2f} GB")
    else:
        print("Running on CPU")
except ImportError as e:
    print(f"PyTorch not available: {e}")

# Environment variables are valuable for diagnosing a *failed* torch
# install too, so print them unconditionally (the original skipped them
# on ImportError because they sat inside the try block).
print("\nRelevant environment variables:")
for var in ['CUDA_VISIBLE_DEVICES', 'CUDA_HOME', 'SPACES', 'SPACE_ID']:
    print(f" {var}: {os.environ.get(var, 'Not set')}")
# --- SpaCy check ------------------------------------------------------
# Report the spaCy version and whether it can use (or be forced onto)
# the GPU. Import failures are reported, not fatal.
print("\n--- SpaCy Configuration ---")
try:
    import spacy

    print(f"SpaCy: {spacy.__version__}")
    # prefer_gpu() returns True if GPU activation succeeded, else False.
    gpu_enabled = spacy.prefer_gpu()
    print(f"spacy.prefer_gpu(): {gpu_enabled}")
    # Bug fix: the original referenced `torch` directly here, which raised
    # NameError (misreported below as "SpaCy error") whenever the earlier
    # torch import had failed. Looking it up in sys.modules is safe in
    # both cases.
    torch_mod = sys.modules.get('torch')
    if torch_mod is not None and torch_mod.cuda.is_available():
        try:
            spacy.require_gpu()
            print("spacy.require_gpu(): Success")
        except Exception as e:
            print(f"spacy.require_gpu(): Failed - {e}")
except ImportError as e:
    print(f"SpaCy not available: {e}")
except Exception as e:
    print(f"SpaCy error: {e}")