#!/usr/bin/env python3
"""
Verify GPU setup for Hugging Face Spaces deployment.

Runs a sequence of self-contained checks (PyTorch/CUDA, spaCy GPU,
transformer packages, model loading, environment) and prints the result
of each, following the official HuggingFace documentation for GPU
verification.
"""
import os
import sys

print("=== GPU Setup Verification for HuggingFace Spaces ===")
print()
# Test 1: Check PyTorch installation and CUDA availability.
# NOTE(review): the status glyphs in this file arrived as mojibake ("β");
# reconstructed here as ✅ / ❌ / ⚠️ — confirm against the intended output.
print("1. PyTorch/CUDA Check:")
try:
    import torch
    print(f" ✅ PyTorch installed: version {torch.__version__}")
    print(f" Is CUDA available: {torch.cuda.is_available()}")
    if torch.cuda.is_available():
        # current_device() gives the active device index for get_device_name().
        print(f" CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
        print(f" CUDA version: {torch.version.cuda}")
    else:
        print(" Running on CPU")
except ImportError as e:
    print(f" ❌ PyTorch not installed or import error: {e}")
except Exception as e:
    # Broad catch is deliberate: a diagnostic script should report, not crash.
    print(f" ❌ Error checking PyTorch: {e}")
print()
# Test 2: Check spaCy GPU configuration.
print("2. spaCy GPU Check:")
try:
    import spacy
    # spacy.prefer_gpu() returns a bool (True if a GPU was activated), not a
    # device id — the original printed "enabled on device True". Report it as
    # a flag instead.
    gpu_enabled = spacy.prefer_gpu()
    if gpu_enabled:
        print(" ✅ spaCy GPU enabled")
    else:
        print(" ❌ spaCy could not enable GPU (will use CPU)")
    print(f" spaCy version: {spacy.__version__}")
except ImportError:
    print(" ❌ spaCy not installed")
except Exception as e:
    # Broad catch is deliberate: a diagnostic script should report, not crash.
    print(f" ❌ Error checking spaCy: {e}")
print()
# Test 3: Check transformer packages. The two copy-pasted try/import stanzas
# are collapsed into one data-driven loop.
print("3. Transformer Packages Check:")
import importlib

packages_found = []
# (importable module name, package name as published on PyPI)
for module_name, package_name in (
    ("spacy_transformers", "spacy-transformers"),
    ("spacy_curated_transformers", "spacy-curated-transformers"),
):
    try:
        importlib.import_module(module_name)
    except ImportError:
        print(f" ❌ {package_name} not installed")
    else:
        packages_found.append(package_name)
        print(f" ✅ {package_name} installed")
if not packages_found:
    print(" ⚠️ No transformer packages found - transformer models won't work!")
else:
    print(f" Found packages: {', '.join(packages_found)}")
print()
# Test 4: Try loading and running the English transformer model.
print("4. Transformer Model Loading Test:")
try:
    import spacy
    print(" Testing en_core_web_trf...")
    nlp = spacy.load("en_core_web_trf")
    # Process a short test sentence to prove the pipeline actually runs.
    doc = nlp("This is a test sentence.")
    print(f" ✅ Successfully loaded and processed text with {len(doc)} tokens")
    # Report which device each component's model ended up on, where exposed.
    # The original guarded on hasattr(nlp, 'pipe'), but 'pipeline' is the
    # attribute actually iterated below.
    if hasattr(nlp, 'pipeline'):
        for name, component in nlp.pipeline:
            if hasattr(component, 'model') and hasattr(component.model, 'device'):
                print(f" Component '{name}' device: {component.model.device}")
except Exception as e:
    # Broad catch is deliberate: missing model/package should not abort the
    # remaining checks.
    print(f" ❌ Error loading transformer model: {e}")
print()
# Test 5: Environment information.
print("5. Environment Information:")
print(f" Platform: {sys.platform}")
print(f" Python: {sys.version}")
print(f" Working directory: {os.getcwd()}")
# SPACES / SPACE_ID / SPACE_HOST — presumably set by the HuggingFace Spaces
# runtime; verify against the Spaces container docs.
if os.environ.get('SPACES'):
    print(" ✅ Running in HuggingFace Spaces")
    print(f" Space ID: {os.environ.get('SPACE_ID', 'N/A')}")
    print(f" Space Host: {os.environ.get('SPACE_HOST', 'N/A')}")
else:
    print(" ❌ Not running in HuggingFace Spaces (local environment)")
print()
print("=== Verification Complete ===")
# Summary
print("\nSummary:")
# Guard on sys.modules so this doesn't NameError when the torch import in
# Test 1 failed.
if 'torch' in sys.modules and torch.cuda.is_available():
    # NOTE(review): the original string literal was split across two source
    # lines (a syntax error) with a mojibake glyph; reconstructed as one line.
    print("✅ GPU support is properly configured for HuggingFace Spaces")
else:
    print("⚠️ GPU not available - will fall back to CPU processing")
    print(" This is normal for local development on Mac")
    print(" GPU will be available when deployed to HuggingFace Spaces with GPU hardware")