File size: 2,614 Bytes
c28358e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
#!/usr/bin/env python3
"""
Check available hardware for model inference
"""

import torch
import psutil
import logging
import subprocess
import platform

logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

def check_hardware():
    """Log a summary of local hardware and GPT-OSS-120B fit recommendations.

    Reports system RAM, CUDA GPUs (name and VRAM per device), Apple Silicon
    chip info (macOS only), and free disk space, then logs whether the
    machine plausibly meets the model's memory requirements.

    Returns:
        None. All output goes to the module-level logger.
    """
    logger.info("=" * 50)
    logger.info("๐Ÿ–ฅ๏ธ  Hardware Assessment")
    logger.info("=" * 50)
    
    # System RAM — snapshot once so total/available come from the same reading
    # (the original called psutil.virtual_memory() twice).
    mem = psutil.virtual_memory()
    ram_gb = mem.total / (1024 ** 3)
    ram_available_gb = mem.available / (1024 ** 3)
    logger.info(f"๐Ÿ’พ System RAM: {ram_gb:.1f} GB total, {ram_available_gb:.1f} GB available")
    
    # GPU info (if available)
    if torch.cuda.is_available():
        gpu_count = torch.cuda.device_count()
        logger.info(f"๐ŸŽฎ CUDA GPUs: {gpu_count}")
        for i in range(gpu_count):
            gpu_name = torch.cuda.get_device_name(i)
            gpu_memory = torch.cuda.get_device_properties(i).total_memory / (1024 ** 3)
            logger.info(f"   GPU {i}: {gpu_name} ({gpu_memory:.1f} GB VRAM)")
    else:
        logger.info("๐ŸŽฎ No CUDA GPUs detected")
    
    # Apple Silicon GPU (M1/M2/M3) — system_profiler exists only on macOS,
    # so gate on the platform instead of relying on the exception path.
    if platform.system() == "Darwin":
        try:
            result = subprocess.run(['system_profiler', 'SPDisplaysDataType'],
                                    capture_output=True, text=True, timeout=30)
            if 'Chip' in result.stdout:
                for line in result.stdout.split('\n'):
                    # Matches e.g. "Chipset Model: Apple M2" lines.
                    if 'Chip' in line and 'Model' in line:
                        logger.info(f"๐ŸŽ Apple Silicon: {line.strip()}")
        except (OSError, subprocess.SubprocessError):
            # Best-effort probe: missing binary or a timeout just means we
            # skip the Apple GPU line; never crash the whole assessment.
            pass
    
    # Disk space
    disk = psutil.disk_usage('/')
    disk_free_gb = disk.free / (1024 ** 3)
    logger.info(f"๐Ÿ’ฟ Disk space: {disk_free_gb:.1f} GB free")
    
    # Model requirements
    logger.info("\n๐Ÿ“‹ GPT-OSS-120B Requirements:")
    logger.info("   Minimum: 64GB RAM (very slow)")
    logger.info("   Recommended: 128GB+ RAM with 80GB+ GPU VRAM")
    logger.info("   Ideal: Multiple high-end GPUs with 80GB+ VRAM each")
    
    # Recommendations — thresholds mirror the requirements logged above.
    logger.info("\n๐Ÿ’ก Recommendations:")
    if ram_gb >= 128:
        logger.info("   โœ… You have enough RAM to attempt loading (will be slow)")
    elif ram_gb >= 64:
        logger.info("   โš ๏ธ  Borderline RAM - loading may be very slow or fail")
    else:
        logger.info("   โŒ Insufficient RAM for this model")
    
    # The model weights alone need on the order of 100 GB on disk.
    if disk_free_gb < 100:
        logger.info("   โš ๏ธ  Low disk space - consider freeing up space")

# Script entry point: run the hardware assessment when executed directly.
if __name__ == "__main__":
    check_hardware()