File size: 906 Bytes
fca155a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
import os
import shutil
import subprocess
import sys

def check_gpu():
    """Probe for a working NVIDIA driver by running ``nvidia-smi``.

    Prints one of three status lines:
      - not found on PATH,
      - found but exited non-zero,
      - exited zero (driver detected).

    Returns:
        None. Output goes to stdout (including nvidia-smi's own output).
    """
    print("\n--- GPU Check (via nvidia-smi) ---")
    if not shutil.which("nvidia-smi"):
        print("❌ nvidia-smi not found. CUDA might not be in PATH.")
        return
    # subprocess.run with an argument list avoids the shell entirely and
    # yields the real exit code (os.system returns a platform-encoded
    # wait status on POSIX); check=False because a failure is reported,
    # not raised.
    result = subprocess.run(["nvidia-smi"], check=False)
    if result.returncode == 0:
        print("✅ NVIDIA Driver detected.")
    else:
        print("⚠️ nvidia-smi found but returned error.")

def check_llama_cuda():
    """Verify that the llama-cpp-python package is importable.

    On success, prints the package's installed file location. On
    ImportError, prints a failure message and terminates the process.

    Raises:
        SystemExit: with code 1 when llama-cpp-python is not installed.
    """
    print("\n--- Llama.cpp CUDA Check ---")
    try:
        # Import the module itself rather than the unused `Llama` symbol;
        # the import succeeding is the whole check.
        import llama_cpp
    except ImportError:
        print("❌ llama-cpp-python is NOT installed.")
        sys.exit(1)
    print("✅ llama-cpp-python is installed.")
    # Use the module object directly instead of the indirect
    # sys.modules['llama_cpp'] lookup — same value, clearer intent.
    print(f"Llama.cpp package location: {llama_cpp.__file__}")

if __name__ == "__main__":
    print(f"Python Version: {sys.version}")
    check_gpu()
    check_llama_cuda()
    print("\nEnvironment check complete.")