import torch


# Check whether the reported device name carries the ZLUDA marker.
def zluda_available(device_name):
    """Return True if *device_name* indicates a ZLUDA-backed CUDA device."""
    marker = "[ZLUDA]"
    return device_name.find(marker) != -1


# Disable ZLUDA cuDNN support to prevent errors.
def enable_zluda_config():
    """Detect a ZLUDA device and, if found, disable the SDP/cuDNN backends
    that are known to misbehave under ZLUDA.

    Does nothing when CUDA is unavailable. Each backend toggle is applied
    only if the installed torch version exposes the corresponding setter.
    """
    if not (hasattr(torch, 'cuda') and torch.cuda.is_available()):
        return

    device_name = torch.cuda.get_device_name(0)
    print('Device name: ', device_name)
    print('Cuda is available: ', torch.cuda.is_available())
    print('Cuda version: ', torch.version.cuda)
    print('ZLUDA is available: ', zluda_available(device_name))

    if not zluda_available(device_name):
        return

    # cuDNN itself is unconditionally disabled under ZLUDA.
    torch.backends.cudnn.enabled = False

    backend = torch.backends.cuda
    # (setter name, value to apply, label printed on success)
    sdp_toggles = [
        ('enable_flash_sdp', False, 'Cuda enable flash sdp: '),
        ('enable_math_sdp', True, 'Cuda enable math sdp: '),
        ('enable_mem_efficient_sdp', False, 'Cuda enable mem efficient sdp: '),
        ('enable_cudnn_sdp', False, 'Cuda enable cudnn sdp: '),
    ]
    for setter_name, flag, label in sdp_toggles:
        # Guard with hasattr: older torch builds lack some of these setters.
        if hasattr(backend, setter_name):
            getattr(backend, setter_name)(flag)
            print(label, flag)