File size: 706 Bytes
67e9774 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 |
import threading
from typing import Any, Optional
import torch._C._lazy
class DeviceContext:
    """Lightweight per-device context, interned by ``get_device_context``."""

    # Registry of singleton contexts, keyed by device string.
    _CONTEXTS: dict[str, Any] = {}
    # Serializes reads/writes of the registry across threads.
    _CONTEXTS_LOCK = threading.Lock()

    def __init__(self, device: str) -> None:
        self.device = device
def get_device_context(device: Optional[str] = None) -> DeviceContext:
    """Return the interned ``DeviceContext`` for *device*, creating it on first use.

    When *device* is ``None``, the lazy backend's default device type is
    queried instead. Any non-``None`` value is coerced with ``str`` so
    device-like objects can be passed. Thread-safe: the class-level lock
    guards the get-or-create on the shared registry.
    """
    if device is None:
        key = torch._C._lazy._get_default_device_type()
    else:
        key = str(device)
    with DeviceContext._CONTEXTS_LOCK:
        try:
            return DeviceContext._CONTEXTS[key]
        except KeyError:
            ctx = DeviceContext._CONTEXTS[key] = DeviceContext(key)
            return ctx
|