File size: 2,348 Bytes
ee93ecd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
"""Tests for sim data bridge."""

from dataclasses import dataclass

import pytest
import torch
import warp as wp
from conftest import get_test_device

from mjlab.sim.sim_data import TorchArray, WarpBridge


@dataclass
class MockData:
  """Minimal stand-in for a sim data container: one warp array plus a scalar."""

  # Warp array field; WarpBridge is expected to expose this as a TorchArray.
  arr: wp.array
  # Plain scalar field; WarpBridge is expected to pass this through unchanged.
  val: float = 1.0


@pytest.fixture
def device():
  """Provide the device shared by all tests in this module."""
  dev = get_test_device()
  return dev


@pytest.fixture
def mock_data(device):
  """Build a MockData whose warp array is allocated on the test device."""
  with wp.ScopedDevice(device):
    values = wp.array([[1.0, 2.0], [3.0, 4.0]], dtype=wp.float32)
  return MockData(arr=values)


def test_torch_array_shares_memory(device):
  """Writes through a TorchArray must be visible in the wrapped warp array."""
  with wp.ScopedDevice(device):
    source = wp.array([1.0, 2.0], dtype=wp.float32)
  wrapped = TorchArray(source)
  wrapped[0] = 99.0
  # Zero-copy: the mutation shows up in the original warp buffer.
  assert source.numpy()[0] == 99.0


def test_torch_array_ops_work(device):
  """TorchArray participates in ordinary PyTorch arithmetic and reductions."""
  with wp.ScopedDevice(device):
    backing = wp.array([1.0, 2.0], dtype=wp.float32)
  wrapped = TorchArray(backing)
  doubled = wrapped * 2
  assert torch.allclose(doubled, torch.tensor([2.0, 4.0], device=device))
  total = torch.sum(wrapped)
  assert total.item() == 3.0  # type: ignore


def test_bridge_wraps_arrays(mock_data):
  """Warp array attributes surface as TorchArray; scalars pass through."""
  bridge = WarpBridge(mock_data)
  assert isinstance(bridge.arr, TorchArray)
  # Non-array fields are forwarded without wrapping.
  assert bridge.val == 1.0


def test_bridge_preserves_memory_on_slice_assign(mock_data, device):
  """In-place slice writes keep the same buffer address (CUDA graph safe)."""
  bridge = WarpBridge(mock_data)
  ptr_before = bridge.arr._tensor.data_ptr()

  bridge.arr[:] = torch.zeros((2, 2), device=device)

  # Same storage, new contents.
  assert ptr_before == bridge.arr._tensor.data_ptr()
  assert torch.all(bridge.arr._tensor == 0)


def test_bridge_raises_on_setattr(mock_data, device):
  """Rebinding any bridge attribute is rejected with a descriptive error."""
  bridge = WarpBridge(mock_data)
  replacement = torch.zeros((2, 2), device=device)

  # Array attributes may only be mutated in place, never rebound.
  with pytest.raises(AttributeError, match="Cannot set attribute 'arr' on WarpBridge"):
    bridge.arr = replacement

  # Scalar attributes are also protected.
  with pytest.raises(AttributeError, match="Use in-place operations instead"):
    bridge.val = 42.0


def test_bridge_caches_wrappers(mock_data):
  """Repeated attribute access yields the identical wrapper instance."""
  bridge = WarpBridge(mock_data)
  first, second = bridge.arr, bridge.arr
  # Identity, not mere equality: the wrapper is cached, not rebuilt.
  assert first is second