| | """Tests for CircularBuffer.""" |
| |
|
| | import pytest |
| | import torch |
| | from conftest import get_test_device |
| |
|
| | from mjlab.utils.buffers import CircularBuffer |
| |
|
| |
|
@pytest.fixture
def device():
    """Device on which the buffer tests run (CPU or accelerator)."""
    return get_test_device()
| |
|
| |
|
def test_circular_buffer_basic_append(device):
    """Append three frames and check chronological order (oldest -> newest)."""
    buf = CircularBuffer(max_len=3, batch_size=2, device=device)

    frames = [
        [[1.0, 2.0], [3.0, 4.0]],
        [[5.0, 6.0], [7.0, 8.0]],
        [[9.0, 10.0], [11.0, 12.0]],
    ]
    for frame in frames:
        buf.append(torch.tensor(frame, device=device))

    history = buf.buffer
    assert history.shape == (2, 3, 2)

    # Each batch row holds its own frames, oldest first.
    expected_row0 = torch.tensor([[1.0, 2.0], [5.0, 6.0], [9.0, 10.0]], device=device)
    expected_row1 = torch.tensor([[3.0, 4.0], [7.0, 8.0], [11.0, 12.0]], device=device)
    assert torch.allclose(history[0], expected_row0)
    assert torch.allclose(history[1], expected_row1)
| |
|
| |
|
def test_circular_buffer_overwrite(device):
    """Appending past capacity drops the oldest frame."""
    buf = CircularBuffer(max_len=2, batch_size=1, device=device)

    for value in (1.0, 2.0, 3.0):
        buf.append(torch.tensor([[value]], device=device))

    history = buf.buffer
    assert history.shape == (1, 2, 1)
    # The first frame (1.0) has been overwritten.
    assert torch.allclose(history[0], torch.tensor([[2.0], [3.0]], device=device))
| |
|
| |
|
def test_circular_buffer_reset_single_batch(device):
    """Reset clears values and counters only for the targeted batch rows."""
    buf = CircularBuffer(max_len=2, batch_size=3, device=device)

    buf.append(torch.tensor([[1.0], [2.0], [3.0]], device=device))
    buf.append(torch.tensor([[4.0], [5.0], [6.0]], device=device))

    buf.reset(batch_ids=torch.tensor([1], device=device))

    history = buf.buffer
    # Row 1 was zeroed; rows 0 and 2 keep their oldest values.
    assert history[0, 0, 0] == 1.0
    assert history[1, 0, 0] == 0.0
    assert history[2, 0, 0] == 3.0

    # Only row 1's counter was reset.
    lengths = buf.current_length
    assert torch.equal(lengths.cpu(), torch.tensor([2, 0, 2]))
| |
|
| |
|
def test_circular_buffer_first_append_fills(device):
    """The very first append replicates the frame across the whole history."""
    buf = CircularBuffer(max_len=3, batch_size=2, device=device)
    buf.append(torch.tensor([[1.0], [2.0]], device=device))

    history = buf.buffer
    for row, value in enumerate((1.0, 2.0)):
        expected = torch.full((3, 1), value, device=device)
        assert torch.allclose(history[row], expected)

    # Only one real frame has been pushed so far.
    assert torch.equal(buf.current_length.cpu(), torch.tensor([1, 1]))
| |
|
| |
|
def test_current_length_counts_and_clamps(device):
    """current_length tracks per-row valid frames and saturates at max_len."""
    buf = CircularBuffer(max_len=4, batch_size=3, device=device)

    def push():
        # Same frame every time; only the counters matter in this test.
        buf.append(torch.arange(3, dtype=torch.float32, device=device).unsqueeze(-1))

    push()
    push()
    assert torch.equal(buf.current_length.cpu(), torch.tensor([2, 2, 2]))

    # Resetting row 1 zeroes only that row's counter.
    buf.reset(batch_ids=[1])
    assert torch.equal(buf.current_length.cpu(), torch.tensor([2, 0, 2]))

    push()
    assert torch.equal(buf.current_length.cpu(), torch.tensor([3, 1, 3]))

    # Further appends clamp every counter at max_len.
    for _ in range(5):
        push()
    assert torch.equal(buf.current_length.cpu(), torch.tensor([4, 4, 4]))
| |
|
| |
|
def test_reset_all_none_path(device):
    """reset() with no ids zeroes every frame and every counter."""
    buf = CircularBuffer(max_len=3, batch_size=2, device=device)
    buf.append(torch.tensor([[1.0], [2.0]], device=device))
    buf.append(torch.tensor([[3.0], [4.0]], device=device))

    buf.reset()

    assert torch.equal(buf.current_length.cpu(), torch.tensor([0, 0]))
    # Entire history is zeroed.
    assert torch.count_nonzero(buf.buffer) == 0
| |
|
| |
|
def test_getitem_lifo_and_clamp(device):
    """Indexing returns per-row lagged frames (0 == newest), clamping large lags."""
    buf = CircularBuffer(max_len=3, batch_size=2, device=device)

    for left, right in ((1.0, 10.0), (2.0, 20.0), (3.0, 30.0)):
        buf.append(torch.tensor([[left], [right]], device=device))

    # Lag 0 is the newest frame; lag 2 is the oldest.
    picked = buf[torch.tensor([0, 2], device=device)]
    assert torch.allclose(picked, torch.tensor([[3.0], [10.0]], device=device))

    # An out-of-range lag clamps to the oldest available frame.
    picked = buf[torch.tensor([99, 1], device=device)]
    assert torch.allclose(picked, torch.tensor([[1.0], [20.0]], device=device))
| |
|
| |
|
def test_backfill_after_per_batch_reset(device):
    """A reset row's next append back-fills that row's entire history."""
    buf = CircularBuffer(max_len=3, batch_size=2, device=device)

    buf.append(torch.tensor([[1.0], [10.0]], device=device))
    buf.append(torch.tensor([[2.0], [20.0]], device=device))

    buf.reset(batch_ids=[1])
    assert torch.equal(buf.current_length.cpu(), torch.tensor([2, 0]))

    buf.append(torch.tensor([[3.0], [99.0]], device=device))

    history = buf.buffer
    # Row 0 kept its history and gained the new frame.
    assert torch.allclose(
        history[0].squeeze(-1), torch.tensor([1.0, 2.0, 3.0], device=device)
    )
    # Row 1 was back-filled entirely with the post-reset frame.
    assert torch.allclose(
        history[1].squeeze(-1), torch.tensor([99.0, 99.0, 99.0], device=device)
    )
| |
|
| |
|
def test_errors_and_types(device):
    """Error paths: mismatched batch, pre-append access, and bad key length."""
    buf = CircularBuffer(max_len=2, batch_size=2, device=device)

    # Frame whose batch dimension disagrees with batch_size.
    with pytest.raises(ValueError):
        buf.append(torch.tensor([[1.0]], device=device))

    # Reading the buffer before any append.
    with pytest.raises(RuntimeError):
        _ = CircularBuffer(max_len=1, batch_size=1, device=device).buffer

    # Indexing before any append.
    with pytest.raises(RuntimeError):
        _ = buf[torch.tensor([0, 0], device=device)]

    buf.append(torch.tensor([[1.0], [2.0]], device=device))

    # Key whose length disagrees with batch_size.
    with pytest.raises(ValueError):
        _ = buf[torch.tensor([0], device=device)]
| |
|
| |
|
def test_dtype_and_device_preserved(device):
    """The stored history keeps the appended tensor's dtype and device."""
    buf = CircularBuffer(max_len=2, batch_size=2, device=device)
    sample = torch.tensor([[1.0], [2.0]], dtype=torch.float32, device=device)
    buf.append(sample)

    history = buf.buffer
    assert history.dtype == torch.float32
    assert history.device.type == torch.device(device).type
| |
|