# imagenet-sdxl-quantized / test_quantization.py
# (uploaded by jon-kyl, commit 49e7d2b, "add everything")
import os
# Force CPU-only execution. Both variables must be set before jax (and any
# CUDA-aware library) initializes its backend, hence before the imports below.
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
os.environ["JAX_PLATFORMS"] = "cpu"
import pytest
import jax
import jax.numpy as jnp
import numpy as np
import torch
from quantization import quantization_types
# Gradients are never needed in these tests. Use the public torch API
# (torch.set_grad_enabled) rather than the internal
# torch.autograd.grad_mode module path.
torch.set_grad_enabled(False)
# Array backends under test, keyed by display name (names double as pytest ids).
backends = {"jax": jnp, "numpy": np, "torch": torch}
# Scale values swept by the `scale` fixture.
scales = (0.5, 1.0, 1.5)
@pytest.fixture(params=backends.values(), ids=backends.keys())
def backend(request):
    """Yield each array backend module (jax.numpy, numpy, torch) in turn."""
    module = request.param
    return module
def _uint8_range(backend):
"""Fixture to provide uint8 range for each backend."""
return backend.arange(256, dtype=backend.uint8)
# Expose _uint8_range as a fixture while keeping the plain function callable
# directly (the cross-backend and jit/compile tests call it with an explicit
# backend module).
uint8_range = pytest.fixture(_uint8_range)
def _float_range(backend):
"""Fixture to provide float range for each backend."""
return backend.linspace(-4, 4, 101, dtype=backend.float32)
# Fixture wrapper around _float_range; the plain function stays importable for
# direct, backend-explicit calls.
float_range = pytest.fixture(_float_range)
@pytest.fixture(params=quantization_types)
def cls(request):
    """Yield each quantization class registered in quantization_types."""
    quantization_cls = request.param
    return quantization_cls
@pytest.fixture
def default_instance(cls):
    """Instantiate each quantization class with its default arguments."""
    return cls()
@pytest.fixture(params=scales)
def scale(request):
    """Yield each scale value from the module-level `scales` sweep."""
    return request.param
@pytest.fixture
def instance(cls, scale):
    """Instantiate each quantization class at each swept scale value."""
    return cls(scale=scale)
def test_interface(cls, default_instance):
"""Test that all quantization classes and instances have the expected methods."""
assert isinstance(default_instance, cls)
classattrs = ()
classmethods = ()
instanceattrs = ("scale",)
instancemethods = ("quantize", "dequantize", "nonlinearity", "inv_nonlinearity")
interface = {
cls: (classattrs, classmethods),
default_instance: (instanceattrs, instancemethods)
}
for obj, (attrs, methods) in interface.items():
for attr in attrs + methods:
assert hasattr(obj, attr)
for method in methods:
assert callable(getattr(obj, method))
def test_quantize_returns_uint8(default_instance, float_range):
"""Test that quantize returns uint8 values."""
quantized = default_instance.quantize(float_range)
assert "uint8" in str(quantized.dtype).lower()
def test_dequantize_returns_float32(default_instance, uint8_range):
"""Test that dequantize returns float32 values."""
dequantized = default_instance.dequantize(uint8_range)
assert "float32" in str(dequantized.dtype).lower()
def test_roundtrip_consistency(instance, uint8_range):
"""Test that dequantize->quantize is lossless."""
dequantized = instance.dequantize(uint8_range)
requantized = instance.quantize(dequantized)
np.testing.assert_array_equal(uint8_range, requantized)
def test_cross_backend_consistency(instance):
    """Dequantization must agree (to ~1e-6) across jax, numpy and torch.

    The first backend in `backends` serves as the reference; every other
    backend's output is compared against it elementwise.
    """
    names = list(backends)
    outputs = [instance.dequantize(_uint8_range(backends[n])) for n in names]
    reference_name, reference = names[0], outputs[0]
    for name, out in zip(names[1:], outputs[1:]):
        np.testing.assert_array_almost_equal(
            out,
            reference,
            err_msg=f"{name} result doesn't match {reference_name}",
        )
def test_jax_jit(instance):
    """The quantize/dequantize round trip must survive jax.jit compilation."""
    codes = _uint8_range(jnp)

    def roundtrip(x):
        return instance.quantize(instance.dequantize(x))

    # Eager and jitted executions of the same closure must agree exactly.
    eager = roundtrip(codes)
    compiled = jax.jit(roundtrip)(codes)
    np.testing.assert_array_equal(compiled, eager)
def test_torch_compile(instance):
    """The quantize/dequantize round trip must survive torch.compile."""
    codes = _uint8_range(torch)

    def roundtrip(x):
        return instance.quantize(instance.dequantize(x))

    # Eager and compiled executions of the same closure must agree exactly.
    eager = roundtrip(codes)
    compiled = torch.compile(roundtrip)(codes)
    np.testing.assert_array_equal(compiled, eager)