{
  "model_name": "Qwen/Qwen3-8B",
  "nbits": 4,
  "group_size": 128,
  "tiling_mode": "1D",
  "method": "sinq",
  "compute_dtype": "torch.bfloat16",
  "timestamp": "2025-10-06T17:31:58.512995"
}