============================= test session starts ==============================
platform linux -- Python 3.12.3, pytest-8.1.1, pluggy-1.6.0 -- /usr/bin/python
cachedir: .pytest_cache
hypothesis profile 'default' -> database=DirectoryBasedExampleDatabase(PosixPath('/home/yiliu7/workspace/vllm/tests/kernels/attention/.hypothesis/examples'))
rootdir: /home/yiliu7/workspace/vllm
configfile: pyproject.toml
plugins: anyio-4.9.0, rerunfailures-15.1, shard-0.1.2, xdoctest-1.0.2, xdist-3.6.1, flakefinder-1.1.0, hypothesis-6.130.8, typeguard-4.3.0
collecting ... WARNING 11-29 09:56:20 [interface.py:508] Current platform cuda does not have '_pytestfixturefunction' attribute.
WARNING 11-29 09:56:20 [interface.py:508] Current platform cuda does not have '__test__' attribute.
WARNING 11-29 09:56:20 [interface.py:508] Current platform cuda does not have '__bases__' attribute.
WARNING 11-29 09:56:20 [interface.py:508] Current platform cuda does not have '__test__' attribute.
collected 1 item
Running 1 items in this shard: tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]

test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
query shape: torch.Size([5, 8, 8])
query_idx: tensor([[[  0,   1,   2,   3,   4,   5,   6,   7],
         [  8,   9,  10,  11,  12,  13,  14,  15],
         [ 16,  17,  18,  19,  20,  21,  22,  23],
         [ 24,  25,  26,  27,  28,  29,  30,  31],
         [ 32,  33,  34,  35,  36,  37,  38,  39],
         [ 40,  41,  42,  43,  44,  45,  46,  47],
         [ 48,  49,  50,  51,  52,  53,  54,  55],
         [ 56,  57,  58,  59,  60,  61,  62,  63]],

        [[ 64,  65,  66,  67,  68,  69,  70,  71],
         [ 72,  73,  74,  75,  76,  77,  78,  79],
         [ 80,  81,  82,  83,  84,  85,  86,  87],
         [ 88,  89,  90,  91,  92,  93,  94,  95],
         [ 96,  97,  98,  99, 100, 101, 102, 103],
         [104, 105, 106, 107, 108, 109, 110, 111],
         [112, 113, 114, 115, 116, 117, 118, 119],
         [120, 121, 122, 123, 124, 125, 126, 127]],

        [[128, 129, 130, 131, 132, 133, 134, 135],
         [136, 137, 138, 139, 140, 141, 142, 143],
         [144, 145, 146, 147, 148, 149, 150, 151],
         [152, 153, 154, 155, 156, 157, 158, 159],
         [160, 161, 162, 163, 164, 165, 166, 167],
         [168, 169, 170, 171, 172, 173, 174, 175],
         [176, 177, 178, 179, 180, 181, 182, 183],
         [184, 185, 186, 187, 188, 189, 190, 191]],

        [[192, 193, 194, 195, 196, 197, 198, 199],
         [200, 201, 202, 203, 204, 205, 206, 207],
         [208, 209, 210, 211, 212, 213, 214, 215],
         [216, 217, 218, 219, 220, 221, 222, 223],
         [224, 225, 226, 227, 228, 229, 230, 231],
         [232, 233, 234, 235, 236, 237, 238, 239],
         [240, 241, 242, 243, 244, 245, 246, 247],
         [248, 249, 250, 251, 252, 253, 254, 255]],

        [[256, 257, 258, 259, 260, 261, 262, 263],
         [264, 265, 266, 267, 268, 269, 270, 271],
         [272, 273, 274, 275, 276, 277, 278, 279],
         [280, 281, 282, 283, 284, 285, 286, 287],
         [288, 289, 290, 291, 292, 293, 294, 295],
         [296, 297, 298, 299, 300, 301, 302, 303],
         [304, 305, 306, 307, 308, 309, 310, 311],
         [312, 313, 314, 315, 316, 317, 318, 319]]], device='cuda:0')
query_uint8: tensor([[[183, 174, 195,  33, 160, 177, 178, 170],
         [185, 172, 189,  65,  42, 164,  58,  57],
         [160,  58,  60,  56, 176, 187, 180, 186],
         [ 25, 132,  63,  51, 154, 181,  59, 169],
         [178, 170, 183, 181, 180, 149,  49,  60],
         [  7,  46,  61,  51, 164,  56, 188, 180],
         [164,  44,  45, 155,  54, 185,  30,  48],
         [177, 170, 177,  54,  41,  32,  47, 185]],

        [[145, 184,  55, 185, 184,  31,  51, 173],
         [179, 177,  51, 188,  44, 147,  44, 176],
         [170,  52, 196,  43, 142,  60, 129,  36],
         [ 38, 167,  56,  61,  33,  48,  57, 183],
         [155, 188, 182, 174,  24, 146,  49,  54],
         [189,  46,  52, 182,  51, 171, 191, 163],
         [ 63,  48,  57, 187, 168, 188,  60,   6],
         [ 65, 183,  62, 168, 182, 166, 171, 165]],

        [[185, 177, 178,  64, 187,  58, 192, 182],
         [ 41, 180, 164, 171,  43, 161,  43, 184],
         [173,  49,  21,  61, 173, 166, 163,  34],
         [ 24, 187, 145, 167,  56,  58,  52, 181],
         [ 45, 175, 184,  55,  44, 175, 189,  56],
         [177,  41, 169,  34, 193,  49, 186,  60],
         [ 29, 175,  47,  34, 187,  63, 185,  41],
         [179,  51, 181, 156,  36,  38, 180, 174]],

        [[161, 187, 182, 173,  44,  39, 187, 176],
         [173, 188, 185, 168, 179, 193, 174,  23],
         [ 41,  58, 136,  45, 186,  61,  48, 187],
         [190,  55, 166, 177,  42, 175,  65,  49],
         [ 55,  41, 193,  46,  57,  50,  42,  58],
         [ 29, 192,  49,  34,  53,  40, 179, 134],
         [194,  43,  43, 154,  38, 194,  25, 192],
         [ 57, 177,  59, 190, 185,  59,  69, 182]],

        [[ 39,  57, 168, 176, 185,  59,  55, 151],
         [ 45, 175,  28,  57,  51, 164, 175, 190],
         [ 25, 184, 162,  50,  45, 175,  55, 181],
         [145, 186, 178,  57, 179,  57, 179, 161],
         [ 51, 179,  36, 190, 177, 179, 160, 178],
         [165,  62,  49, 152,  64, 168, 190, 174],
         [185,  58,  43, 179, 174, 184, 183,  49],
         [ 58, 191,  55, 180,  19,  63, 181, 179]]], device='cuda:0',
       dtype=torch.uint8)
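Annotation: the two dumps above are debug views of the test's query tensor. query_idx is just a flat arange reshaped to the query's [5, 8, 8] layout, so each element's value is its flattened position, and query_uint8 reinterprets the query's raw bytes, which is only lossless for a 1-byte dtype. A minimal sketch of how such views can be built, assuming the parametrized q_dtype0 is an 8-bit float (the uint8 view strongly suggests this, but it is an assumption):

```python
import torch

num_tokens, num_heads, head_size = 5, 8, 8

# Index tensor matching the printed query_idx: element value == flattened offset.
query_idx = torch.arange(num_tokens * num_heads * head_size, device="cuda")
query_idx = query_idx.reshape(num_tokens, num_heads, head_size)

# Assumption: the query is stored in an 8-bit float dtype, so viewing its raw
# bytes as uint8 reproduces a dump like query_uint8 without copying storage.
query = torch.randn(num_tokens, num_heads, head_size, device="cuda")
query = query.to(torch.float8_e4m3fn)   # hypothetical stand-in for q_dtype0
query_uint8 = query.view(torch.uint8)
```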
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/tests/kernels/attention/test_triton_unified_attention.py(219)test_triton_unified_attn()
-> ref_output = ref_paged_attn(
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/tests/kernels/attention/test_triton_unified_attention.py(56)ref_paged_attn()
-> for i in range(num_seqs):
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
k shape: torch.Size([7, 2, 8])
k data: tensor([[[186,  58, 189,  59, 179, 172,  49,  53],
         [ 57,  54,  58,  51,  49, 134, 181,  63]],
        [[168,  56,  35, 177,  66,  40,  43,  50],
         [ 42, 157, 176, 184,  40,  57, 176, 181]],
        [[186, 191,  54,  48, 180,  43,  38, 157],
         [ 50, 164,  52, 189,  51,  64,  46, 191]],
        [[ 51, 178, 180,  61, 179,  45,  60,  41],
         [ 60,  52, 152, 182,  41, 173, 191, 178]],
        [[132, 174,  50, 181, 180,  59, 180,  46],
         [ 67,  42,  41,  43, 192,  64, 152, 186]],
        [[170, 171, 184,  48,  43, 162, 169, 190],
         [ 52, 179,  57,  50, 169, 168,  59,  40]],
        [[ 47,  49,  57,  49,  57, 177, 178, 169],
         [183, 189, 186, 164,  51,  33, 184,  48]]], device='cuda:0',
       dtype=torch.uint8)
v shape: torch.Size([7, 2, 8])
v data: tensor([[[181,  50, 178,  57, 177, 187,  59,  21],
         [ 68,  57,  42, 179,  61,  50, 169, 147]],
        [[173,  57,  39,  41,  42, 194, 183,  54],
         [ 50, 188, 160,  42, 170, 173, 172, 170]],
        [[ 17, 179, 181,  53, 151, 162,  60, 186],
         [187, 182, 182,  41,  51,  37, 186, 183]],
        [[177, 170,  39, 179, 188,  43,  59,  55],
         [184,  64, 183,  53, 174, 160,  58, 194]],
        [[ 34,  36, 191,  44,  32,  34,  50,  39],
         [181,  59,  47, 184, 188,  50,  57,  61]],
        [[ 46,  52, 147,  59,  43, 180,  21,  54],
         [186,  36, 171,  49,  63,  61,  61, 129]],
        [[  1, 193, 157, 154, 183, 175,  43,  61],
         [ 55,  54, 173,  56,  63,  60,  45,  56]]], device='cuda:0',
       dtype=torch.uint8)
q shape : torch.Size([5, 8, 8]), k/v shape torch.Size([7, 8, 8])
attn shape: torch.Size([8, 5, 7]), v shape: torch.Size([7, 8, 8]), out shape: torch.Size([5, 8, 8])
num_seqs=1, num_query_heads=8, num_kv_heads=2
num_queries_per_kv=4, head_size=8
Using BLOCK_M=16, BLOCK_Q=4
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/vllm/attention/ops/triton_unified_attention.py(762)unified_attention()
-> print(f"Launch parameters: total_num_q_blocks={total_num_q_blocks}, num_kv_heads={num_kv_heads}")
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
Launch parameters: total_num_q_blocks=2, num_kv_heads=2
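Annotation: the shapes printed by ref_paged_attn line up with a grouped-query reference: k/v arrive as [7, 2, 8] (7 cached tokens, 2 kv heads) and are expanded to [7, 8, 8] so attn holds one [5, 7] score matrix per query head. The launch line also follows directly from the printed block parameters. A sketch of both calculations, assuming the usual repeat_interleave/einsum reference pattern (not necessarily the test's exact code):

```python
import torch

num_tokens, num_query_heads, num_kv_heads, head_size, seq_len = 5, 8, 2, 8, 7
num_queries_per_kv = num_query_heads // num_kv_heads           # 8 // 2 = 4

q = torch.randn(num_tokens, num_query_heads, head_size)
k = torch.randn(seq_len, num_kv_heads, head_size)              # [7, 2, 8]
v = torch.randn(seq_len, num_kv_heads, head_size)

# GQA: each kv head serves num_queries_per_kv query heads -> expand to [7, 8, 8].
k = torch.repeat_interleave(k, num_queries_per_kv, dim=1)
v = torch.repeat_interleave(v, num_queries_per_kv, dim=1)
attn = torch.einsum("qhd,khd->hqk", q, k)                      # [8, 5, 7]

# Launch-grid arithmetic behind "total_num_q_blocks=2":
BLOCK_M = 16
BLOCK_Q = BLOCK_M // num_queries_per_kv                        # 16 // 4 = 4
total_num_q_blocks = (num_tokens + BLOCK_Q - 1) // BLOCK_Q     # ceil(5/4) = 2
```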
q_block_global_idx: [0], kv_head_idx: [0]
q_block_global_idx: [0], kv_head_idx: [0]
query_offset_0: [0 0 0 0 1 1 1 1 2 2 2 2 3 3 3 3]
query_offset_1: [0 1 2 3 0 1 2 3 0 1 2 3 0 1 2 3]
query_offset: [[  0   1   2   3   4   5   6   7]
 [  8   9  10  11  12  13  14  15]
 [ 16  17  18  19  20  21  22  23]
 [ 24  25  26  27  28  29  30  31]
 [ 64  65  66  67  68  69  70  71]
 [ 72  73  74  75  76  77  78  79]
 [ 80  81  82  83  84  85  86  87]
 [ 88  89  90  91  92  93  94  95]
 [128 129 130 131 132 133 134 135]
 [136 137 138 139 140 141 142 143]
 [144 145 146 147 148 149 150 151]
 [152 153 154 155 156 157 158 159]
 [192 193 194 195 196 197 198 199]
 [200 201 202 203 204 205 206 207]
 [208 209 210 211 212 213 214 215]
 [216 217 218 219 220 221 222 223]]
Q: [[183 174 195  33 160 177 178 170]
 [185 172 189  65  42 164  58  57]
 [160  58  60  56 176 187 180 186]
 [ 25 132  63  51 154 181  59 169]
 [145 184  55 185 184  31  51 173]
 [179 177  51 188  44 147  44 176]
 [170  52 196  43 142  60 129  36]
 [ 38 167  56  61  33  48  57 183]
 [185 177 178  64 187  58 192 182]
 [ 41 180 164 171  43 161  43 184]
 [173  49  21  61 173 166 163  34]
 [ 24 187 145 167  56  58  52 181]
 [161 187 182 173  44  39 187 176]
 [173 188 185 168 179 193 174  23]
 [ 41  58 136  45 186  61  48 187]
 [190  55 166 177  42 175  65  49]]
q_block_global_idx: [0], kv_head_idx: [0]
k_offset: [[469248 469264 469280 469296 469312 469328 469344 469360 469376 469392 469408 469424 469440 469456 469472 469488]
 [469249 469265 469281 469297 469313 469329 469345 469361 469377 469393 469409 469425 469441 469457 469473 469489]
 [469250 469266 469282 469298 469314 469330 469346 469362 469378 469394 469410 469426 469442 469458 469474 469490]
 [469251 469267 469283 469299 469315 469331 469347 469363 469379 469395 469411 469427 469443 469459 469475 469491]
 [469252 469268 469284 469300 469316 469332 469348 469364 469380 469396 469412 469428 469444 469460 469476 469492]
 [469253 469269 469285 469301 469317 469333 469349 469365 469381 469397 469413 469429 469445 469461 469477 469493]
 [469254 469270 469286 469302 469318 469334 469350 469366 469382 469398 469414 469430 469446 469462 469478 469494]
 [469255 469271 469287 469303 469319 469335 469351 469367 469383 469399 469415 469431 469447 469463 469479 469495]]
K_load: [[186 168 186  51 132 170   0   0   0   0   0   0   0   0   0   0]
 [ 58  56 191 178 174 171   0   0   0   0   0   0   0   0   0   0]
 [189  35  54 180  50 184   0   0   0   0   0   0   0   0   0   0]
 [ 59 177  48  61 181  48   0   0   0   0   0   0   0   0   0   0]
 [179  66 180 179 180  43   0   0   0   0   0   0   0   0   0   0]
 [172  40  43  45  59 162   0   0   0   0   0   0   0   0   0   0]
 [ 49  43  38  60 180 169   0   0   0   0   0   0   0   0   0   0]
 [ 53  50 157  41  46 190   0   0   0   0   0   0   0   0   0   0]]
v_offset: [[469248 469249 469250 469251 469252 469253 469254 469255]
 [469264 469265 469266 469267 469268 469269 469270 469271]
 [469280 469281 469282 469283 469284 469285 469286 469287]
 [469296 469297 469298 469299 469300 469301 469302 469303]
 [469312 469313 469314 469315 469316 469317 469318 469319]
 [469328 469329 469330 469331 469332 469333 469334 469335]
 [469344 469345 469346 469347 469348 469349 469350 469351]
 [469360 469361 469362 469363 469364 469365 469366 469367]
 [469376 469377 469378 469379 469380 469381 469382 469383]
 [469392 469393 469394 469395 469396 469397 469398 469399]
 [469408 469409 469410 469411 469412 469413 469414 469415]
 [469424 469425 469426 469427 469428 469429 469430 469431]
 [469440 469441 469442 469443 469444 469445 469446 469447]
 [469456 469457 469458 469459 469460 469461 469462 469463]
 [469472 469473 469474 469475 469476 469477 469478 469479]
 [469488 469489 469490 469491 469492 469493 469494 469495]]
V_load: [[181  50 178  57 177 187  59  21]
 [173  57  39  41  42 194 183  54]
 [ 17 179 181  53 151 162  60 186]
 [177 170  39 179 188  43  59  55]
 [ 34  36 191  44  32  34  50  39]
 [ 46  52 147  59  43 180  21  54]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]]
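Annotation: within one program, the BLOCK_M=16 rows interleave query tokens and query heads: query_offset_0 is the token index, query_offset_1 the query-head index, and query_offset the flattened element offset into the [5, 8, 8] query. A sketch of that index math, with strides inferred from the printed values (the real kernel does this with tl.arange over block-local offsets):

```python
import numpy as np

num_query_heads, head_size = 8, 8
num_queries_per_kv, BLOCK_M, BLOCK_Q = 4, 16, 4
q_block_global_idx, kv_head_idx = 0, 0                 # first program above

offs_m = np.arange(BLOCK_M)
# Four consecutive rows share one query token...
query_offset_0 = q_block_global_idx * BLOCK_Q + offs_m // num_queries_per_kv
# ...and cycle through the query heads owned by this kv head.
query_offset_1 = kv_head_idx * num_queries_per_kv + offs_m % num_queries_per_kv
# Flattened offsets into the [num_tokens, num_heads, head_size] query tensor.
query_offset = (query_offset_0[:, None] * num_query_heads * head_size
                + query_offset_1[:, None] * head_size
                + np.arange(head_size)[None, :])
print(query_offset[:2])   # [[0..7], [8..15]], matching the dump
```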
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/vllm/attention/ops/triton_unified_attention.py(264)kernel_unified_attention_2d()
-> S = tl.where(
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
q_block_global_idx: [0], kv_head_idx: [1]
q_block_global_idx: [0], kv_head_idx: [1]
query_offset_0: [0 0 0 0 1 1 1 1 2 2 2 2 3 3 3 3]
query_offset_1: [4 5 6 7 4 5 6 7 4 5 6 7 4 5 6 7]
query_offset: [[ 32  33  34  35  36  37  38  39]
 [ 40  41  42  43  44  45  46  47]
 [ 48  49  50  51  52  53  54  55]
 [ 56  57  58  59  60  61  62  63]
 [ 96  97  98  99 100 101 102 103]
 [104 105 106 107 108 109 110 111]
 [112 113 114 115 116 117 118 119]
 [120 121 122 123 124 125 126 127]
 [160 161 162 163 164 165 166 167]
 [168 169 170 171 172 173 174 175]
 [176 177 178 179 180 181 182 183]
 [184 185 186 187 188 189 190 191]
 [224 225 226 227 228 229 230 231]
 [232 233 234 235 236 237 238 239]
 [240 241 242 243 244 245 246 247]
 [248 249 250 251 252 253 254 255]]
Q: [[178 170 183 181 180 149  49  60]
 [  7  46  61  51 164  56 188 180]
 [164  44  45 155  54 185  30  48]
 [177 170 177  54  41  32  47 185]
 [155 188 182 174  24 146  49  54]
 [189  46  52 182  51 171 191 163]
 [ 63  48  57 187 168 188  60   6]
 [ 65 183  62 168 182 166 171 165]
 [ 45 175 184  55  44 175 189  56]
 [177  41 169  34 193  49 186  60]
 [ 29 175  47  34 187  63 185  41]
 [179  51 181 156  36  38 180 174]
 [ 55  41 193  46  57  50  42  58]
 [ 29 192  49  34  53  40 179 134]
 [194  43  43 154  38 194  25 192]
 [ 57 177  59 190 185  59  69 182]]
q_block_global_idx: [0], kv_head_idx: [1]
k_offset: [[469256 469272 469288 469304 469320 469336 469352 469368 469384 469400 469416 469432 469448 469464 469480 469496]
 [469257 469273 469289 469305 469321 469337 469353 469369 469385 469401 469417 469433 469449 469465 469481 469497]
 [469258 469274 469290 469306 469322 469338 469354 469370 469386 469402 469418 469434 469450 469466 469482 469498]
 [469259 469275 469291 469307 469323 469339 469355 469371 469387 469403 469419 469435 469451 469467 469483 469499]
 [469260 469276 469292 469308 469324 469340 469356 469372 469388 469404 469420 469436 469452 469468 469484 469500]
 [469261 469277 469293 469309 469325 469341 469357 469373 469389 469405 469421 469437 469453 469469 469485 469501]
 [469262 469278 469294 469310 469326 469342 469358 469374 469390 469406 469422 469438 469454 469470 469486 469502]
 [469263 469279 469295 469311 469327 469343 469359 469375 469391 469407 469423 469439 469455 469471 469487 469503]]
K_load: [[ 57  42  50  60  67  52   0   0   0   0   0   0   0   0   0   0]
 [ 54 157 164  52  42 179   0   0   0   0   0   0   0   0   0   0]
 [ 58 176  52 152  41  57   0   0   0   0   0   0   0   0   0   0]
 [ 51 184 189 182  43  50   0   0   0   0   0   0   0   0   0   0]
 [ 49  40  51  41 192 169   0   0   0   0   0   0   0   0   0   0]
 [134  57  64 173  64 168   0   0   0   0   0   0   0   0   0   0]
 [181 176  46 191 152  59   0   0   0   0   0   0   0   0   0   0]
 [ 63 181 191 178 186  40   0   0   0   0   0   0   0   0   0   0]]
v_offset: [[469256 469257 469258 469259 469260 469261 469262 469263]
 [469272 469273 469274 469275 469276 469277 469278 469279]
 [469288 469289 469290 469291 469292 469293 469294 469295]
 [469304 469305 469306 469307 469308 469309 469310 469311]
 [469320 469321 469322 469323 469324 469325 469326 469327]
 [469336 469337 469338 469339 469340 469341 469342 469343]
 [469352 469353 469354 469355 469356 469357 469358 469359]
 [469368 469369 469370 469371 469372 469373 469374 469375]
 [469384 469385 469386 469387 469388 469389 469390 469391]
 [469400 469401 469402 469403 469404 469405 469406 469407]
 [469416 469417 469418 469419 469420 469421 469422 469423]
 [469432 469433 469434 469435 469436 469437 469438 469439]
 [469448 469449 469450 469451 469452 469453 469454 469455]
 [469464 469465 469466 469467 469468 469469 469470 469471]
 [469480 469481 469482 469483 469484 469485 469486 469487]
 [469496 469497 469498 469499 469500 469501 469502 469503]]
V_load: [[ 68  57  42 179  61  50 169 147]
 [ 50 188 160  42 170 173 172 170]
 [187 182 182  41  51  37 186 183]
 [184  64 183  53 174 160  58 194]
 [181  59  47 184 188  50  57  61]
 [186  36 171  49  63  61  61 129]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]]
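Annotation: for kv_head_idx=1 the key/value addresses shift by head_size=8 relative to the previous program (469256 = 469248 + 8), and consecutive tokens sit num_kv_heads * head_size = 16 elements apart. K is gathered transposed ([head_size, tokens], one token per column) while V keeps one token per row. A sketch of that pointer arithmetic, with the base address and strides inferred from the dump (not vLLM's actual expressions):

```python
import numpy as np

base = 469248                             # inferred first element of this KV page
num_kv_heads, head_size, BLOCK_N = 2, 8, 16
kv_head_idx = 1
token_stride = num_kv_heads * head_size   # 16 elements per cached token

tok, dim = np.arange(BLOCK_N), np.arange(head_size)
# K transposed: [head_size, BLOCK_N], one token per column.
k_offset = base + kv_head_idx * head_size + tok[None, :] * token_stride + dim[:, None]
# V: [BLOCK_N, head_size], one token per row.
v_offset = base + kv_head_idx * head_size + tok[:, None] * token_stride + dim[None, :]
print(k_offset[0, :3])                    # [469256 469272 469288], as printed
```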
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/vllm/attention/ops/triton_unified_attention.py(264)kernel_unified_attention_2d()
-> S = tl.where(
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
q_block_global_idx: [1], kv_head_idx: [0]
q_block_global_idx: [1], kv_head_idx: [0]
query_offset_0: [4 4 4 4 5 5 5 5 6 6 6 6 7 7 7 7]
query_offset_1: [0 1 2 3 0 1 2 3 0 1 2 3 0 1 2 3]
query_offset: [[256 257 258 259 260 261 262 263]
 [264 265 266 267 268 269 270 271]
 [272 273 274 275 276 277 278 279]
 [280 281 282 283 284 285 286 287]
 [320 321 322 323 324 325 326 327]
 [328 329 330 331 332 333 334 335]
 [336 337 338 339 340 341 342 343]
 [344 345 346 347 348 349 350 351]
 [384 385 386 387 388 389 390 391]
 [392 393 394 395 396 397 398 399]
 [400 401 402 403 404 405 406 407]
 [408 409 410 411 412 413 414 415]
 [448 449 450 451 452 453 454 455]
 [456 457 458 459 460 461 462 463]
 [464 465 466 467 468 469 470 471]
 [472 473 474 475 476 477 478 479]]
Q: [[ 39  57 168 176 185  59  55 151]
 [ 45 175  28  57  51 164 175 190]
 [ 25 184 162  50  45 175  55 181]
 [145 186 178  57 179  57 179 161]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]]
q_block_global_idx: [1], kv_head_idx: [0]
k_offset: [[469248 469264 469280 469296 469312 469328 469344 469360 469376 469392 469408 469424 469440 469456 469472 469488]
 [469249 469265 469281 469297 469313 469329 469345 469361 469377 469393 469409 469425 469441 469457 469473 469489]
 [469250 469266 469282 469298 469314 469330 469346 469362 469378 469394 469410 469426 469442 469458 469474 469490]
 [469251 469267 469283 469299 469315 469331 469347 469363 469379 469395 469411 469427 469443 469459 469475 469491]
 [469252 469268 469284 469300 469316 469332 469348 469364 469380 469396 469412 469428 469444 469460 469476 469492]
 [469253 469269 469285 469301 469317 469333 469349 469365 469381 469397 469413 469429 469445 469461 469477 469493]
 [469254 469270 469286 469302 469318 469334 469350 469366 469382 469398 469414 469430 469446 469462 469478 469494]
 [469255 469271 469287 469303 469319 469335 469351 469367 469383 469399 469415 469431 469447 469463 469479 469495]]
K_load: [[186 168 186  51 132 170  47   0   0   0   0   0   0   0   0   0]
 [ 58  56 191 178 174 171  49   0   0   0   0   0   0   0   0   0]
 [189  35  54 180  50 184  57   0   0   0   0   0   0   0   0   0]
 [ 59 177  48  61 181  48  49   0   0   0   0   0   0   0   0   0]
 [179  66 180 179 180  43  57   0   0   0   0   0   0   0   0   0]
 [172  40  43  45  59 162 177   0   0   0   0   0   0   0   0   0]
 [ 49  43  38  60 180 169 178   0   0   0   0   0   0   0   0   0]
 [ 53  50 157  41  46 190 169   0   0   0   0   0   0   0   0   0]]
v_offset: [[469248 469249 469250 469251 469252 469253 469254 469255]
 [469264 469265 469266 469267 469268 469269 469270 469271]
 [469280 469281 469282 469283 469284 469285 469286 469287]
 [469296 469297 469298 469299 469300 469301 469302 469303]
 [469312 469313 469314 469315 469316 469317 469318 469319]
 [469328 469329 469330 469331 469332 469333 469334 469335]
 [469344 469345 469346 469347 469348 469349 469350 469351]
 [469360 469361 469362 469363 469364 469365 469366 469367]
 [469376 469377 469378 469379 469380 469381 469382 469383]
 [469392 469393 469394 469395 469396 469397 469398 469399]
 [469408 469409 469410 469411 469412 469413 469414 469415]
 [469424 469425 469426 469427 469428 469429 469430 469431]
 [469440 469441 469442 469443 469444 469445 469446 469447]
 [469456 469457 469458 469459 469460 469461 469462 469463]
 [469472 469473 469474 469475 469476 469477 469478 469479]
 [469488 469489 469490 469491 469492 469493 469494 469495]]
V_load: [[181  50 178  57 177 187  59  21]
 [173  57  39  41  42 194 183  54]
 [ 17 179 181  53 151 162  60 186]
 [177 170  39 179 188  43  59  55]
 [ 34  36 191  44  32  34  50  39]
 [ 46  52 147  59  43 180  21  54]
 [  1 193 157 154 183 175  43  61]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]]
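Annotation: q_block 1 covers query tokens 4..7, but the sequence only has 5 query tokens, so only token 4 is real; its four head-rows carry data and the remaining 12 rows of Q load as zeros. A sketch of the row mask that produces this, mirroring a masked load with other=0.0 (variable names are illustrative):

```python
import numpy as np

num_tokens, num_queries_per_kv, BLOCK_Q, BLOCK_M = 5, 4, 4, 16
q_block_global_idx = 1

query_offset_0 = q_block_global_idx * BLOCK_Q + np.arange(BLOCK_M) // num_queries_per_kv
row_is_real = query_offset_0 < num_tokens   # True for rows 0..3 only
print(row_is_real)
```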
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/vllm/attention/ops/triton_unified_attention.py(264)kernel_unified_attention_2d()
-> S = tl.where(
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
q_block_global_idx: [1], kv_head_idx: [1]
q_block_global_idx: [1], kv_head_idx: [1]
query_offset_0: [4 4 4 4 5 5 5 5 6 6 6 6 7 7 7 7]
query_offset_1: [4 5 6 7 4 5 6 7 4 5 6 7 4 5 6 7]
query_offset: [[288 289 290 291 292 293 294 295]
 [296 297 298 299 300 301 302 303]
 [304 305 306 307 308 309 310 311]
 [312 313 314 315 316 317 318 319]
 [352 353 354 355 356 357 358 359]
 [360 361 362 363 364 365 366 367]
 [368 369 370 371 372 373 374 375]
 [376 377 378 379 380 381 382 383]
 [416 417 418 419 420 421 422 423]
 [424 425 426 427 428 429 430 431]
 [432 433 434 435 436 437 438 439]
 [440 441 442 443 444 445 446 447]
 [480 481 482 483 484 485 486 487]
 [488 489 490 491 492 493 494 495]
 [496 497 498 499 500 501 502 503]
 [504 505 506 507 508 509 510 511]]
Q: [[ 51 179  36 190 177 179 160 178]
 [165  62  49 152  64 168 190 174]
 [185  58  43 179 174 184 183  49]
 [ 58 191  55 180  19  63 181 179]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]]
q_block_global_idx: [1], kv_head_idx: [1]
k_offset: [[469256 469272 469288 469304 469320 469336 469352 469368 469384 469400 469416 469432 469448 469464 469480 469496]
 [469257 469273 469289 469305 469321 469337 469353 469369 469385 469401 469417 469433 469449 469465 469481 469497]
 [469258 469274 469290 469306 469322 469338 469354 469370 469386 469402 469418 469434 469450 469466 469482 469498]
 [469259 469275 469291 469307 469323 469339 469355 469371 469387 469403 469419 469435 469451 469467 469483 469499]
 [469260 469276 469292 469308 469324 469340 469356 469372 469388 469404 469420 469436 469452 469468 469484 469500]
 [469261 469277 469293 469309 469325 469341 469357 469373 469389 469405 469421 469437 469453 469469 469485 469501]
 [469262 469278 469294 469310 469326 469342 469358 469374 469390 469406 469422 469438 469454 469470 469486 469502]
 [469263 469279 469295 469311 469327 469343 469359 469375 469391 469407 469423 469439 469455 469471 469487 469503]]
K_load: [[ 57  42  50  60  67  52 183   0   0   0   0   0   0   0   0   0]
 [ 54 157 164  52  42 179 189   0   0   0   0   0   0   0   0   0]
 [ 58 176  52 152  41  57 186   0   0   0   0   0   0   0   0   0]
 [ 51 184 189 182  43  50 164   0   0   0   0   0   0   0   0   0]
 [ 49  40  51  41 192 169  51   0   0   0   0   0   0   0   0   0]
 [134  57  64 173  64 168  33   0   0   0   0   0   0   0   0   0]
 [181 176  46 191 152  59 184   0   0   0   0   0   0   0   0   0]
 [ 63 181 191 178 186  40  48   0   0   0   0   0   0   0   0   0]]
v_offset: [[469256 469257 469258 469259 469260 469261 469262 469263]
 [469272 469273 469274 469275 469276 469277 469278 469279]
 [469288 469289 469290 469291 469292 469293 469294 469295]
 [469304 469305 469306 469307 469308 469309 469310 469311]
 [469320 469321 469322 469323 469324 469325 469326 469327]
 [469336 469337 469338 469339 469340 469341 469342 469343]
 [469352 469353 469354 469355 469356 469357 469358 469359]
 [469368 469369 469370 469371 469372 469373 469374 469375]
 [469384 469385 469386 469387 469388 469389 469390 469391]
 [469400 469401 469402 469403 469404 469405 469406 469407]
 [469416 469417 469418 469419 469420 469421 469422 469423]
 [469432 469433 469434 469435 469436 469437 469438 469439]
 [469448 469449 469450 469451 469452 469453 469454 469455]
 [469464 469465 469466 469467 469468 469469 469470 469471]
 [469480 469481 469482 469483 469484 469485 469486 469487]
 [469496 469497 469498 469499 469500 469501 469502 469503]]
V_load: [[ 68  57  42 179  61  50 169 147]
 [ 50 188 160  42 170 173 172 170]
 [187 182 182  41  51  37 186 183]
 [184  64 183  53 174 160  58 194]
 [181  59  47 184 188  50  57  61]
 [186  36 171  49  63  61  61 129]
 [ 55  54 173  56  63  60  45  56]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]
 [  0   0   0   0   0   0   0   0]]
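Annotation: K_load/V_load had 6 valid entries in q_block 0 but 7 in q_block 1. With seq_len=7 cached keys and 5 new query tokens, the first 2 keys are prior context, and under a causal mask query token i may see keys 0..(seq_len - num_tokens + i); the last real token of block 0 is token 3 (6 keys), of block 1 is token 4 (all 7). A sketch of that count, assuming this standard causal layout:

```python
seq_len, num_tokens, BLOCK_Q, num_blocks = 7, 5, 4, 2

for q_block in range(num_blocks):
    last_real_token = min((q_block + 1) * BLOCK_Q, num_tokens) - 1
    valid_keys = seq_len - num_tokens + last_real_token + 1
    print(f"q_block={q_block}: {valid_keys} valid keys")   # 6, then 7
```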
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/vllm/attention/ops/triton_unified_attention.py(264)kernel_unified_attention_2d()
-> S = tl.where(
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB set_trace >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
> /home/yiliu7/workspace/vllm/tests/kernels/attention/test_triton_unified_attention.py(256)test_triton_unified_attn()
-> torch.testing.assert_close(output, ref_output, atol=atol, rtol=rtol),
(Pdb)
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> PDB continue >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
PASSED
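Annotation: the test finishes by comparing the kernel output against the reference elementwise. A minimal usage sketch of the torch.testing.assert_close call at test line 256 (the tolerance values below are placeholders, not the test's actual atol/rtol):

```python
import torch

output = torch.tensor([1.000, 2.000])
ref_output = torch.tensor([1.001, 2.002])
# Raises AssertionError if |output - ref_output| > atol + rtol * |ref_output|.
torch.testing.assert_close(output, ref_output, atol=1e-2, rtol=1e-2)
```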
=============================== warnings summary ===============================
:488
  :488: DeprecationWarning: builtin type SwigPyPacked has no __module__ attribute

:488
  :488: DeprecationWarning: builtin type SwigPyObject has no __module__ attribute

tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
  /usr/local/lib/python3.12/dist-packages/triton/runtime/interpreter.py:818: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)
    tensor.__index__ = lambda self: int(self.handle.data)

tests/kernels/attention/test_triton_unified_attention.py::test_triton_unified_attn[q_dtype0-2048-None-dtype0-None-16-8-num_heads0-seq_lens0]
  /usr/local/lib/python3.12/dist-packages/triton/runtime/interpreter.py:463: RuntimeWarning: invalid value encountered in divide
    return TensorHandle(op(lhs.data, rhs.data), lhs.dtype.scalar)

-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
======================= 1 passed, 11 warnings in 10.85s ========================
sys:1: DeprecationWarning: builtin type swigvarlink has no __module__ attribute
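Annotation: the interpreter.py frames in the warnings, and the fact that pdb breakpoints fired inside kernel_unified_attention_2d at all, suggest this run used Triton's interpreter mode, which executes kernels eagerly with NumPy instead of compiling them. It is enabled via an environment variable before Triton is imported:

```python
import os

# Must be set before `import triton` (commonly done on the command line:
# TRITON_INTERPRET=1 pytest tests/kernels/attention/test_triton_unified_attention.py -s)
os.environ["TRITON_INTERPRET"] = "1"
```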