| repo_id (stringlengths 5–115) | size (int64 590–5.01M) | file_path (stringlengths 4–212) | content (stringlengths 590–5.01M) |
|---|---|---|---|
tactcomplabs/xbgas-binutils-gdb
| 1,552
|
gas/testsuite/gas/i386/x86-64-inval-avx512f.s
|
# Check illegal AVX512F instructions (negative test: gas must reject every line; NOTE(review): companion .l error file likely references line numbers - comments are appended in-line only, do not insert or delete lines)
.text
.allow_index_reg
_start:
mov {sae}, %rax{%k1} # {sae} and {%k} masking are EVEX features; mov supports neither
mov {sae}, %rax # {sae} invalid: mov has no rounding/suppress-all-exceptions form
mov %rbx, %rax{%k2} # opmask invalid on mov
vaddps %zmm3, %zmm1, %zmm2{z}{%k1}{z} # {z} given twice; legal order is {%k}{z} once
vaddps %zmm3, %zmm1{%k3}, %zmm2{z} # mask on a source operand; {z} without any mask
vaddps %zmm3, %zmm1{%k1}, %zmm2{%k2} # only the destination may carry an opmask
vcvtps2pd (%rax), %zmm1{1to8} # broadcast specifier on a register destination
vcvtps2pd (%rax){1to16}, %zmm1 # wrong broadcast factor: 512-bit pd result takes {1to8}
vcvtps2pd (%rax){%k1}, %zmm1 # opmask belongs on the destination, not the memory source
vcvtps2pd (%rax){z}, %zmm1 # {z} belongs on the destination, not the memory source
vgatherqpd (%rdi),%zmm6{%k1} # gather requires a VSIB (vector-index) memory operand
vgatherqpd (%zmm2),%zmm6{%k1} # vector register used as base; must be an index register
vpscatterdd %zmm6,(%rdi){%k1} # scatter requires a VSIB memory operand
vpscatterdd %zmm6,(%zmm2){%k1} # vector register used as base; must be an index register
.intel_syntax noprefix # same invalid forms repeated in Intel syntax
mov rax{k1}, {sae} # {sae}/{k} invalid on mov
mov rax, {sae} # {sae} invalid on mov
mov rax{k2}, rbx # opmask invalid on mov
vaddps zmm2{z}{k1}{z}, zmm1, zmm3 # duplicate {z}
vaddps zmm2{z}, zmm1{k3}, zmm3 # {z} without a mask; mask on a source
vaddps zmm2{k2}, zmm1{k1}, zmm3 # masks on two operands
vcvtps2pd zmm1{1to8}, [rax] # broadcast on a register destination
vcvtps2pd zmm1, [rax]{1to16} # wrong broadcast factor (this form takes {1to8})
vcvtps2pd zmm1, [rax]{k1} # opmask on the memory source
vcvtps2pd zmm1, [rax]{z} # {z} on the memory source
vgatherqpd zmm6{k1}, ZMMWORD PTR [rdi] # gather without a VSIB index
vgatherqpd zmm6{k1}, ZMMWORD PTR [zmm2+riz] # vector register as base, not index
vpscatterdd ZMMWORD PTR [rdi]{k1}, zmm6 # scatter without a VSIB index
vpscatterdd ZMMWORD PTR [zmm2+riz]{k1}, zmm6 # vector register as base, not index
vaddps zmm2, zmm1, QWORD PTR [rax]{1to8} # ps broadcasts DWORD elements, not QWORD
vaddps zmm2, zmm1, QWORD PTR [rax]{1to16} # wrong element size for a ps broadcast
vaddpd zmm2, zmm1, DWORD PTR [rax]{1to8} # pd broadcasts QWORD elements, not DWORD
vaddpd zmm2, zmm1, DWORD PTR [rax]{1to16} # wrong element size and factor for pd
vaddps zmm2, zmm1, ZMMWORD PTR [rax]{1to16} # broadcast combined with full-vector memory size
vaddps zmm2, zmm1, DWORD PTR [rax] # element-sized memory operand without a broadcast
vaddpd zmm2, zmm1, QWORD PTR [rax] # element-sized memory operand without a broadcast
.att_syntax prefix
vaddps %zmm0, %zmm1, %zmm2{%rcx} # general-purpose register where a mask register is required
vaddps %zmm0, %zmm1, %zmm2{z} # {z} without a mask register
.intel_syntax noprefix
vaddps zmm2{rcx}, zmm1, zmm0 # general-purpose register where a mask register is required
vaddps zmm2{z}, zmm1, zmm0 # {z} without a mask register
vcvtps2qq xmm0, DWORD PTR [rax] # memory operand size mismatch for this form
.att_syntax prefix
vdpbf16ps 8(%rax){1to8}, %zmm2, %zmm2 # wrong broadcast factor for a 512-bit op (needs {1to16})
vcvtne2ps2bf16 8(%rax){1to8}, %zmm2, %zmm2 # wrong broadcast factor for a 512-bit op (needs {1to16})
|
tactcomplabs/xbgas-binutils-gdb
| 1,364
|
gas/testsuite/gas/i386/align-branch-1.s
|
# Instruction-size filler used to exercise branch-alignment padding
# (align-branch machinery).  NOTE(review): the exact mix, order and
# encodings of these instructions determine where the assembler inserts
# padding/prefixes in the expected disassembly - do not reorder, dedupe,
# or "clean up" the repeated moves; the repetition is the test payload.
.text
.globl foo
.p2align 4
foo:
movl %eax, %gs:0x1 # segment-prefixed store: deliberately longer encoding
pushl %ebp
pushl %ebp
pushl %ebp
pushl %ebp
movl %esp, %ebp
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
cmp %eax, %ebp # cmp+jcc pair: a fusible branch site for the aligner
je .L_2
movl %esi, -12(%ebx) # uses %ebx base - a distinct encoding on purpose
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
popl %ebp
popl %ebp
je .L_2
popl %ebp
je .L_2
movl %eax, -4(%esp) # %esp base needs a SIB byte - another size variant
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
jmp .L_3
jmp .L_3
jmp .L_3
movl %eax, -4(%ebp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
popl %ebp
cmp %eax, %ebp
je .L_2
jmp .L_3
.L_2:
movl -12(%ebp), %eax
movl %eax, -4(%ebp)
.L_3:
movl %esi, -1200(%ebp) # 32-bit displacement: longer encoding than -12(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, 12(%ebp)
jmp bar # external branch target (bar defined elsewhere)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, (%ebp)
je .L_3
je .L_3
|
tactcomplabs/xbgas-binutils-gdb
| 17,384
|
gas/testsuite/gas/i386/x86-64-avx512f_vl-opts.s
|
# Check 64bit AVX512{F,VL} swap instructions
# NOTE(review): the ".s" mnemonic pseudo-suffix asks gas to emit the
# non-default ("swapped"-operand / store-form) encoding of the reg-reg
# move, so each plain/.s pair assembles the same operation to different
# opcodes.  Each mnemonic is covered with: no mask, {%k7} merging-mask,
# and {%k7}{z} zero-mask - for xmm (128-bit VL) and ymm (256-bit VL)
# registers, each group repeated twice, in AT&T and then Intel syntax.
# The repetition is intentional test coverage - do not deduplicate.
.allow_index_reg
.text
_start:
vmovapd %xmm29, %xmm30 # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd %xmm29, %xmm30 # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd %ymm29, %ymm30 # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd %ymm29, %ymm30 # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps %xmm29, %xmm30 # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30 # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps %xmm29, %xmm30 # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30 # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps %ymm29, %ymm30 # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30 # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps %ymm29, %ymm30 # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30 # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd %xmm29, %xmm30 # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd %xmm29, %xmm30 # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30 # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd %ymm29, %ymm30 # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd %ymm29, %ymm30 # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30 # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups %xmm29, %xmm30 # AVX512{F,VL}
vmovups.s %xmm29, %xmm30 # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups %xmm29, %xmm30 # AVX512{F,VL}
vmovups.s %xmm29, %xmm30 # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups.s %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups %ymm29, %ymm30 # AVX512{F,VL}
vmovups.s %ymm29, %ymm30 # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups %ymm29, %ymm30 # AVX512{F,VL}
vmovups.s %ymm29, %ymm30 # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups.s %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
# Same coverage repeated in Intel syntax (operand order reversed).
.intel_syntax noprefix
vmovapd xmm30, xmm29 # AVX512{F,VL}
vmovapd.s xmm30, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd xmm30, xmm29 # AVX512{F,VL}
vmovapd.s xmm30, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd ymm30, ymm29 # AVX512{F,VL}
vmovapd.s ymm30, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd ymm30, ymm29 # AVX512{F,VL}
vmovapd.s ymm30, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps xmm30, xmm29 # AVX512{F,VL}
vmovaps.s xmm30, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps xmm30, xmm29 # AVX512{F,VL}
vmovaps.s xmm30, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps ymm30, ymm29 # AVX512{F,VL}
vmovaps.s ymm30, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps ymm30, ymm29 # AVX512{F,VL}
vmovaps.s ymm30, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32 xmm30, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32 ymm30, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64 xmm30, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64 ymm30, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32 xmm30, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32 ymm30, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64 xmm30, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64 ymm30, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd xmm30, xmm29 # AVX512{F,VL}
vmovupd.s xmm30, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd xmm30, xmm29 # AVX512{F,VL}
vmovupd.s xmm30, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd ymm30, ymm29 # AVX512{F,VL}
vmovupd.s ymm30, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd ymm30, ymm29 # AVX512{F,VL}
vmovupd.s ymm30, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups xmm30, xmm29 # AVX512{F,VL}
vmovups.s xmm30, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups xmm30, xmm29 # AVX512{F,VL}
vmovups.s xmm30, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups.s xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups ymm30, ymm29 # AVX512{F,VL}
vmovups.s ymm30, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups ymm30, ymm29 # AVX512{F,VL}
vmovups.s ymm30, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups.s ymm30{k7}{z}, ymm29 # AVX512{F,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 63,430
|
gas/testsuite/gas/i386/x86-64-hle.s
|
# Check 64bit HLE instructions
.allow_index_reg
.text
_start:
# Tests for op imm32 rax
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%rcx)
lock xacquire adcb $100,(%rcx)
xrelease lock adcb $100,(%rcx)
lock xrelease adcb $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcb $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcb $100,(%rcx)
xacquire lock addb $100,(%rcx)
lock xacquire addb $100,(%rcx)
xrelease lock addb $100,(%rcx)
lock xrelease addb $100,(%rcx)
.byte 0xf0; .byte 0xf2; addb $100,(%rcx)
.byte 0xf0; .byte 0xf3; addb $100,(%rcx)
xacquire lock andb $100,(%rcx)
lock xacquire andb $100,(%rcx)
xrelease lock andb $100,(%rcx)
lock xrelease andb $100,(%rcx)
.byte 0xf0; .byte 0xf2; andb $100,(%rcx)
.byte 0xf0; .byte 0xf3; andb $100,(%rcx)
xrelease movb $100,(%rcx)
xacquire lock orb $100,(%rcx)
lock xacquire orb $100,(%rcx)
xrelease lock orb $100,(%rcx)
lock xrelease orb $100,(%rcx)
.byte 0xf0; .byte 0xf2; orb $100,(%rcx)
.byte 0xf0; .byte 0xf3; orb $100,(%rcx)
xacquire lock sbbb $100,(%rcx)
lock xacquire sbbb $100,(%rcx)
xrelease lock sbbb $100,(%rcx)
lock xrelease sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%rcx)
xacquire lock subb $100,(%rcx)
lock xacquire subb $100,(%rcx)
xrelease lock subb $100,(%rcx)
lock xrelease subb $100,(%rcx)
.byte 0xf0; .byte 0xf2; subb $100,(%rcx)
.byte 0xf0; .byte 0xf3; subb $100,(%rcx)
xacquire lock xorb $100,(%rcx)
lock xacquire xorb $100,(%rcx)
xrelease lock xorb $100,(%rcx)
lock xrelease xorb $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorb $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorb $100,(%rcx)
# Tests for op imm16 regs/m16
xacquire lock adcw $1000,(%rcx)
lock xacquire adcw $1000,(%rcx)
xrelease lock adcw $1000,(%rcx)
lock xrelease adcw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; adcw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; adcw $1000,(%rcx)
xacquire lock addw $1000,(%rcx)
lock xacquire addw $1000,(%rcx)
xrelease lock addw $1000,(%rcx)
lock xrelease addw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; addw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; addw $1000,(%rcx)
xacquire lock andw $1000,(%rcx)
lock xacquire andw $1000,(%rcx)
xrelease lock andw $1000,(%rcx)
lock xrelease andw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; andw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; andw $1000,(%rcx)
xrelease movw $1000,(%rcx)
xacquire lock orw $1000,(%rcx)
lock xacquire orw $1000,(%rcx)
xrelease lock orw $1000,(%rcx)
lock xrelease orw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; orw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; orw $1000,(%rcx)
xacquire lock sbbw $1000,(%rcx)
lock xacquire sbbw $1000,(%rcx)
xrelease lock sbbw $1000,(%rcx)
lock xrelease sbbw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; sbbw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; sbbw $1000,(%rcx)
xacquire lock subw $1000,(%rcx)
lock xacquire subw $1000,(%rcx)
xrelease lock subw $1000,(%rcx)
lock xrelease subw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; subw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; subw $1000,(%rcx)
xacquire lock xorw $1000,(%rcx)
lock xacquire xorw $1000,(%rcx)
xrelease lock xorw $1000,(%rcx)
lock xrelease xorw $1000,(%rcx)
.byte 0xf0; .byte 0xf2; xorw $1000,(%rcx)
.byte 0xf0; .byte 0xf3; xorw $1000,(%rcx)
# Tests for op imm32 regl/m32
xacquire lock adcl $10000000,(%rcx)
lock xacquire adcl $10000000,(%rcx)
xrelease lock adcl $10000000,(%rcx)
lock xrelease adcl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; adcl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; adcl $10000000,(%rcx)
xacquire lock addl $10000000,(%rcx)
lock xacquire addl $10000000,(%rcx)
xrelease lock addl $10000000,(%rcx)
lock xrelease addl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; addl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; addl $10000000,(%rcx)
xacquire lock andl $10000000,(%rcx)
lock xacquire andl $10000000,(%rcx)
xrelease lock andl $10000000,(%rcx)
lock xrelease andl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; andl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; andl $10000000,(%rcx)
xrelease movl $10000000,(%rcx)
xacquire lock orl $10000000,(%rcx)
lock xacquire orl $10000000,(%rcx)
xrelease lock orl $10000000,(%rcx)
lock xrelease orl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; orl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; orl $10000000,(%rcx)
xacquire lock sbbl $10000000,(%rcx)
lock xacquire sbbl $10000000,(%rcx)
xrelease lock sbbl $10000000,(%rcx)
lock xrelease sbbl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; sbbl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; sbbl $10000000,(%rcx)
xacquire lock subl $10000000,(%rcx)
lock xacquire subl $10000000,(%rcx)
xrelease lock subl $10000000,(%rcx)
lock xrelease subl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; subl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; subl $10000000,(%rcx)
xacquire lock xorl $10000000,(%rcx)
lock xacquire xorl $10000000,(%rcx)
xrelease lock xorl $10000000,(%rcx)
lock xrelease xorl $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; xorl $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; xorl $10000000,(%rcx)
# Tests for op imm32 regq/m64
xacquire lock adcq $10000000,(%rcx)
lock xacquire adcq $10000000,(%rcx)
xrelease lock adcq $10000000,(%rcx)
lock xrelease adcq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; adcq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; adcq $10000000,(%rcx)
xacquire lock addq $10000000,(%rcx)
lock xacquire addq $10000000,(%rcx)
xrelease lock addq $10000000,(%rcx)
lock xrelease addq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; addq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; addq $10000000,(%rcx)
xacquire lock andq $10000000,(%rcx)
lock xacquire andq $10000000,(%rcx)
xrelease lock andq $10000000,(%rcx)
lock xrelease andq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; andq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; andq $10000000,(%rcx)
xrelease movq $10000000,(%rcx)
xacquire lock orq $10000000,(%rcx)
lock xacquire orq $10000000,(%rcx)
xrelease lock orq $10000000,(%rcx)
lock xrelease orq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; orq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; orq $10000000,(%rcx)
xacquire lock sbbq $10000000,(%rcx)
lock xacquire sbbq $10000000,(%rcx)
xrelease lock sbbq $10000000,(%rcx)
lock xrelease sbbq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; sbbq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; sbbq $10000000,(%rcx)
xacquire lock subq $10000000,(%rcx)
lock xacquire subq $10000000,(%rcx)
xrelease lock subq $10000000,(%rcx)
lock xrelease subq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; subq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; subq $10000000,(%rcx)
xacquire lock xorq $10000000,(%rcx)
lock xacquire xorq $10000000,(%rcx)
xrelease lock xorq $10000000,(%rcx)
lock xrelease xorq $10000000,(%rcx)
.byte 0xf0; .byte 0xf2; xorq $10000000,(%rcx)
.byte 0xf0; .byte 0xf3; xorq $10000000,(%rcx)
# Tests for op imm8 regs/m16
xacquire lock adcw $100,(%rcx)
lock xacquire adcw $100,(%rcx)
xrelease lock adcw $100,(%rcx)
lock xrelease adcw $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcw $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcw $100,(%rcx)
xacquire lock addw $100,(%rcx)
lock xacquire addw $100,(%rcx)
xrelease lock addw $100,(%rcx)
lock xrelease addw $100,(%rcx)
.byte 0xf0; .byte 0xf2; addw $100,(%rcx)
.byte 0xf0; .byte 0xf3; addw $100,(%rcx)
xacquire lock andw $100,(%rcx)
lock xacquire andw $100,(%rcx)
xrelease lock andw $100,(%rcx)
lock xrelease andw $100,(%rcx)
.byte 0xf0; .byte 0xf2; andw $100,(%rcx)
.byte 0xf0; .byte 0xf3; andw $100,(%rcx)
xacquire lock btcw $100,(%rcx)
lock xacquire btcw $100,(%rcx)
xrelease lock btcw $100,(%rcx)
lock xrelease btcw $100,(%rcx)
.byte 0xf0; .byte 0xf2; btcw $100,(%rcx)
.byte 0xf0; .byte 0xf3; btcw $100,(%rcx)
xacquire lock btrw $100,(%rcx)
lock xacquire btrw $100,(%rcx)
xrelease lock btrw $100,(%rcx)
lock xrelease btrw $100,(%rcx)
.byte 0xf0; .byte 0xf2; btrw $100,(%rcx)
.byte 0xf0; .byte 0xf3; btrw $100,(%rcx)
xacquire lock btsw $100,(%rcx)
lock xacquire btsw $100,(%rcx)
xrelease lock btsw $100,(%rcx)
lock xrelease btsw $100,(%rcx)
.byte 0xf0; .byte 0xf2; btsw $100,(%rcx)
.byte 0xf0; .byte 0xf3; btsw $100,(%rcx)
xrelease movw $100,(%rcx)
xacquire lock orw $100,(%rcx)
lock xacquire orw $100,(%rcx)
xrelease lock orw $100,(%rcx)
lock xrelease orw $100,(%rcx)
.byte 0xf0; .byte 0xf2; orw $100,(%rcx)
.byte 0xf0; .byte 0xf3; orw $100,(%rcx)
xacquire lock sbbw $100,(%rcx)
lock xacquire sbbw $100,(%rcx)
xrelease lock sbbw $100,(%rcx)
lock xrelease sbbw $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbw $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbw $100,(%rcx)
xacquire lock subw $100,(%rcx)
lock xacquire subw $100,(%rcx)
xrelease lock subw $100,(%rcx)
lock xrelease subw $100,(%rcx)
.byte 0xf0; .byte 0xf2; subw $100,(%rcx)
.byte 0xf0; .byte 0xf3; subw $100,(%rcx)
xacquire lock xorw $100,(%rcx)
lock xacquire xorw $100,(%rcx)
xrelease lock xorw $100,(%rcx)
lock xrelease xorw $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorw $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorw $100,(%rcx)
# Tests for op imm8 regl/m32
xacquire lock adcl $100,(%rcx)
lock xacquire adcl $100,(%rcx)
xrelease lock adcl $100,(%rcx)
lock xrelease adcl $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcl $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcl $100,(%rcx)
xacquire lock addl $100,(%rcx)
lock xacquire addl $100,(%rcx)
xrelease lock addl $100,(%rcx)
lock xrelease addl $100,(%rcx)
.byte 0xf0; .byte 0xf2; addl $100,(%rcx)
.byte 0xf0; .byte 0xf3; addl $100,(%rcx)
xacquire lock andl $100,(%rcx)
lock xacquire andl $100,(%rcx)
xrelease lock andl $100,(%rcx)
lock xrelease andl $100,(%rcx)
.byte 0xf0; .byte 0xf2; andl $100,(%rcx)
.byte 0xf0; .byte 0xf3; andl $100,(%rcx)
xacquire lock btcl $100,(%rcx)
lock xacquire btcl $100,(%rcx)
xrelease lock btcl $100,(%rcx)
lock xrelease btcl $100,(%rcx)
.byte 0xf0; .byte 0xf2; btcl $100,(%rcx)
.byte 0xf0; .byte 0xf3; btcl $100,(%rcx)
xacquire lock btrl $100,(%rcx)
lock xacquire btrl $100,(%rcx)
xrelease lock btrl $100,(%rcx)
lock xrelease btrl $100,(%rcx)
.byte 0xf0; .byte 0xf2; btrl $100,(%rcx)
.byte 0xf0; .byte 0xf3; btrl $100,(%rcx)
xacquire lock btsl $100,(%rcx)
lock xacquire btsl $100,(%rcx)
xrelease lock btsl $100,(%rcx)
lock xrelease btsl $100,(%rcx)
.byte 0xf0; .byte 0xf2; btsl $100,(%rcx)
.byte 0xf0; .byte 0xf3; btsl $100,(%rcx)
xrelease movl $100,(%rcx)
xacquire lock orl $100,(%rcx)
lock xacquire orl $100,(%rcx)
xrelease lock orl $100,(%rcx)
lock xrelease orl $100,(%rcx)
.byte 0xf0; .byte 0xf2; orl $100,(%rcx)
.byte 0xf0; .byte 0xf3; orl $100,(%rcx)
xacquire lock sbbl $100,(%rcx)
lock xacquire sbbl $100,(%rcx)
xrelease lock sbbl $100,(%rcx)
lock xrelease sbbl $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbl $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbl $100,(%rcx)
xacquire lock subl $100,(%rcx)
lock xacquire subl $100,(%rcx)
xrelease lock subl $100,(%rcx)
lock xrelease subl $100,(%rcx)
.byte 0xf0; .byte 0xf2; subl $100,(%rcx)
.byte 0xf0; .byte 0xf3; subl $100,(%rcx)
xacquire lock xorl $100,(%rcx)
lock xacquire xorl $100,(%rcx)
xrelease lock xorl $100,(%rcx)
lock xrelease xorl $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorl $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorl $100,(%rcx)
# Tests for op imm8 regq/m64
# Each op is exercised with the xacquire/xrelease mnemonics (lock in either
# order) and with the equivalent raw prefix bytes: 0xf0=lock, 0xf2=xacquire
# (repne), 0xf3=xrelease (rep).
xacquire lock adcq $100,(%rcx)
lock xacquire adcq $100,(%rcx)
xrelease lock adcq $100,(%rcx)
lock xrelease adcq $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcq $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcq $100,(%rcx)
xacquire lock addq $100,(%rcx)
lock xacquire addq $100,(%rcx)
xrelease lock addq $100,(%rcx)
lock xrelease addq $100,(%rcx)
.byte 0xf0; .byte 0xf2; addq $100,(%rcx)
.byte 0xf0; .byte 0xf3; addq $100,(%rcx)
xacquire lock andq $100,(%rcx)
lock xacquire andq $100,(%rcx)
xrelease lock andq $100,(%rcx)
lock xrelease andq $100,(%rcx)
.byte 0xf0; .byte 0xf2; andq $100,(%rcx)
.byte 0xf0; .byte 0xf3; andq $100,(%rcx)
xacquire lock btcq $100,(%rcx)
lock xacquire btcq $100,(%rcx)
xrelease lock btcq $100,(%rcx)
lock xrelease btcq $100,(%rcx)
.byte 0xf0; .byte 0xf2; btcq $100,(%rcx)
.byte 0xf0; .byte 0xf3; btcq $100,(%rcx)
xacquire lock btrq $100,(%rcx)
lock xacquire btrq $100,(%rcx)
xrelease lock btrq $100,(%rcx)
lock xrelease btrq $100,(%rcx)
.byte 0xf0; .byte 0xf2; btrq $100,(%rcx)
.byte 0xf0; .byte 0xf3; btrq $100,(%rcx)
xacquire lock btsq $100,(%rcx)
lock xacquire btsq $100,(%rcx)
xrelease lock btsq $100,(%rcx)
lock xrelease btsq $100,(%rcx)
.byte 0xf0; .byte 0xf2; btsq $100,(%rcx)
.byte 0xf0; .byte 0xf3; btsq $100,(%rcx)
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease movq $100,(%rcx)
xacquire lock orq $100,(%rcx)
lock xacquire orq $100,(%rcx)
xrelease lock orq $100,(%rcx)
lock xrelease orq $100,(%rcx)
.byte 0xf0; .byte 0xf2; orq $100,(%rcx)
.byte 0xf0; .byte 0xf3; orq $100,(%rcx)
xacquire lock sbbq $100,(%rcx)
lock xacquire sbbq $100,(%rcx)
xrelease lock sbbq $100,(%rcx)
lock xrelease sbbq $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbq $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbq $100,(%rcx)
xacquire lock subq $100,(%rcx)
lock xacquire subq $100,(%rcx)
xrelease lock subq $100,(%rcx)
lock xrelease subq $100,(%rcx)
.byte 0xf0; .byte 0xf2; subq $100,(%rcx)
.byte 0xf0; .byte 0xf3; subq $100,(%rcx)
xacquire lock xorq $100,(%rcx)
lock xacquire xorq $100,(%rcx)
xrelease lock xorq $100,(%rcx)
lock xrelease xorq $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorq $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorq $100,(%rcx)
# Tests for op imm8 regb/m8
# Byte-sized forms; bt/btc/btr/bts have no m8 variant, so they are absent here.
xacquire lock adcb $100,(%rcx)
lock xacquire adcb $100,(%rcx)
xrelease lock adcb $100,(%rcx)
lock xrelease adcb $100,(%rcx)
.byte 0xf0; .byte 0xf2; adcb $100,(%rcx)
.byte 0xf0; .byte 0xf3; adcb $100,(%rcx)
xacquire lock addb $100,(%rcx)
lock xacquire addb $100,(%rcx)
xrelease lock addb $100,(%rcx)
lock xrelease addb $100,(%rcx)
.byte 0xf0; .byte 0xf2; addb $100,(%rcx)
.byte 0xf0; .byte 0xf3; addb $100,(%rcx)
xacquire lock andb $100,(%rcx)
lock xacquire andb $100,(%rcx)
xrelease lock andb $100,(%rcx)
lock xrelease andb $100,(%rcx)
.byte 0xf0; .byte 0xf2; andb $100,(%rcx)
.byte 0xf0; .byte 0xf3; andb $100,(%rcx)
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease movb $100,(%rcx)
xacquire lock orb $100,(%rcx)
lock xacquire orb $100,(%rcx)
xrelease lock orb $100,(%rcx)
lock xrelease orb $100,(%rcx)
.byte 0xf0; .byte 0xf2; orb $100,(%rcx)
.byte 0xf0; .byte 0xf3; orb $100,(%rcx)
xacquire lock sbbb $100,(%rcx)
lock xacquire sbbb $100,(%rcx)
xrelease lock sbbb $100,(%rcx)
lock xrelease sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%rcx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%rcx)
xacquire lock subb $100,(%rcx)
lock xacquire subb $100,(%rcx)
xrelease lock subb $100,(%rcx)
lock xrelease subb $100,(%rcx)
.byte 0xf0; .byte 0xf2; subb $100,(%rcx)
.byte 0xf0; .byte 0xf3; subb $100,(%rcx)
xacquire lock xorb $100,(%rcx)
lock xacquire xorb $100,(%rcx)
xrelease lock xorb $100,(%rcx)
lock xrelease xorb $100,(%rcx)
.byte 0xf0; .byte 0xf2; xorb $100,(%rcx)
.byte 0xf0; .byte 0xf3; xorb $100,(%rcx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
# Register-to-memory byte forms. xchg gets two extra lines: it implies lock
# when one operand is memory, so bare xacquire/xrelease (no lock) are legal.
xacquire lock adcb %al,(%rcx)
lock xacquire adcb %al,(%rcx)
xrelease lock adcb %al,(%rcx)
lock xrelease adcb %al,(%rcx)
.byte 0xf0; .byte 0xf2; adcb %al,(%rcx)
.byte 0xf0; .byte 0xf3; adcb %al,(%rcx)
xacquire lock addb %al,(%rcx)
lock xacquire addb %al,(%rcx)
xrelease lock addb %al,(%rcx)
lock xrelease addb %al,(%rcx)
.byte 0xf0; .byte 0xf2; addb %al,(%rcx)
.byte 0xf0; .byte 0xf3; addb %al,(%rcx)
xacquire lock andb %al,(%rcx)
lock xacquire andb %al,(%rcx)
xrelease lock andb %al,(%rcx)
lock xrelease andb %al,(%rcx)
.byte 0xf0; .byte 0xf2; andb %al,(%rcx)
.byte 0xf0; .byte 0xf3; andb %al,(%rcx)
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease movb %al,(%rcx)
xacquire lock orb %al,(%rcx)
lock xacquire orb %al,(%rcx)
xrelease lock orb %al,(%rcx)
lock xrelease orb %al,(%rcx)
.byte 0xf0; .byte 0xf2; orb %al,(%rcx)
.byte 0xf0; .byte 0xf3; orb %al,(%rcx)
xacquire lock sbbb %al,(%rcx)
lock xacquire sbbb %al,(%rcx)
xrelease lock sbbb %al,(%rcx)
lock xrelease sbbb %al,(%rcx)
.byte 0xf0; .byte 0xf2; sbbb %al,(%rcx)
.byte 0xf0; .byte 0xf3; sbbb %al,(%rcx)
xacquire lock subb %al,(%rcx)
lock xacquire subb %al,(%rcx)
xrelease lock subb %al,(%rcx)
lock xrelease subb %al,(%rcx)
.byte 0xf0; .byte 0xf2; subb %al,(%rcx)
.byte 0xf0; .byte 0xf3; subb %al,(%rcx)
xacquire lock xchgb %al,(%rcx)
lock xacquire xchgb %al,(%rcx)
xacquire xchgb %al,(%rcx)
xrelease lock xchgb %al,(%rcx)
lock xrelease xchgb %al,(%rcx)
xrelease xchgb %al,(%rcx)
.byte 0xf0; .byte 0xf2; xchgb %al,(%rcx)
.byte 0xf0; .byte 0xf3; xchgb %al,(%rcx)
xacquire lock xorb %al,(%rcx)
lock xacquire xorb %al,(%rcx)
xrelease lock xorb %al,(%rcx)
lock xrelease xorb %al,(%rcx)
.byte 0xf0; .byte 0xf2; xorb %al,(%rcx)
.byte 0xf0; .byte 0xf3; xorb %al,(%rcx)
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
# Word-sized register-to-memory forms (same stanza layout as the byte group).
xacquire lock adcw %ax,(%rcx)
lock xacquire adcw %ax,(%rcx)
xrelease lock adcw %ax,(%rcx)
lock xrelease adcw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; adcw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; adcw %ax,(%rcx)
xacquire lock addw %ax,(%rcx)
lock xacquire addw %ax,(%rcx)
xrelease lock addw %ax,(%rcx)
lock xrelease addw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; addw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; addw %ax,(%rcx)
xacquire lock andw %ax,(%rcx)
lock xacquire andw %ax,(%rcx)
xrelease lock andw %ax,(%rcx)
lock xrelease andw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; andw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; andw %ax,(%rcx)
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease movw %ax,(%rcx)
xacquire lock orw %ax,(%rcx)
lock xacquire orw %ax,(%rcx)
xrelease lock orw %ax,(%rcx)
lock xrelease orw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; orw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; orw %ax,(%rcx)
xacquire lock sbbw %ax,(%rcx)
lock xacquire sbbw %ax,(%rcx)
xrelease lock sbbw %ax,(%rcx)
lock xrelease sbbw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; sbbw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; sbbw %ax,(%rcx)
xacquire lock subw %ax,(%rcx)
lock xacquire subw %ax,(%rcx)
xrelease lock subw %ax,(%rcx)
lock xrelease subw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; subw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; subw %ax,(%rcx)
# xchg with memory implies lock, so bare xacquire/xrelease are also valid.
xacquire lock xchgw %ax,(%rcx)
lock xacquire xchgw %ax,(%rcx)
xacquire xchgw %ax,(%rcx)
xrelease lock xchgw %ax,(%rcx)
lock xrelease xchgw %ax,(%rcx)
xrelease xchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; xchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; xchgw %ax,(%rcx)
xacquire lock xorw %ax,(%rcx)
lock xacquire xorw %ax,(%rcx)
xrelease lock xorw %ax,(%rcx)
lock xrelease xorw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; xorw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; xorw %ax,(%rcx)
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
# Dword-sized register-to-memory forms (same stanza layout as the byte group).
xacquire lock adcl %eax,(%rcx)
lock xacquire adcl %eax,(%rcx)
xrelease lock adcl %eax,(%rcx)
lock xrelease adcl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; adcl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; adcl %eax,(%rcx)
xacquire lock addl %eax,(%rcx)
lock xacquire addl %eax,(%rcx)
xrelease lock addl %eax,(%rcx)
lock xrelease addl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; addl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; addl %eax,(%rcx)
xacquire lock andl %eax,(%rcx)
lock xacquire andl %eax,(%rcx)
xrelease lock andl %eax,(%rcx)
lock xrelease andl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; andl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; andl %eax,(%rcx)
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease movl %eax,(%rcx)
xacquire lock orl %eax,(%rcx)
lock xacquire orl %eax,(%rcx)
xrelease lock orl %eax,(%rcx)
lock xrelease orl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; orl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; orl %eax,(%rcx)
xacquire lock sbbl %eax,(%rcx)
lock xacquire sbbl %eax,(%rcx)
xrelease lock sbbl %eax,(%rcx)
lock xrelease sbbl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; sbbl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; sbbl %eax,(%rcx)
xacquire lock subl %eax,(%rcx)
lock xacquire subl %eax,(%rcx)
xrelease lock subl %eax,(%rcx)
lock xrelease subl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; subl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; subl %eax,(%rcx)
# xchg with memory implies lock, so bare xacquire/xrelease are also valid.
xacquire lock xchgl %eax,(%rcx)
lock xacquire xchgl %eax,(%rcx)
xacquire xchgl %eax,(%rcx)
xrelease lock xchgl %eax,(%rcx)
lock xrelease xchgl %eax,(%rcx)
xrelease xchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; xchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; xchgl %eax,(%rcx)
xacquire lock xorl %eax,(%rcx)
lock xacquire xorl %eax,(%rcx)
xrelease lock xorl %eax,(%rcx)
lock xrelease xorl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; xorl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; xorl %eax,(%rcx)
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
# Qword-sized register-to-memory forms (same stanza layout as the byte group).
xacquire lock adcq %rax,(%rcx)
lock xacquire adcq %rax,(%rcx)
xrelease lock adcq %rax,(%rcx)
lock xrelease adcq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; adcq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; adcq %rax,(%rcx)
xacquire lock addq %rax,(%rcx)
lock xacquire addq %rax,(%rcx)
xrelease lock addq %rax,(%rcx)
lock xrelease addq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; addq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; addq %rax,(%rcx)
xacquire lock andq %rax,(%rcx)
lock xacquire andq %rax,(%rcx)
xrelease lock andq %rax,(%rcx)
lock xrelease andq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; andq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; andq %rax,(%rcx)
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease movq %rax,(%rcx)
xacquire lock orq %rax,(%rcx)
lock xacquire orq %rax,(%rcx)
xrelease lock orq %rax,(%rcx)
lock xrelease orq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; orq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; orq %rax,(%rcx)
xacquire lock sbbq %rax,(%rcx)
lock xacquire sbbq %rax,(%rcx)
xrelease lock sbbq %rax,(%rcx)
lock xrelease sbbq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; sbbq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; sbbq %rax,(%rcx)
xacquire lock subq %rax,(%rcx)
lock xacquire subq %rax,(%rcx)
xrelease lock subq %rax,(%rcx)
lock xrelease subq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; subq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; subq %rax,(%rcx)
# xchg with memory implies lock, so bare xacquire/xrelease are also valid.
xacquire lock xchgq %rax,(%rcx)
lock xacquire xchgq %rax,(%rcx)
xacquire xchgq %rax,(%rcx)
xrelease lock xchgq %rax,(%rcx)
lock xrelease xchgq %rax,(%rcx)
xrelease xchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; xchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; xchgq %rax,(%rcx)
xacquire lock xorq %rax,(%rcx)
lock xacquire xorq %rax,(%rcx)
xrelease lock xorq %rax,(%rcx)
lock xrelease xorq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; xorq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; xorq %rax,(%rcx)
# Tests for op regs, regs/m16
# Bit-test (btc/btr/bts), cmpxchg and xadd memory forms, one sub-group per
# operand size (16/32/64-bit). These ops only store to memory, so there is no
# reversed-direction variant here.
xacquire lock btcw %ax,(%rcx)
lock xacquire btcw %ax,(%rcx)
xrelease lock btcw %ax,(%rcx)
lock xrelease btcw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; btcw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; btcw %ax,(%rcx)
xacquire lock btrw %ax,(%rcx)
lock xacquire btrw %ax,(%rcx)
xrelease lock btrw %ax,(%rcx)
lock xrelease btrw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; btrw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; btrw %ax,(%rcx)
xacquire lock btsw %ax,(%rcx)
lock xacquire btsw %ax,(%rcx)
xrelease lock btsw %ax,(%rcx)
lock xrelease btsw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; btsw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; btsw %ax,(%rcx)
xacquire lock cmpxchgw %ax,(%rcx)
lock xacquire cmpxchgw %ax,(%rcx)
xrelease lock cmpxchgw %ax,(%rcx)
lock xrelease cmpxchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgw %ax,(%rcx)
xacquire lock xaddw %ax,(%rcx)
lock xacquire xaddw %ax,(%rcx)
xrelease lock xaddw %ax,(%rcx)
lock xrelease xaddw %ax,(%rcx)
.byte 0xf0; .byte 0xf2; xaddw %ax,(%rcx)
.byte 0xf0; .byte 0xf3; xaddw %ax,(%rcx)
# Tests for op regl regl/m32
xacquire lock btcl %eax,(%rcx)
lock xacquire btcl %eax,(%rcx)
xrelease lock btcl %eax,(%rcx)
lock xrelease btcl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; btcl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; btcl %eax,(%rcx)
xacquire lock btrl %eax,(%rcx)
lock xacquire btrl %eax,(%rcx)
xrelease lock btrl %eax,(%rcx)
lock xrelease btrl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; btrl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; btrl %eax,(%rcx)
xacquire lock btsl %eax,(%rcx)
lock xacquire btsl %eax,(%rcx)
xrelease lock btsl %eax,(%rcx)
lock xrelease btsl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; btsl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; btsl %eax,(%rcx)
xacquire lock cmpxchgl %eax,(%rcx)
lock xacquire cmpxchgl %eax,(%rcx)
xrelease lock cmpxchgl %eax,(%rcx)
lock xrelease cmpxchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgl %eax,(%rcx)
xacquire lock xaddl %eax,(%rcx)
lock xacquire xaddl %eax,(%rcx)
xrelease lock xaddl %eax,(%rcx)
lock xrelease xaddl %eax,(%rcx)
.byte 0xf0; .byte 0xf2; xaddl %eax,(%rcx)
.byte 0xf0; .byte 0xf3; xaddl %eax,(%rcx)
# Tests for op regq regq/m64
xacquire lock btcq %rax,(%rcx)
lock xacquire btcq %rax,(%rcx)
xrelease lock btcq %rax,(%rcx)
lock xrelease btcq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; btcq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; btcq %rax,(%rcx)
xacquire lock btrq %rax,(%rcx)
lock xacquire btrq %rax,(%rcx)
xrelease lock btrq %rax,(%rcx)
lock xrelease btrq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; btrq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; btrq %rax,(%rcx)
xacquire lock btsq %rax,(%rcx)
lock xacquire btsq %rax,(%rcx)
xrelease lock btsq %rax,(%rcx)
lock xrelease btsq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; btsq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; btsq %rax,(%rcx)
xacquire lock cmpxchgq %rax,(%rcx)
lock xacquire cmpxchgq %rax,(%rcx)
xrelease lock cmpxchgq %rax,(%rcx)
lock xrelease cmpxchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgq %rax,(%rcx)
xacquire lock xaddq %rax,(%rcx)
lock xacquire xaddq %rax,(%rcx)
xrelease lock xaddq %rax,(%rcx)
lock xrelease xaddq %rax,(%rcx)
.byte 0xf0; .byte 0xf2; xaddq %rax,(%rcx)
.byte 0xf0; .byte 0xf3; xaddq %rax,(%rcx)
# Tests for op regb/m8
# Single-operand read-modify-write ops (dec/inc/neg/not) against memory, one
# sub-group per operand size (8/16/32/64-bit).
xacquire lock decb (%rcx)
lock xacquire decb (%rcx)
xrelease lock decb (%rcx)
lock xrelease decb (%rcx)
.byte 0xf0; .byte 0xf2; decb (%rcx)
.byte 0xf0; .byte 0xf3; decb (%rcx)
xacquire lock incb (%rcx)
lock xacquire incb (%rcx)
xrelease lock incb (%rcx)
lock xrelease incb (%rcx)
.byte 0xf0; .byte 0xf2; incb (%rcx)
.byte 0xf0; .byte 0xf3; incb (%rcx)
xacquire lock negb (%rcx)
lock xacquire negb (%rcx)
xrelease lock negb (%rcx)
lock xrelease negb (%rcx)
.byte 0xf0; .byte 0xf2; negb (%rcx)
.byte 0xf0; .byte 0xf3; negb (%rcx)
xacquire lock notb (%rcx)
lock xacquire notb (%rcx)
xrelease lock notb (%rcx)
lock xrelease notb (%rcx)
.byte 0xf0; .byte 0xf2; notb (%rcx)
.byte 0xf0; .byte 0xf3; notb (%rcx)
# Tests for op regs/m16
xacquire lock decw (%rcx)
lock xacquire decw (%rcx)
xrelease lock decw (%rcx)
lock xrelease decw (%rcx)
.byte 0xf0; .byte 0xf2; decw (%rcx)
.byte 0xf0; .byte 0xf3; decw (%rcx)
xacquire lock incw (%rcx)
lock xacquire incw (%rcx)
xrelease lock incw (%rcx)
lock xrelease incw (%rcx)
.byte 0xf0; .byte 0xf2; incw (%rcx)
.byte 0xf0; .byte 0xf3; incw (%rcx)
xacquire lock negw (%rcx)
lock xacquire negw (%rcx)
xrelease lock negw (%rcx)
lock xrelease negw (%rcx)
.byte 0xf0; .byte 0xf2; negw (%rcx)
.byte 0xf0; .byte 0xf3; negw (%rcx)
xacquire lock notw (%rcx)
lock xacquire notw (%rcx)
xrelease lock notw (%rcx)
lock xrelease notw (%rcx)
.byte 0xf0; .byte 0xf2; notw (%rcx)
.byte 0xf0; .byte 0xf3; notw (%rcx)
# Tests for op regl/m32
xacquire lock decl (%rcx)
lock xacquire decl (%rcx)
xrelease lock decl (%rcx)
lock xrelease decl (%rcx)
.byte 0xf0; .byte 0xf2; decl (%rcx)
.byte 0xf0; .byte 0xf3; decl (%rcx)
xacquire lock incl (%rcx)
lock xacquire incl (%rcx)
xrelease lock incl (%rcx)
lock xrelease incl (%rcx)
.byte 0xf0; .byte 0xf2; incl (%rcx)
.byte 0xf0; .byte 0xf3; incl (%rcx)
xacquire lock negl (%rcx)
lock xacquire negl (%rcx)
xrelease lock negl (%rcx)
lock xrelease negl (%rcx)
.byte 0xf0; .byte 0xf2; negl (%rcx)
.byte 0xf0; .byte 0xf3; negl (%rcx)
xacquire lock notl (%rcx)
lock xacquire notl (%rcx)
xrelease lock notl (%rcx)
lock xrelease notl (%rcx)
.byte 0xf0; .byte 0xf2; notl (%rcx)
.byte 0xf0; .byte 0xf3; notl (%rcx)
# Tests for op regq/m64
xacquire lock decq (%rcx)
lock xacquire decq (%rcx)
xrelease lock decq (%rcx)
lock xrelease decq (%rcx)
.byte 0xf0; .byte 0xf2; decq (%rcx)
.byte 0xf0; .byte 0xf3; decq (%rcx)
xacquire lock incq (%rcx)
lock xacquire incq (%rcx)
xrelease lock incq (%rcx)
lock xrelease incq (%rcx)
.byte 0xf0; .byte 0xf2; incq (%rcx)
.byte 0xf0; .byte 0xf3; incq (%rcx)
xacquire lock negq (%rcx)
lock xacquire negq (%rcx)
xrelease lock negq (%rcx)
lock xrelease negq (%rcx)
.byte 0xf0; .byte 0xf2; negq (%rcx)
.byte 0xf0; .byte 0xf3; negq (%rcx)
xacquire lock notq (%rcx)
lock xacquire notq (%rcx)
xrelease lock notq (%rcx)
lock xrelease notq (%rcx)
.byte 0xf0; .byte 0xf2; notq (%rcx)
.byte 0xf0; .byte 0xf3; notq (%rcx)
# Tests for op m64
# cmpxchg8b with an explicit (redundant) q suffix — checks suffix acceptance
# on the implicit-size m64 form.
xacquire lock cmpxchg8bq (%rcx)
lock xacquire cmpxchg8bq (%rcx)
xrelease lock cmpxchg8bq (%rcx)
lock xrelease cmpxchg8bq (%rcx)
.byte 0xf0; .byte 0xf2; cmpxchg8bq (%rcx)
.byte 0xf0; .byte 0xf3; cmpxchg8bq (%rcx)
# Tests for op regb, regb/m8
# Byte-sized cmpxchg/xadd store-to-memory forms.
xacquire lock cmpxchgb %cl,(%rcx)
lock xacquire cmpxchgb %cl,(%rcx)
xrelease lock cmpxchgb %cl,(%rcx)
lock xrelease cmpxchgb %cl,(%rcx)
.byte 0xf0; .byte 0xf2; cmpxchgb %cl,(%rcx)
.byte 0xf0; .byte 0xf3; cmpxchgb %cl,(%rcx)
xacquire lock xaddb %cl,(%rcx)
lock xacquire xaddb %cl,(%rcx)
xrelease lock xaddb %cl,(%rcx)
lock xrelease xaddb %cl,(%rcx)
.byte 0xf0; .byte 0xf2; xaddb %cl,(%rcx)
.byte 0xf0; .byte 0xf3; xaddb %cl,(%rcx)
.intel_syntax noprefix
# Tests for op imm32 rax
# (register-only destination: lock/HLE need a memory operand, so the group
#  above is intentionally empty — TODO confirm against the AT&T half)
# Tests for op imm8 regb/m8
xacquire lock adc BYTE PTR [rcx],100
lock xacquire adc BYTE PTR [rcx],100
xrelease lock adc BYTE PTR [rcx],100
lock xrelease adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [rcx],100
xacquire lock add BYTE PTR [rcx],100
lock xacquire add BYTE PTR [rcx],100
xrelease lock add BYTE PTR [rcx],100
lock xrelease add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [rcx],100
xacquire lock and BYTE PTR [rcx],100
lock xacquire and BYTE PTR [rcx],100
xrelease lock and BYTE PTR [rcx],100
lock xrelease and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [rcx],100
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov BYTE PTR [rcx],100
xacquire lock or BYTE PTR [rcx],100
lock xacquire or BYTE PTR [rcx],100
xrelease lock or BYTE PTR [rcx],100
lock xrelease or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [rcx],100
xacquire lock sbb BYTE PTR [rcx],100
lock xacquire sbb BYTE PTR [rcx],100
xrelease lock sbb BYTE PTR [rcx],100
lock xrelease sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [rcx],100
xacquire lock sub BYTE PTR [rcx],100
lock xacquire sub BYTE PTR [rcx],100
xrelease lock sub BYTE PTR [rcx],100
lock xrelease sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [rcx],100
xacquire lock xor BYTE PTR [rcx],100
lock xacquire xor BYTE PTR [rcx],100
xrelease lock xor BYTE PTR [rcx],100
lock xrelease xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [rcx],100
# Tests for op imm16 regs/m16
# imm16 (1000 does not fit in a sign-extended imm8) against a word memory op.
xacquire lock adc WORD PTR [rcx],1000
lock xacquire adc WORD PTR [rcx],1000
xrelease lock adc WORD PTR [rcx],1000
lock xrelease adc WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; adc WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; adc WORD PTR [rcx],1000
xacquire lock add WORD PTR [rcx],1000
lock xacquire add WORD PTR [rcx],1000
xrelease lock add WORD PTR [rcx],1000
lock xrelease add WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; add WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; add WORD PTR [rcx],1000
xacquire lock and WORD PTR [rcx],1000
lock xacquire and WORD PTR [rcx],1000
xrelease lock and WORD PTR [rcx],1000
lock xrelease and WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; and WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; and WORD PTR [rcx],1000
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov WORD PTR [rcx],1000
xacquire lock or WORD PTR [rcx],1000
lock xacquire or WORD PTR [rcx],1000
xrelease lock or WORD PTR [rcx],1000
lock xrelease or WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; or WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; or WORD PTR [rcx],1000
xacquire lock sbb WORD PTR [rcx],1000
lock xacquire sbb WORD PTR [rcx],1000
xrelease lock sbb WORD PTR [rcx],1000
lock xrelease sbb WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; sbb WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; sbb WORD PTR [rcx],1000
xacquire lock sub WORD PTR [rcx],1000
lock xacquire sub WORD PTR [rcx],1000
xrelease lock sub WORD PTR [rcx],1000
lock xrelease sub WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; sub WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; sub WORD PTR [rcx],1000
xacquire lock xor WORD PTR [rcx],1000
lock xacquire xor WORD PTR [rcx],1000
xrelease lock xor WORD PTR [rcx],1000
lock xrelease xor WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf2; xor WORD PTR [rcx],1000
.byte 0xf0; .byte 0xf3; xor WORD PTR [rcx],1000
# Tests for op imm32 regl/m32
# imm32 (10000000 does not fit in a sign-extended imm8) against a dword memory op.
xacquire lock adc DWORD PTR [rcx],10000000
lock xacquire adc DWORD PTR [rcx],10000000
xrelease lock adc DWORD PTR [rcx],10000000
lock xrelease adc DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; adc DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; adc DWORD PTR [rcx],10000000
xacquire lock add DWORD PTR [rcx],10000000
lock xacquire add DWORD PTR [rcx],10000000
xrelease lock add DWORD PTR [rcx],10000000
lock xrelease add DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; add DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; add DWORD PTR [rcx],10000000
xacquire lock and DWORD PTR [rcx],10000000
lock xacquire and DWORD PTR [rcx],10000000
xrelease lock and DWORD PTR [rcx],10000000
lock xrelease and DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; and DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; and DWORD PTR [rcx],10000000
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov DWORD PTR [rcx],10000000
xacquire lock or DWORD PTR [rcx],10000000
lock xacquire or DWORD PTR [rcx],10000000
xrelease lock or DWORD PTR [rcx],10000000
lock xrelease or DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; or DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; or DWORD PTR [rcx],10000000
xacquire lock sbb DWORD PTR [rcx],10000000
lock xacquire sbb DWORD PTR [rcx],10000000
xrelease lock sbb DWORD PTR [rcx],10000000
lock xrelease sbb DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [rcx],10000000
xacquire lock sub DWORD PTR [rcx],10000000
lock xacquire sub DWORD PTR [rcx],10000000
xrelease lock sub DWORD PTR [rcx],10000000
lock xrelease sub DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sub DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sub DWORD PTR [rcx],10000000
xacquire lock xor DWORD PTR [rcx],10000000
lock xacquire xor DWORD PTR [rcx],10000000
xrelease lock xor DWORD PTR [rcx],10000000
lock xrelease xor DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; xor DWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; xor DWORD PTR [rcx],10000000
# Tests for op imm32 regq/m64
# imm32 sign-extended to 64 bits against a qword memory op.
xacquire lock adc QWORD PTR [rcx],10000000
lock xacquire adc QWORD PTR [rcx],10000000
xrelease lock adc QWORD PTR [rcx],10000000
lock xrelease adc QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; adc QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; adc QWORD PTR [rcx],10000000
xacquire lock add QWORD PTR [rcx],10000000
lock xacquire add QWORD PTR [rcx],10000000
xrelease lock add QWORD PTR [rcx],10000000
lock xrelease add QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; add QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; add QWORD PTR [rcx],10000000
xacquire lock and QWORD PTR [rcx],10000000
lock xacquire and QWORD PTR [rcx],10000000
xrelease lock and QWORD PTR [rcx],10000000
lock xrelease and QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; and QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; and QWORD PTR [rcx],10000000
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov QWORD PTR [rcx],10000000
xacquire lock or QWORD PTR [rcx],10000000
lock xacquire or QWORD PTR [rcx],10000000
xrelease lock or QWORD PTR [rcx],10000000
lock xrelease or QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; or QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; or QWORD PTR [rcx],10000000
xacquire lock sbb QWORD PTR [rcx],10000000
lock xacquire sbb QWORD PTR [rcx],10000000
xrelease lock sbb QWORD PTR [rcx],10000000
lock xrelease sbb QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sbb QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sbb QWORD PTR [rcx],10000000
xacquire lock sub QWORD PTR [rcx],10000000
lock xacquire sub QWORD PTR [rcx],10000000
xrelease lock sub QWORD PTR [rcx],10000000
lock xrelease sub QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; sub QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; sub QWORD PTR [rcx],10000000
xacquire lock xor QWORD PTR [rcx],10000000
lock xacquire xor QWORD PTR [rcx],10000000
xrelease lock xor QWORD PTR [rcx],10000000
lock xrelease xor QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf2; xor QWORD PTR [rcx],10000000
.byte 0xf0; .byte 0xf3; xor QWORD PTR [rcx],10000000
# Tests for op imm8 regs/m16
# imm8 (100 fits sign-extended) against word memory ops; includes the
# bit-test instructions, which take an imm8 bit index.
xacquire lock adc WORD PTR [rcx],100
lock xacquire adc WORD PTR [rcx],100
xrelease lock adc WORD PTR [rcx],100
lock xrelease adc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc WORD PTR [rcx],100
xacquire lock add WORD PTR [rcx],100
lock xacquire add WORD PTR [rcx],100
xrelease lock add WORD PTR [rcx],100
lock xrelease add WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; add WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; add WORD PTR [rcx],100
xacquire lock and WORD PTR [rcx],100
lock xacquire and WORD PTR [rcx],100
xrelease lock and WORD PTR [rcx],100
lock xrelease and WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; and WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; and WORD PTR [rcx],100
xacquire lock btc WORD PTR [rcx],100
lock xacquire btc WORD PTR [rcx],100
xrelease lock btc WORD PTR [rcx],100
lock xrelease btc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btc WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btc WORD PTR [rcx],100
xacquire lock btr WORD PTR [rcx],100
lock xacquire btr WORD PTR [rcx],100
xrelease lock btr WORD PTR [rcx],100
lock xrelease btr WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btr WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btr WORD PTR [rcx],100
xacquire lock bts WORD PTR [rcx],100
lock xacquire bts WORD PTR [rcx],100
xrelease lock bts WORD PTR [rcx],100
lock xrelease bts WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; bts WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; bts WORD PTR [rcx],100
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov WORD PTR [rcx],100
xacquire lock or WORD PTR [rcx],100
lock xacquire or WORD PTR [rcx],100
xrelease lock or WORD PTR [rcx],100
lock xrelease or WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; or WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; or WORD PTR [rcx],100
xacquire lock sbb WORD PTR [rcx],100
lock xacquire sbb WORD PTR [rcx],100
xrelease lock sbb WORD PTR [rcx],100
lock xrelease sbb WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb WORD PTR [rcx],100
xacquire lock sub WORD PTR [rcx],100
lock xacquire sub WORD PTR [rcx],100
xrelease lock sub WORD PTR [rcx],100
lock xrelease sub WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub WORD PTR [rcx],100
xacquire lock xor WORD PTR [rcx],100
lock xacquire xor WORD PTR [rcx],100
xrelease lock xor WORD PTR [rcx],100
lock xrelease xor WORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor WORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor WORD PTR [rcx],100
# Tests for op imm8 regl/m32
# imm8 against dword memory ops; includes the bit-test instructions.
xacquire lock adc DWORD PTR [rcx],100
lock xacquire adc DWORD PTR [rcx],100
xrelease lock adc DWORD PTR [rcx],100
lock xrelease adc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc DWORD PTR [rcx],100
xacquire lock add DWORD PTR [rcx],100
lock xacquire add DWORD PTR [rcx],100
xrelease lock add DWORD PTR [rcx],100
lock xrelease add DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; add DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; add DWORD PTR [rcx],100
xacquire lock and DWORD PTR [rcx],100
lock xacquire and DWORD PTR [rcx],100
xrelease lock and DWORD PTR [rcx],100
lock xrelease and DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; and DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; and DWORD PTR [rcx],100
xacquire lock btc DWORD PTR [rcx],100
lock xacquire btc DWORD PTR [rcx],100
xrelease lock btc DWORD PTR [rcx],100
lock xrelease btc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btc DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btc DWORD PTR [rcx],100
xacquire lock btr DWORD PTR [rcx],100
lock xacquire btr DWORD PTR [rcx],100
xrelease lock btr DWORD PTR [rcx],100
lock xrelease btr DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btr DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btr DWORD PTR [rcx],100
xacquire lock bts DWORD PTR [rcx],100
lock xacquire bts DWORD PTR [rcx],100
xrelease lock bts DWORD PTR [rcx],100
lock xrelease bts DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; bts DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; bts DWORD PTR [rcx],100
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov DWORD PTR [rcx],100
xacquire lock or DWORD PTR [rcx],100
lock xacquire or DWORD PTR [rcx],100
xrelease lock or DWORD PTR [rcx],100
lock xrelease or DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; or DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; or DWORD PTR [rcx],100
xacquire lock sbb DWORD PTR [rcx],100
lock xacquire sbb DWORD PTR [rcx],100
xrelease lock sbb DWORD PTR [rcx],100
lock xrelease sbb DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [rcx],100
xacquire lock sub DWORD PTR [rcx],100
lock xacquire sub DWORD PTR [rcx],100
xrelease lock sub DWORD PTR [rcx],100
lock xrelease sub DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub DWORD PTR [rcx],100
xacquire lock xor DWORD PTR [rcx],100
lock xacquire xor DWORD PTR [rcx],100
xrelease lock xor DWORD PTR [rcx],100
lock xrelease xor DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor DWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor DWORD PTR [rcx],100
# Tests for op imm8 regq/m64
# imm8 against qword memory ops; includes the bit-test instructions.
xacquire lock adc QWORD PTR [rcx],100
lock xacquire adc QWORD PTR [rcx],100
xrelease lock adc QWORD PTR [rcx],100
lock xrelease adc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc QWORD PTR [rcx],100
xacquire lock add QWORD PTR [rcx],100
lock xacquire add QWORD PTR [rcx],100
xrelease lock add QWORD PTR [rcx],100
lock xrelease add QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; add QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; add QWORD PTR [rcx],100
xacquire lock and QWORD PTR [rcx],100
lock xacquire and QWORD PTR [rcx],100
xrelease lock and QWORD PTR [rcx],100
lock xrelease and QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; and QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; and QWORD PTR [rcx],100
xacquire lock btc QWORD PTR [rcx],100
lock xacquire btc QWORD PTR [rcx],100
xrelease lock btc QWORD PTR [rcx],100
lock xrelease btc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btc QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btc QWORD PTR [rcx],100
xacquire lock btr QWORD PTR [rcx],100
lock xacquire btr QWORD PTR [rcx],100
xrelease lock btr QWORD PTR [rcx],100
lock xrelease btr QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; btr QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; btr QWORD PTR [rcx],100
xacquire lock bts QWORD PTR [rcx],100
lock xacquire bts QWORD PTR [rcx],100
xrelease lock bts QWORD PTR [rcx],100
lock xrelease bts QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; bts QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; bts QWORD PTR [rcx],100
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov QWORD PTR [rcx],100
xacquire lock or QWORD PTR [rcx],100
lock xacquire or QWORD PTR [rcx],100
xrelease lock or QWORD PTR [rcx],100
lock xrelease or QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; or QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; or QWORD PTR [rcx],100
xacquire lock sbb QWORD PTR [rcx],100
lock xacquire sbb QWORD PTR [rcx],100
xrelease lock sbb QWORD PTR [rcx],100
lock xrelease sbb QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb QWORD PTR [rcx],100
xacquire lock sub QWORD PTR [rcx],100
lock xacquire sub QWORD PTR [rcx],100
xrelease lock sub QWORD PTR [rcx],100
lock xrelease sub QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub QWORD PTR [rcx],100
xacquire lock xor QWORD PTR [rcx],100
lock xacquire xor QWORD PTR [rcx],100
xrelease lock xor QWORD PTR [rcx],100
lock xrelease xor QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor QWORD PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor QWORD PTR [rcx],100
# Tests for op imm8 regb/m8
# Byte-sized imm8 forms (no bt/btc/btr/bts: those have no m8 variant).
xacquire lock adc BYTE PTR [rcx],100
lock xacquire adc BYTE PTR [rcx],100
xrelease lock adc BYTE PTR [rcx],100
lock xrelease adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [rcx],100
xacquire lock add BYTE PTR [rcx],100
lock xacquire add BYTE PTR [rcx],100
xrelease lock add BYTE PTR [rcx],100
lock xrelease add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [rcx],100
xacquire lock and BYTE PTR [rcx],100
lock xacquire and BYTE PTR [rcx],100
xrelease lock and BYTE PTR [rcx],100
lock xrelease and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [rcx],100
# mov-to-memory accepts xrelease without lock (HLE special case).
xrelease mov BYTE PTR [rcx],100
xacquire lock or BYTE PTR [rcx],100
lock xacquire or BYTE PTR [rcx],100
xrelease lock or BYTE PTR [rcx],100
lock xrelease or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [rcx],100
xacquire lock sbb BYTE PTR [rcx],100
lock xacquire sbb BYTE PTR [rcx],100
xrelease lock sbb BYTE PTR [rcx],100
lock xrelease sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [rcx],100
xacquire lock sub BYTE PTR [rcx],100
lock xacquire sub BYTE PTR [rcx],100
xrelease lock sub BYTE PTR [rcx],100
lock xrelease sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [rcx],100
xacquire lock xor BYTE PTR [rcx],100
lock xacquire xor BYTE PTR [rcx],100
xrelease lock xor BYTE PTR [rcx],100
lock xrelease xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [rcx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [rcx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire lock adc BYTE PTR [rcx],al
lock xacquire adc BYTE PTR [rcx],al
xrelease lock adc BYTE PTR [rcx],al
lock xrelease adc BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; adc BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; adc BYTE PTR [rcx],al
xacquire lock add BYTE PTR [rcx],al
lock xacquire add BYTE PTR [rcx],al
xrelease lock add BYTE PTR [rcx],al
lock xrelease add BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; add BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; add BYTE PTR [rcx],al
xacquire lock and BYTE PTR [rcx],al
lock xacquire and BYTE PTR [rcx],al
xrelease lock and BYTE PTR [rcx],al
lock xrelease and BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; and BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; and BYTE PTR [rcx],al
xrelease mov BYTE PTR [rcx],al
xacquire lock or BYTE PTR [rcx],al
lock xacquire or BYTE PTR [rcx],al
xrelease lock or BYTE PTR [rcx],al
lock xrelease or BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; or BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; or BYTE PTR [rcx],al
xacquire lock sbb BYTE PTR [rcx],al
lock xacquire sbb BYTE PTR [rcx],al
xrelease lock sbb BYTE PTR [rcx],al
lock xrelease sbb BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [rcx],al
xacquire lock sub BYTE PTR [rcx],al
lock xacquire sub BYTE PTR [rcx],al
xrelease lock sub BYTE PTR [rcx],al
lock xrelease sub BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; sub BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; sub BYTE PTR [rcx],al
xacquire lock xchg BYTE PTR [rcx],al
lock xacquire xchg BYTE PTR [rcx],al
xacquire xchg BYTE PTR [rcx],al
xrelease lock xchg BYTE PTR [rcx],al
lock xrelease xchg BYTE PTR [rcx],al
xrelease xchg BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; xchg BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; xchg BYTE PTR [rcx],al
xacquire lock xor BYTE PTR [rcx],al
lock xacquire xor BYTE PTR [rcx],al
xrelease lock xor BYTE PTR [rcx],al
lock xrelease xor BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf2; xor BYTE PTR [rcx],al
.byte 0xf0; .byte 0xf3; xor BYTE PTR [rcx],al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire lock adc WORD PTR [rcx],ax
lock xacquire adc WORD PTR [rcx],ax
xrelease lock adc WORD PTR [rcx],ax
lock xrelease adc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; adc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; adc WORD PTR [rcx],ax
xacquire lock add WORD PTR [rcx],ax
lock xacquire add WORD PTR [rcx],ax
xrelease lock add WORD PTR [rcx],ax
lock xrelease add WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; add WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; add WORD PTR [rcx],ax
xacquire lock and WORD PTR [rcx],ax
lock xacquire and WORD PTR [rcx],ax
xrelease lock and WORD PTR [rcx],ax
lock xrelease and WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; and WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; and WORD PTR [rcx],ax
xrelease mov WORD PTR [rcx],ax
xacquire lock or WORD PTR [rcx],ax
lock xacquire or WORD PTR [rcx],ax
xrelease lock or WORD PTR [rcx],ax
lock xrelease or WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; or WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; or WORD PTR [rcx],ax
xacquire lock sbb WORD PTR [rcx],ax
lock xacquire sbb WORD PTR [rcx],ax
xrelease lock sbb WORD PTR [rcx],ax
lock xrelease sbb WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; sbb WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; sbb WORD PTR [rcx],ax
xacquire lock sub WORD PTR [rcx],ax
lock xacquire sub WORD PTR [rcx],ax
xrelease lock sub WORD PTR [rcx],ax
lock xrelease sub WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; sub WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; sub WORD PTR [rcx],ax
xacquire lock xchg WORD PTR [rcx],ax
lock xacquire xchg WORD PTR [rcx],ax
xacquire xchg WORD PTR [rcx],ax
xrelease lock xchg WORD PTR [rcx],ax
lock xrelease xchg WORD PTR [rcx],ax
xrelease xchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; xchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; xchg WORD PTR [rcx],ax
xacquire lock xor WORD PTR [rcx],ax
lock xacquire xor WORD PTR [rcx],ax
xrelease lock xor WORD PTR [rcx],ax
lock xrelease xor WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; xor WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; xor WORD PTR [rcx],ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire lock adc DWORD PTR [rcx],eax
lock xacquire adc DWORD PTR [rcx],eax
xrelease lock adc DWORD PTR [rcx],eax
lock xrelease adc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; adc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; adc DWORD PTR [rcx],eax
xacquire lock add DWORD PTR [rcx],eax
lock xacquire add DWORD PTR [rcx],eax
xrelease lock add DWORD PTR [rcx],eax
lock xrelease add DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; add DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; add DWORD PTR [rcx],eax
xacquire lock and DWORD PTR [rcx],eax
lock xacquire and DWORD PTR [rcx],eax
xrelease lock and DWORD PTR [rcx],eax
lock xrelease and DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; and DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; and DWORD PTR [rcx],eax
xrelease mov DWORD PTR [rcx],eax
xacquire lock or DWORD PTR [rcx],eax
lock xacquire or DWORD PTR [rcx],eax
xrelease lock or DWORD PTR [rcx],eax
lock xrelease or DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; or DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; or DWORD PTR [rcx],eax
xacquire lock sbb DWORD PTR [rcx],eax
lock xacquire sbb DWORD PTR [rcx],eax
xrelease lock sbb DWORD PTR [rcx],eax
lock xrelease sbb DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [rcx],eax
xacquire lock sub DWORD PTR [rcx],eax
lock xacquire sub DWORD PTR [rcx],eax
xrelease lock sub DWORD PTR [rcx],eax
lock xrelease sub DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; sub DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; sub DWORD PTR [rcx],eax
xacquire lock xchg DWORD PTR [rcx],eax
lock xacquire xchg DWORD PTR [rcx],eax
xacquire xchg DWORD PTR [rcx],eax
xrelease lock xchg DWORD PTR [rcx],eax
lock xrelease xchg DWORD PTR [rcx],eax
xrelease xchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; xchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; xchg DWORD PTR [rcx],eax
xacquire lock xor DWORD PTR [rcx],eax
lock xacquire xor DWORD PTR [rcx],eax
xrelease lock xor DWORD PTR [rcx],eax
lock xrelease xor DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; xor DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; xor DWORD PTR [rcx],eax
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire lock adc QWORD PTR [rcx],rax
lock xacquire adc QWORD PTR [rcx],rax
xrelease lock adc QWORD PTR [rcx],rax
lock xrelease adc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; adc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; adc QWORD PTR [rcx],rax
xacquire lock add QWORD PTR [rcx],rax
lock xacquire add QWORD PTR [rcx],rax
xrelease lock add QWORD PTR [rcx],rax
lock xrelease add QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; add QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; add QWORD PTR [rcx],rax
xacquire lock and QWORD PTR [rcx],rax
lock xacquire and QWORD PTR [rcx],rax
xrelease lock and QWORD PTR [rcx],rax
lock xrelease and QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; and QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; and QWORD PTR [rcx],rax
xrelease mov QWORD PTR [rcx],rax
xacquire lock or QWORD PTR [rcx],rax
lock xacquire or QWORD PTR [rcx],rax
xrelease lock or QWORD PTR [rcx],rax
lock xrelease or QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; or QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; or QWORD PTR [rcx],rax
xacquire lock sbb QWORD PTR [rcx],rax
lock xacquire sbb QWORD PTR [rcx],rax
xrelease lock sbb QWORD PTR [rcx],rax
lock xrelease sbb QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; sbb QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; sbb QWORD PTR [rcx],rax
xacquire lock sub QWORD PTR [rcx],rax
lock xacquire sub QWORD PTR [rcx],rax
xrelease lock sub QWORD PTR [rcx],rax
lock xrelease sub QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; sub QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; sub QWORD PTR [rcx],rax
xacquire lock xchg QWORD PTR [rcx],rax
lock xacquire xchg QWORD PTR [rcx],rax
xacquire xchg QWORD PTR [rcx],rax
xrelease lock xchg QWORD PTR [rcx],rax
lock xrelease xchg QWORD PTR [rcx],rax
xrelease xchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; xchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; xchg QWORD PTR [rcx],rax
xacquire lock xor QWORD PTR [rcx],rax
lock xacquire xor QWORD PTR [rcx],rax
xrelease lock xor QWORD PTR [rcx],rax
lock xrelease xor QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; xor QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; xor QWORD PTR [rcx],rax
# Tests for op regs, regs/m16
xacquire lock btc WORD PTR [rcx],ax
lock xacquire btc WORD PTR [rcx],ax
xrelease lock btc WORD PTR [rcx],ax
lock xrelease btc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; btc WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; btc WORD PTR [rcx],ax
xacquire lock btr WORD PTR [rcx],ax
lock xacquire btr WORD PTR [rcx],ax
xrelease lock btr WORD PTR [rcx],ax
lock xrelease btr WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; btr WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; btr WORD PTR [rcx],ax
xacquire lock bts WORD PTR [rcx],ax
lock xacquire bts WORD PTR [rcx],ax
xrelease lock bts WORD PTR [rcx],ax
lock xrelease bts WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; bts WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; bts WORD PTR [rcx],ax
xacquire lock cmpxchg WORD PTR [rcx],ax
lock xacquire cmpxchg WORD PTR [rcx],ax
xrelease lock cmpxchg WORD PTR [rcx],ax
lock xrelease cmpxchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; cmpxchg WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; cmpxchg WORD PTR [rcx],ax
xacquire lock xadd WORD PTR [rcx],ax
lock xacquire xadd WORD PTR [rcx],ax
xrelease lock xadd WORD PTR [rcx],ax
lock xrelease xadd WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf2; xadd WORD PTR [rcx],ax
.byte 0xf0; .byte 0xf3; xadd WORD PTR [rcx],ax
# Tests for op regl regl/m32
xacquire lock btc DWORD PTR [rcx],eax
lock xacquire btc DWORD PTR [rcx],eax
xrelease lock btc DWORD PTR [rcx],eax
lock xrelease btc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; btc DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; btc DWORD PTR [rcx],eax
xacquire lock btr DWORD PTR [rcx],eax
lock xacquire btr DWORD PTR [rcx],eax
xrelease lock btr DWORD PTR [rcx],eax
lock xrelease btr DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; btr DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; btr DWORD PTR [rcx],eax
xacquire lock bts DWORD PTR [rcx],eax
lock xacquire bts DWORD PTR [rcx],eax
xrelease lock bts DWORD PTR [rcx],eax
lock xrelease bts DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; bts DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; bts DWORD PTR [rcx],eax
xacquire lock cmpxchg DWORD PTR [rcx],eax
lock xacquire cmpxchg DWORD PTR [rcx],eax
xrelease lock cmpxchg DWORD PTR [rcx],eax
lock xrelease cmpxchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; cmpxchg DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; cmpxchg DWORD PTR [rcx],eax
xacquire lock xadd DWORD PTR [rcx],eax
lock xacquire xadd DWORD PTR [rcx],eax
xrelease lock xadd DWORD PTR [rcx],eax
lock xrelease xadd DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf2; xadd DWORD PTR [rcx],eax
.byte 0xf0; .byte 0xf3; xadd DWORD PTR [rcx],eax
# Tests for op regq regq/m64
xacquire lock btc QWORD PTR [rcx],rax
lock xacquire btc QWORD PTR [rcx],rax
xrelease lock btc QWORD PTR [rcx],rax
lock xrelease btc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; btc QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; btc QWORD PTR [rcx],rax
xacquire lock btr QWORD PTR [rcx],rax
lock xacquire btr QWORD PTR [rcx],rax
xrelease lock btr QWORD PTR [rcx],rax
lock xrelease btr QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; btr QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; btr QWORD PTR [rcx],rax
xacquire lock bts QWORD PTR [rcx],rax
lock xacquire bts QWORD PTR [rcx],rax
xrelease lock bts QWORD PTR [rcx],rax
lock xrelease bts QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; bts QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; bts QWORD PTR [rcx],rax
xacquire lock cmpxchg QWORD PTR [rcx],rax
lock xacquire cmpxchg QWORD PTR [rcx],rax
xrelease lock cmpxchg QWORD PTR [rcx],rax
lock xrelease cmpxchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; cmpxchg QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; cmpxchg QWORD PTR [rcx],rax
xacquire lock xadd QWORD PTR [rcx],rax
lock xacquire xadd QWORD PTR [rcx],rax
xrelease lock xadd QWORD PTR [rcx],rax
lock xrelease xadd QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf2; xadd QWORD PTR [rcx],rax
.byte 0xf0; .byte 0xf3; xadd QWORD PTR [rcx],rax
# Tests for op regb/m8
xacquire lock dec BYTE PTR [rcx]
lock xacquire dec BYTE PTR [rcx]
xrelease lock dec BYTE PTR [rcx]
lock xrelease dec BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; dec BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; dec BYTE PTR [rcx]
xacquire lock inc BYTE PTR [rcx]
lock xacquire inc BYTE PTR [rcx]
xrelease lock inc BYTE PTR [rcx]
lock xrelease inc BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; inc BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; inc BYTE PTR [rcx]
xacquire lock neg BYTE PTR [rcx]
lock xacquire neg BYTE PTR [rcx]
xrelease lock neg BYTE PTR [rcx]
lock xrelease neg BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; neg BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; neg BYTE PTR [rcx]
xacquire lock not BYTE PTR [rcx]
lock xacquire not BYTE PTR [rcx]
xrelease lock not BYTE PTR [rcx]
lock xrelease not BYTE PTR [rcx]
.byte 0xf0; .byte 0xf2; not BYTE PTR [rcx]
.byte 0xf0; .byte 0xf3; not BYTE PTR [rcx]
# Tests for op regs/m16
xacquire lock dec WORD PTR [rcx]
lock xacquire dec WORD PTR [rcx]
xrelease lock dec WORD PTR [rcx]
lock xrelease dec WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; dec WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; dec WORD PTR [rcx]
xacquire lock inc WORD PTR [rcx]
lock xacquire inc WORD PTR [rcx]
xrelease lock inc WORD PTR [rcx]
lock xrelease inc WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; inc WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; inc WORD PTR [rcx]
xacquire lock neg WORD PTR [rcx]
lock xacquire neg WORD PTR [rcx]
xrelease lock neg WORD PTR [rcx]
lock xrelease neg WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; neg WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; neg WORD PTR [rcx]
xacquire lock not WORD PTR [rcx]
lock xacquire not WORD PTR [rcx]
xrelease lock not WORD PTR [rcx]
lock xrelease not WORD PTR [rcx]
.byte 0xf0; .byte 0xf2; not WORD PTR [rcx]
.byte 0xf0; .byte 0xf3; not WORD PTR [rcx]
# Tests for op regl/m32
xacquire lock dec DWORD PTR [rcx]
lock xacquire dec DWORD PTR [rcx]
xrelease lock dec DWORD PTR [rcx]
lock xrelease dec DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; dec DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; dec DWORD PTR [rcx]
xacquire lock inc DWORD PTR [rcx]
lock xacquire inc DWORD PTR [rcx]
xrelease lock inc DWORD PTR [rcx]
lock xrelease inc DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; inc DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; inc DWORD PTR [rcx]
xacquire lock neg DWORD PTR [rcx]
lock xacquire neg DWORD PTR [rcx]
xrelease lock neg DWORD PTR [rcx]
lock xrelease neg DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; neg DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; neg DWORD PTR [rcx]
xacquire lock not DWORD PTR [rcx]
lock xacquire not DWORD PTR [rcx]
xrelease lock not DWORD PTR [rcx]
lock xrelease not DWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; not DWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; not DWORD PTR [rcx]
# Tests for op regq/m64
xacquire lock dec QWORD PTR [rcx]
lock xacquire dec QWORD PTR [rcx]
xrelease lock dec QWORD PTR [rcx]
lock xrelease dec QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; dec QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; dec QWORD PTR [rcx]
xacquire lock inc QWORD PTR [rcx]
lock xacquire inc QWORD PTR [rcx]
xrelease lock inc QWORD PTR [rcx]
lock xrelease inc QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; inc QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; inc QWORD PTR [rcx]
xacquire lock neg QWORD PTR [rcx]
lock xacquire neg QWORD PTR [rcx]
xrelease lock neg QWORD PTR [rcx]
lock xrelease neg QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; neg QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; neg QWORD PTR [rcx]
xacquire lock not QWORD PTR [rcx]
lock xacquire not QWORD PTR [rcx]
xrelease lock not QWORD PTR [rcx]
lock xrelease not QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; not QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; not QWORD PTR [rcx]
# Tests for op m64
xacquire lock cmpxchg8b QWORD PTR [rcx]
lock xacquire cmpxchg8b QWORD PTR [rcx]
xrelease lock cmpxchg8b QWORD PTR [rcx]
lock xrelease cmpxchg8b QWORD PTR [rcx]
.byte 0xf0; .byte 0xf2; cmpxchg8b QWORD PTR [rcx]
.byte 0xf0; .byte 0xf3; cmpxchg8b QWORD PTR [rcx]
# Tests for op regb, regb/m8
xacquire lock cmpxchg BYTE PTR [rcx],cl
lock xacquire cmpxchg BYTE PTR [rcx],cl
xrelease lock cmpxchg BYTE PTR [rcx],cl
lock xrelease cmpxchg BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf2; cmpxchg BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf3; cmpxchg BYTE PTR [rcx],cl
xacquire lock xadd BYTE PTR [rcx],cl
lock xacquire xadd BYTE PTR [rcx],cl
xrelease lock xadd BYTE PTR [rcx],cl
lock xrelease xadd BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf2; xadd BYTE PTR [rcx],cl
.byte 0xf0; .byte 0xf3; xadd BYTE PTR [rcx],cl
|
tactcomplabs/xbgas-binutils-gdb
| 4,780
|
gas/testsuite/gas/i386/x86-64-avx512vl_gfni.s
|
# Check 64bit AVX512VL,GFNI instructions
# Assembler coverage for the GFNI instructions in their AVX512VL (EVEX,
# 128-/256-bit) encodings: plain register, masked {%k7}, zero-masked
# {%k7}{z}, memory, Disp8*N-compressed displacement, and embedded
# broadcast ({1toN}) operand forms — each group repeated in AT&T and
# Intel syntax.  The exact operand mix is matched against the expected
# disassembly dump, so the instruction text must stay verbatim.
.allow_index_reg
.text
_start:
# --- AT&T syntax forms ---
vgf2p8affineqb $0xab, %xmm28, %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineqb $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512VL,GFNI
vgf2p8affineqb $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineqb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,GFNI
# 2032 = 127*16 and 1016 = 127*8: the largest displacements still
# representable with EVEX Disp8*N compression for 16-byte memory / 8-byte
# broadcast operands respectively.
vgf2p8affineqb $123, 2032(%rdx), %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineqb $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineqb $0xab, %ymm28, %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineqb $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512VL,GFNI
vgf2p8affineqb $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineqb $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineqb $123, 4064(%rdx), %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8affineqb $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $0xab, %xmm28, %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineinvqb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8affineinvqb $123, 2032(%rdx), %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $0xab, %ymm28, %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512VL,GFNI
vgf2p8affineinvqb $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8affineinvqb $123, 4064(%rdx), %ymm29, %ymm30 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512VL,GFNI Disp8
# vgf2p8mulb takes no immediate and no broadcast form.
vgf2p8mulb %xmm28, %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8mulb %xmm28, %xmm29, %xmm30{%k7} # AVX512VL,GFNI
vgf2p8mulb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512VL,GFNI
vgf2p8mulb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,GFNI
vgf2p8mulb 2032(%rdx), %xmm29, %xmm30 # AVX512VL,GFNI Disp8
vgf2p8mulb %ymm28, %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8mulb %ymm28, %ymm29, %ymm30{%k7} # AVX512VL,GFNI
vgf2p8mulb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512VL,GFNI
vgf2p8mulb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,GFNI
vgf2p8mulb 4064(%rdx), %ymm29, %ymm30 # AVX512VL,GFNI Disp8
# --- Same coverage repeated in Intel syntax ---
.intel_syntax noprefix
vgf2p8affineqb xmm30, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm30{k7}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineqb xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb ymm30, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm30{k7}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineqb ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb xmm30, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm30{k7}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineinvqb xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb ymm30, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm30{k7}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,GFNI
vgf2p8affineinvqb ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512VL,GFNI Disp8
# 1024 = 128*8 is one element past the Disp8*N range (max 127*8 = 1016),
# so unlike the lines above this one cannot be Disp8-compressed — hence
# no "Disp8" tag in the trailing comment.
vgf2p8affineinvqb ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512VL,GFNI
vgf2p8mulb xmm30, xmm29, xmm28 # AVX512VL,GFNI
vgf2p8mulb xmm30{k7}, xmm29, xmm28 # AVX512VL,GFNI
vgf2p8mulb xmm30{k7}{z}, xmm29, xmm28 # AVX512VL,GFNI
vgf2p8mulb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,GFNI
vgf2p8mulb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512VL,GFNI Disp8
vgf2p8mulb ymm30, ymm29, ymm28 # AVX512VL,GFNI
vgf2p8mulb ymm30{k7}, ymm29, ymm28 # AVX512VL,GFNI
vgf2p8mulb ymm30{k7}{z}, ymm29, ymm28 # AVX512VL,GFNI
vgf2p8mulb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,GFNI
vgf2p8mulb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512VL,GFNI Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 5,379
|
gas/testsuite/gas/i386/avx512bitalg_vl.s
|
# Check 32bit AVX512{BITALG,VL} instructions
# Assembler coverage for the AVX512-BITALG instructions in their VL
# (EVEX, 128-/256-bit) encodings: vpshufbitqmb (mask-register result)
# and vpopcnt{b,w,d,q}, each in register, masked, zero-masked, memory,
# Disp8*N-compressed, and (for d/q element sizes) broadcast forms —
# first in AT&T, then repeated in Intel syntax.  Instruction text is
# matched against the expected disassembly dump; keep it verbatim.
.allow_index_reg
.text
_start:
# --- AT&T syntax forms ---
vpshufbitqmb %xmm4, %xmm5, %k5{%k7} # AVX512{BITALG,VL}
vpshufbitqmb -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{BITALG,VL}
vpshufbitqmb 2032(%edx), %xmm5, %k5{%k7} # AVX512{BITALG,VL} Disp8
vpshufbitqmb %ymm4, %ymm5, %k5{%k7} # AVX512{BITALG,VL}
vpshufbitqmb -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{BITALG,VL}
vpshufbitqmb 4064(%edx), %ymm5, %k5{%k7} # AVX512{BITALG,VL} Disp8
vpopcntb %xmm5, %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntb %xmm5, %xmm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntb 2032(%edx), %xmm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntb %ymm5, %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntb %ymm5, %ymm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntb 4064(%edx), %ymm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntw %xmm5, %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntw %xmm5, %xmm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntw 2032(%edx), %xmm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntw %ymm5, %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntw %ymm5, %ymm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntw 4064(%edx), %ymm6{%k7} # AVX512{BITALG,VL} Disp8
# d/q element sizes additionally support embedded broadcast ({1toN});
# 508 = 127*4 and 1016 = 127*8 are the largest Disp8*N-compressible
# displacements for 4- and 8-byte broadcast elements.
vpopcntd %xmm5, %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntd %xmm5, %xmm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntd 2032(%edx), %xmm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntd 508(%edx){1to4}, %xmm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntd %ymm5, %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntd %ymm5, %ymm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntd 4064(%edx), %ymm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntd 508(%edx){1to8}, %ymm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntq %xmm5, %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntq %xmm5, %xmm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BITALG,VL}
vpopcntq 2032(%edx), %xmm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntq %ymm5, %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntq %ymm5, %ymm6{%k7}{z} # AVX512{BITALG,VL}
vpopcntq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BITALG,VL}
vpopcntq 4064(%edx), %ymm6{%k7} # AVX512{BITALG,VL} Disp8
vpopcntq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{BITALG,VL} Disp8
# --- Same coverage repeated in Intel syntax (plus DWORD/QWORD BCST forms) ---
.intel_syntax noprefix
vpshufbitqmb k5{k7}, xmm5, xmm4 # AVX512{BITALG,VL}
vpshufbitqmb k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpshufbitqmb k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BITALG,VL} Disp8
vpshufbitqmb k5{k7}, ymm5, ymm4 # AVX512{BITALG,VL}
vpshufbitqmb k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpshufbitqmb k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BITALG,VL} Disp8
vpopcntb xmm6{k7}, xmm5 # AVX512{BITALG,VL}
vpopcntb xmm6{k7}{z}, xmm5 # AVX512{BITALG,VL}
vpopcntb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntb xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BITALG,VL} Disp8
vpopcntb ymm6{k7}, ymm5 # AVX512{BITALG,VL}
vpopcntb ymm6{k7}{z}, ymm5 # AVX512{BITALG,VL}
vpopcntb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntb ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BITALG,VL} Disp8
vpopcntw xmm6{k7}, xmm5 # AVX512{BITALG,VL}
vpopcntw xmm6{k7}{z}, xmm5 # AVX512{BITALG,VL}
vpopcntw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntw xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BITALG,VL} Disp8
vpopcntw ymm6{k7}, ymm5 # AVX512{BITALG,VL}
vpopcntw ymm6{k7}{z}, ymm5 # AVX512{BITALG,VL}
vpopcntw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntw ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BITALG,VL} Disp8
vpopcntd xmm6{k7}, xmm5 # AVX512{BITALG,VL}
vpopcntd xmm6{k7}{z}, xmm5 # AVX512{BITALG,VL}
vpopcntd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BITALG,VL} Disp8
vpopcntd xmm6{k7}, [edx+508]{1to4} # AVX512{BITALG,VL} Disp8
vpopcntd xmm6{k7}, DWORD BCST [edx] # AVX512{BITALG,VL}
vpopcntd ymm6{k7}, ymm5 # AVX512{BITALG,VL}
vpopcntd ymm6{k7}{z}, ymm5 # AVX512{BITALG,VL}
vpopcntd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BITALG,VL} Disp8
vpopcntd ymm6{k7}, [edx+508]{1to8} # AVX512{BITALG,VL} Disp8
vpopcntd ymm6{k7}, DWORD BCST [edx] # AVX512{BITALG,VL}
vpopcntq xmm6{k7}, xmm5 # AVX512{BITALG,VL}
vpopcntq xmm6{k7}{z}, xmm5 # AVX512{BITALG,VL}
vpopcntq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BITALG,VL} Disp8
vpopcntq xmm6{k7}, [edx+1016]{1to2} # AVX512{BITALG,VL} Disp8
vpopcntq xmm6{k7}, QWORD BCST [edx] # AVX512{BITALG,VL}
vpopcntq ymm6{k7}, ymm5 # AVX512{BITALG,VL}
vpopcntq ymm6{k7}{z}, ymm5 # AVX512{BITALG,VL}
vpopcntq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BITALG,VL}
vpopcntq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BITALG,VL} Disp8
vpopcntq ymm6{k7}, [edx+1016]{1to4} # AVX512{BITALG,VL} Disp8
vpopcntq ymm6{k7}, QWORD BCST [edx] # AVX512{BITALG,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 4,259
|
gas/testsuite/gas/i386/optimize-1.s
|
# Check instructions with optimized encoding
# Assembler test input: each instruction below is a form the assembler
# may re-encode more compactly (the corresponding dump file pins the
# expected encodings).  Visible patterns exercised: identical source
# operands on logic/subtract ops, masked vs. unmasked variants, and
# displacements on either side of the signed-byte (Disp8) boundary.
# Keep instruction text verbatim — it is matched against the dump.
.allow_index_reg
.text
_start:
# Logic ops whose two source operands are the same register; masked
# ({%k7}, {z}) forms are included alongside the plain ones.
vandnpd %zmm1, %zmm1, %zmm5{%k7}
vandnpd %ymm1, %ymm1, %ymm5 {%k7} {z}
vandnpd %zmm1, %zmm1, %zmm5
vandnpd %ymm1, %ymm1, %ymm5
vandnps %zmm1, %zmm1, %zmm5{%k7}
vandnps %ymm1, %ymm1, %ymm5{z}{%k7}
vandnps %zmm1, %zmm1, %zmm5
vandnps %ymm1, %ymm1, %ymm5
vpandn %ymm1, %ymm1, %ymm5
vpandnd %zmm1, %zmm1, %zmm5{%k7}
vpandnd %ymm1, %ymm1, %ymm5{z}{%k7}
vpandnd %zmm1, %zmm1, %zmm5
vpandnd %ymm1, %ymm1, %ymm5
vpandnq %zmm1, %zmm1, %zmm5{%k7}
vpandnq %ymm1, %ymm1, %ymm5{z}{%k7}
vpandnq %zmm1, %zmm1, %zmm5
vpandnq %ymm1, %ymm1, %ymm5
vxorpd %zmm1, %zmm1, %zmm5{%k7}
vxorpd %ymm1, %ymm1, %ymm5{z}{%k7}
vxorpd %zmm1, %zmm1, %zmm5
vxorpd %ymm1, %ymm1, %ymm5
vxorps %zmm1, %zmm1, %zmm5{%k7}
vxorps %ymm1, %ymm1, %ymm5{z}{%k7}
vxorps %zmm1, %zmm1, %zmm5
vxorps %ymm1, %ymm1, %ymm5
vpxor %ymm1, %ymm1, %ymm5
vpxord %zmm1, %zmm1, %zmm5{%k7}
vpxord %ymm1, %ymm1, %ymm5{z}{%k7}
vpxord %zmm1, %zmm1, %zmm5
vpxord %ymm1, %ymm1, %ymm5
vpxorq %zmm1, %zmm1, %zmm5{%k7}
vpxorq %ymm1, %ymm1, %ymm5{z}{%k7}
vpxorq %zmm1, %zmm1, %zmm5
vpxorq %ymm1, %ymm1, %ymm5
vpsubb %zmm1, %zmm1, %zmm5{%k7}
vpsubb %ymm1, %ymm1, %ymm5{z}{%k7}
vpsubb %zmm1, %zmm1, %zmm5
vpsubb %ymm1, %ymm1, %ymm5
vpsubw %zmm1, %zmm1, %zmm5{%k7}
vpsubw %ymm1, %ymm1, %ymm5{z}{%k7}
vpsubw %zmm1, %zmm1, %zmm5
vpsubw %ymm1, %ymm1, %ymm5
vpsubd %zmm1, %zmm1, %zmm5{%k7}
vpsubd %ymm1, %ymm1, %ymm5{z}{%k7}
vpsubd %zmm1, %zmm1, %zmm5
vpsubd %ymm1, %ymm1, %ymm5
vpsubq %zmm1, %zmm1, %zmm5{%k7}
vpsubq %ymm1, %ymm1, %ymm5{z}{%k7}
vpsubq %zmm1, %zmm1, %zmm5
vpsubq %ymm1, %ymm1, %ymm5
# Mask-register ops, again with identical sources.
kxord %k1, %k1, %k5
kxorq %k1, %k1, %k5
kandnd %k1, %k1, %k5
kandnq %k1, %k1, %k5
# Unmasked full-vector moves, register and memory forms; 127 fits a
# signed-byte displacement while 128 does not.
vmovdqa32 %xmm1, %xmm2
vmovdqa64 %xmm1, %xmm2
vmovdqu8 %xmm1, %xmm2
vmovdqu16 %xmm1, %xmm2
vmovdqu32 %xmm1, %xmm2
vmovdqu64 %xmm1, %xmm2
vmovdqa32 127(%eax), %xmm2
vmovdqa64 127(%eax), %xmm2
vmovdqu8 127(%eax), %xmm2
vmovdqu16 127(%eax), %xmm2
vmovdqu32 127(%eax), %xmm2
vmovdqu64 127(%eax), %xmm2
vmovdqa32 %xmm1, 128(%eax)
vmovdqa64 %xmm1, 128(%eax)
vmovdqu8 %xmm1, 128(%eax)
vmovdqu16 %xmm1, 128(%eax)
vmovdqu32 %xmm1, 128(%eax)
vmovdqu64 %xmm1, 128(%eax)
vmovdqa32 %ymm1, %ymm2
vmovdqa64 %ymm1, %ymm2
vmovdqu8 %ymm1, %ymm2
vmovdqu16 %ymm1, %ymm2
vmovdqu32 %ymm1, %ymm2
vmovdqu64 %ymm1, %ymm2
vmovdqa32 127(%eax), %ymm2
vmovdqa64 127(%eax), %ymm2
vmovdqu8 127(%eax), %ymm2
vmovdqu16 127(%eax), %ymm2
vmovdqu32 127(%eax), %ymm2
vmovdqu64 127(%eax), %ymm2
vmovdqa32 %ymm1, 128(%eax)
vmovdqa64 %ymm1, 128(%eax)
vmovdqu8 %ymm1, 128(%eax)
vmovdqu16 %ymm1, 128(%eax)
vmovdqu32 %ymm1, 128(%eax)
vmovdqu64 %ymm1, 128(%eax)
# 512-bit operand: no 128-bit/256-bit shrink possible.
vmovdqa32 (%eax), %zmm2
# Unmasked D/Q-element bitwise ops on distinct registers (no masking or
# broadcast, so element size is not observable in the result).
vpandd %xmm2, %xmm3, %xmm4
vpandq %xmm2, %xmm3, %xmm4
vpandnd %xmm2, %xmm3, %xmm4
vpandnq %xmm2, %xmm3, %xmm4
vpord %xmm2, %xmm3, %xmm4
vporq %xmm2, %xmm3, %xmm4
vpxord %xmm2, %xmm3, %xmm4
vpxorq %xmm2, %xmm3, %xmm4
vpandd %ymm2, %ymm3, %ymm4
vpandq %ymm2, %ymm3, %ymm4
vpandnd %ymm2, %ymm3, %ymm4
vpandnq %ymm2, %ymm3, %ymm4
vpord %ymm2, %ymm3, %ymm4
vporq %ymm2, %ymm3, %ymm4
vpxord %ymm2, %ymm3, %ymm4
vpxorq %ymm2, %ymm3, %ymm4
# Memory forms straddling the Disp8 boundary: 112 = 7*16 and 96 = 3*32
# are in range for compressed displacements, 128 needs a full Disp32.
vpandd 112(%eax), %xmm2, %xmm3
vpandq 112(%eax), %xmm2, %xmm3
vpandnd 112(%eax), %xmm2, %xmm3
vpandnq 112(%eax), %xmm2, %xmm3
vpord 112(%eax), %xmm2, %xmm3
vporq 112(%eax), %xmm2, %xmm3
vpxord 112(%eax), %xmm2, %xmm3
vpxorq 112(%eax), %xmm2, %xmm3
vpandd 128(%eax), %xmm2, %xmm3
vpandq 128(%eax), %xmm2, %xmm3
vpandnd 128(%eax), %xmm2, %xmm3
vpandnq 128(%eax), %xmm2, %xmm3
vpord 128(%eax), %xmm2, %xmm3
vporq 128(%eax), %xmm2, %xmm3
vpxord 128(%eax), %xmm2, %xmm3
vpxorq 128(%eax), %xmm2, %xmm3
vpandd 96(%eax), %ymm2, %ymm3
vpandq 96(%eax), %ymm2, %ymm3
vpandnd 96(%eax), %ymm2, %ymm3
vpandnq 96(%eax), %ymm2, %ymm3
vpord 96(%eax), %ymm2, %ymm3
vporq 96(%eax), %ymm2, %ymm3
vpxord 96(%eax), %ymm2, %ymm3
vpxorq 96(%eax), %ymm2, %ymm3
vpandd 128(%eax), %ymm2, %ymm3
vpandq 128(%eax), %ymm2, %ymm3
vpandnd 128(%eax), %ymm2, %ymm3
vpandnq 128(%eax), %ymm2, %ymm3
vpord 128(%eax), %ymm2, %ymm3
vporq 128(%eax), %ymm2, %ymm3
vpxord 128(%eax), %ymm2, %ymm3
vpxorq 128(%eax), %ymm2, %ymm3
|
tactcomplabs/xbgas-binutils-gdb
| 1,515
|
gas/testsuite/gas/i386/sib.s
|
# Assembler test input (gas testsuite): exercises the special case of the
# SIB byte's index field value 0b100 ("no index"), which GAS spells with
# the %eiz pseudo index register.  NOTE(review): this file has a matching
# expected-output (.d) file; every instruction must stay byte-exact.
#Test the special case of the index bits, 0x4, in SIB.
.text
.allow_index_reg
foo:
# AT&T syntax: absolute displacement with no base, with %eiz at every scale.
mov -30,%ebx
mov -30(,%eiz),%ebx
mov -30(,%eiz,1),%eax
mov -30(,%eiz,2),%eax
mov -30(,%eiz,4),%eax
mov -30(,%eiz,8),%eax
mov 30,%eax
mov 30(,%eiz),%eax
mov 30(,%eiz,1),%eax
mov 30(,%eiz,2),%eax
mov 30(,%eiz,4),%eax
mov 30(,%eiz,8),%eax
# Base register plus %eiz index (index encodes as 0b100 regardless of scale).
mov (%ebx),%eax
mov (%ebx,%eiz),%eax
mov (%ebx,%eiz,1),%eax
mov (%ebx,%eiz,2),%eax
mov (%ebx,%eiz,4),%eax
mov (%ebx,%eiz,8),%eax
# %esp as base always requires a SIB byte, so %eiz is the natural index here.
mov (%esp),%eax
mov (%esp,%eiz,1),%eax
mov (%esp,%eiz,2),%eax
mov (%esp,%eiz,4),%eax
mov (%esp,%eiz,8),%eax
# Scale factors given as constant expressions rather than literals.
mov (%eax, %eax, (1 << 0)), %eax
mov (%eax, %eax, (1 << 1)), %eax
mov (%eax, %eax, (1 << 2)), %eax
mov (%eax, %eax, (1 << 3)), %eax
# Same coverage again in Intel syntax.
.intel_syntax noprefix
mov eax,DWORD PTR [eiz*1-30]
mov eax,DWORD PTR [eiz*2-30]
mov eax,DWORD PTR [eiz*4-30]
mov eax,DWORD PTR [eiz*8-30]
mov eax,DWORD PTR [eiz*1+30]
mov eax,DWORD PTR [eiz*2+30]
mov eax,DWORD PTR [eiz*4+30]
mov eax,DWORD PTR [eiz*8+30]
mov eax,DWORD PTR [ebx+eiz]
mov eax,DWORD PTR [ebx+eiz*1]
mov eax,DWORD PTR [ebx+eiz*2]
mov eax,DWORD PTR [ebx+eiz*4]
mov eax,DWORD PTR [ebx+eiz*8]
mov eax,DWORD PTR [esp]
mov eax,DWORD PTR [esp+eiz]
mov eax,DWORD PTR [esp+eiz*1]
mov eax,DWORD PTR [esp+eiz*2]
mov eax,DWORD PTR [esp+eiz*4]
mov eax,DWORD PTR [esp+eiz*8]
# Pad the section so the test's expected disassembly ends on an alignment boundary.
.p2align 4
|
tactcomplabs/xbgas-binutils-gdb
| 2,184
|
gas/testsuite/gas/i386/dw2-compress-2.s
|
# Assembler test input (gas testsuite): hand-written DWARF2 debug
# information used to test compression of .debug_* sections.  It mimics
# compiler output for a C file with two tiny functions (foo1, foo2).
# NOTE(review): the .debug_info/.debug_abbrev byte values are a matched
# pair; every data directive below must stay byte-exact.
.file "dw2-compress-2.c"
.section .debug_abbrev,"",@progbits
.Ldebug_abbrev0:
.section .debug_info,"",@progbits
.Ldebug_info0:
.section .debug_line,"",@progbits
.Ldebug_line0:
.text
.Ltext0:
.cfi_sections .debug_frame
.p2align 4,,15
.globl foo2
.type foo2, @function
# foo2: empty function body (rep ret), with line-number info.
foo2:
.LFB1:
.file 1 "dw2-compress-2.c"
.loc 1 11 0
.cfi_startproc
.loc 1 12 0
rep
ret
.cfi_endproc
.LFE1:
.size foo2, .-foo2
.p2align 4,,15
.globl foo1
.type foo1, @function
# foo1: adjusts the stack and tail-calls the external function bar.
foo1:
.LFB0:
.loc 1 5 0
.cfi_startproc
subl $12, %esp
.cfi_def_cfa_offset 16
.loc 1 7 0
addl $12, %esp
.cfi_def_cfa_offset 4
.loc 1 6 0
jmp bar
.cfi_endproc
.LFE0:
.size foo1, .-foo1
.Letext0:
# Compilation-unit DIE plus two subprogram DIEs (encoded per the
# abbreviation table below); DWARF version 3, 32-bit format.
.section .debug_info
.long 0x46
.value 0x3
.long .Ldebug_abbrev0
.byte 0x4
.uleb128 0x1
.long .LASF2
.byte 0x1
.long .LASF3
.long .LASF4
.long .Ltext0
.long .Letext0
.long .Ldebug_line0
.uleb128 0x2
.byte 0x1
.long .LASF0
.byte 0x1
.byte 0xa
.long .LFB1
.long .LFE1
.byte 0x1
.byte 0x9c
.uleb128 0x2
.byte 0x1
.long .LASF1
.byte 0x1
.byte 0x4
.long .LFB0
.long .LFE0
.byte 0x1
.byte 0x9c
.byte 0x0
# Abbreviation table: abbrev 1 = DW_TAG_compile_unit, abbrev 2 =
# DW_TAG_subprogram; attribute/form pairs encoded as ULEB128.
.section .debug_abbrev
.uleb128 0x1
.uleb128 0x11
.byte 0x1
.uleb128 0x25
.uleb128 0xe
.uleb128 0x13
.uleb128 0xb
.uleb128 0x3
.uleb128 0xe
.uleb128 0x1b
.uleb128 0xe
.uleb128 0x11
.uleb128 0x1
.uleb128 0x12
.uleb128 0x1
.uleb128 0x10
.uleb128 0x6
.byte 0x0
.byte 0x0
.uleb128 0x2
.uleb128 0x2e
.byte 0x0
.uleb128 0x3f
.uleb128 0xc
.uleb128 0x3
.uleb128 0xe
.uleb128 0x3a
.uleb128 0xb
.uleb128 0x3b
.uleb128 0xb
.uleb128 0x11
.uleb128 0x1
.uleb128 0x12
.uleb128 0x1
.uleb128 0x40
.uleb128 0xa
.byte 0x0
.byte 0x0
.byte 0x0
# Name lookup table for the two public functions.
.section .debug_pubnames,"",@progbits
.long 0x20
.value 0x2
.long .Ldebug_info0
.long 0x4a
.long 0x25
.string "foo2"
.long 0x37
.string "foo1"
.long 0x0
# Address-range table covering the whole .text contribution.
.section .debug_aranges,"",@progbits
.long 0x1c
.value 0x2
.long .Ldebug_info0
.byte 0x4
.byte 0x0
.value 0x0
.value 0x0
.long .Ltext0
.long .Letext0-.Ltext0
.long 0x0
.long 0x0
# Mergeable string table referenced by the DIEs above.
.section .debug_str,"MS",@progbits,1
.LASF2:
.string "GNU C 4.4.4"
.LASF0:
.string "foo2"
.LASF1:
.string "foo1"
.LASF4:
.string "."
.LASF3:
.string "dw2-compress-2.c"
|
tactcomplabs/xbgas-binutils-gdb
| 4,871
|
gas/testsuite/gas/i386/avx512_bf16_vl.s
|
# Assembler test input (gas testsuite) for the 32-bit AVX512{BF16,VL}
# instructions vcvtne2ps2bf16, vcvtneps2bf16 and vdpbf16ps at 128/256-bit
# vector lengths.  The AT&T-syntax half is mirrored in Intel syntax below;
# per-line tags mark what each form exercises (masking, {1toN} embedded
# broadcast, EVEX compressed Disp8, zero-masking).  NOTE(review): matched
# by an expected-output (.d) file — instruction text must stay byte-exact.
# Check 32bit AVX512{BF16,VL} instructions
.allow_index_reg
.text
_start:
vcvtne2ps2bf16 %ymm4, %ymm5, %ymm6 #AVX512{BF16,VL}
vcvtne2ps2bf16 %xmm4, %xmm5, %xmm6 #AVX512{BF16,VL}
vcvtne2ps2bf16 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 (%ecx){1to8}, %ymm5, %ymm6 #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 4064(%ecx), %ymm5, %ymm6 #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 -4096(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtne2ps2bf16 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 (%ecx){1to4}, %xmm5, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 2032(%ecx), %xmm5, %xmm6 #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 -2048(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# vcvtneps2bf16 needs x/y suffixes on ambiguous memory forms; both the
# suffixed and the broadcast-disambiguated spellings are covered.
vcvtneps2bf16 %xmm5, %xmm6 #AVX512{BF16,VL}
vcvtneps2bf16 %ymm5, %xmm6 #AVX512{BF16,VL}
vcvtneps2bf16x 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
vcvtneps2bf16 (%ecx){1to4}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16x (%ecx){1to4}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16x 2032(%ecx), %xmm6 #AVX512{BF16,VL} Disp8
vcvtneps2bf16 -2048(%edx){1to4}, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 (%ecx){1to8}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16y (%ecx){1to8}, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16y 4064(%ecx), %xmm6 #AVX512{BF16,VL} Disp8
vcvtneps2bf16 -4096(%edx){1to8}, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps %ymm4, %ymm5, %ymm6 #AVX512{BF16,VL}
vdpbf16ps %xmm4, %xmm5, %xmm6 #AVX512{BF16,VL}
vdpbf16ps 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps (%ecx){1to8}, %ymm5, %ymm6 #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps 4064(%ecx), %ymm5, %ymm6 #AVX512{BF16,VL} Disp8
vdpbf16ps -4096(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps (%ecx){1to4}, %xmm5, %xmm6 #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps 2032(%ecx), %xmm5, %xmm6 #AVX512{BF16,VL} Disp8
vdpbf16ps -2048(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# Same coverage again, Intel syntax (DWORD BCST spells the embedded broadcast).
.intel_syntax noprefix
vcvtne2ps2bf16 ymm6, ymm5, ymm4 #AVX512{BF16,VL}
vcvtne2ps2bf16 xmm6, xmm5, xmm4 #AVX512{BF16,VL}
vcvtne2ps2bf16 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 ymm6, ymm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 ymm6{k7}{z}, ymm5, DWORD BCST [edx-4096] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtne2ps2bf16 xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 xmm6, xmm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 xmm6{k7}{z}, xmm5, DWORD BCST [edx-2048] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 xmm6, xmm5 #AVX512{BF16,VL}
vcvtneps2bf16 xmm6, ymm5 #AVX512{BF16,VL}
vcvtneps2bf16 xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vcvtneps2bf16 xmm6, [ecx]{1to4} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm6, DWORD BCST [ecx]{1to4} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm6, XMMWORD PTR [ecx+2032] #AVX512{BF16,VL} Disp8
vcvtneps2bf16 xmm6{k7}{z}, DWORD BCST [edx-2048]{1to4} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 xmm6, [ecx]{1to8} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm6, DWORD BCST [ecx]{1to8} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm6, YMMWORD PTR [ecx+4064] #AVX512{BF16,VL} Disp8
vcvtneps2bf16 xmm6{k7}{z}, DWORD BCST [edx-4096]{1to8} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps ymm6, ymm5, ymm4 #AVX512{BF16,VL}
vdpbf16ps xmm6, xmm5, xmm4 #AVX512{BF16,VL}
vdpbf16ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps ymm6, ymm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512{BF16,VL} Disp8
vdpbf16ps ymm6{k7}{z}, ymm5, DWORD BCST [edx-4096] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps xmm6, xmm5, DWORD BCST [ecx] #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512{BF16,VL} Disp8
vdpbf16ps xmm6{k7}{z}, xmm5, DWORD BCST [edx-2048] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
|
tactcomplabs/xbgas-binutils-gdb
| 12,388
|
gas/testsuite/gas/i386/x86-64-avx512cd_vl.s
|
# Assembler test input (gas testsuite) for the 64-bit AVX512{CD,VL}
# instructions vpconflict{d,q}, vplzcnt{d,q} and vpbroadcastm{w2d,b2q}
# at 128/256-bit vector lengths.  Displacement pairs straddle the EVEX
# compressed-Disp8*N limit: lines tagged "Disp8" fit in a scaled 8-bit
# displacement, their untagged neighbours force a 32-bit displacement.
# The AT&T half is mirrored in Intel syntax below.  NOTE(review): matched
# by an expected-output (.d) file — instruction text must stay byte-exact.
# Check 64bit AVX512{CD,VL} instructions
.allow_index_reg
.text
_start:
vpconflictd %xmm29, %xmm30 # AVX512{CD,VL}
vpconflictd %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vpconflictd %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vpconflictd (%rcx), %xmm30 # AVX512{CD,VL}
vpconflictd 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vpconflictd (%rcx){1to4}, %xmm30 # AVX512{CD,VL}
vpconflictd 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictd 2048(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictd -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictd -2064(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictd 508(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictd 512(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vpconflictd -512(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictd -516(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vpconflictd %ymm29, %ymm30 # AVX512{CD,VL}
vpconflictd %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vpconflictd %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vpconflictd (%rcx), %ymm30 # AVX512{CD,VL}
vpconflictd 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vpconflictd (%rcx){1to8}, %ymm30 # AVX512{CD,VL}
vpconflictd 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictd 4096(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictd -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictd -4128(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictd 508(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictd 512(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vpconflictd -512(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictd -516(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vpconflictq %xmm29, %xmm30 # AVX512{CD,VL}
vpconflictq %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vpconflictq %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vpconflictq (%rcx), %xmm30 # AVX512{CD,VL}
vpconflictq 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vpconflictq (%rcx){1to2}, %xmm30 # AVX512{CD,VL}
vpconflictq 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictq 2048(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictq -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vpconflictq -2064(%rdx), %xmm30 # AVX512{CD,VL}
vpconflictq 1016(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictq 1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vpconflictq -1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vpconflictq -1032(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vpconflictq %ymm29, %ymm30 # AVX512{CD,VL}
vpconflictq %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vpconflictq %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vpconflictq (%rcx), %ymm30 # AVX512{CD,VL}
vpconflictq 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vpconflictq (%rcx){1to4}, %ymm30 # AVX512{CD,VL}
vpconflictq 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictq 4096(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictq -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vpconflictq -4128(%rdx), %ymm30 # AVX512{CD,VL}
vpconflictq 1016(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictq 1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vpconflictq -1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vpconflictq -1032(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vplzcntd %xmm29, %xmm30 # AVX512{CD,VL}
vplzcntd %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vplzcntd %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vplzcntd (%rcx), %xmm30 # AVX512{CD,VL}
vplzcntd 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vplzcntd (%rcx){1to4}, %xmm30 # AVX512{CD,VL}
vplzcntd 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntd 2048(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntd -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntd -2064(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntd 508(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntd 512(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vplzcntd -512(%rdx){1to4}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntd -516(%rdx){1to4}, %xmm30 # AVX512{CD,VL}
vplzcntd %ymm29, %ymm30 # AVX512{CD,VL}
vplzcntd %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vplzcntd %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vplzcntd (%rcx), %ymm30 # AVX512{CD,VL}
vplzcntd 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vplzcntd (%rcx){1to8}, %ymm30 # AVX512{CD,VL}
vplzcntd 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntd 4096(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntd -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntd -4128(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntd 508(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntd 512(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vplzcntd -512(%rdx){1to8}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntd -516(%rdx){1to8}, %ymm30 # AVX512{CD,VL}
vplzcntq %xmm29, %xmm30 # AVX512{CD,VL}
vplzcntq %xmm29, %xmm30{%k7} # AVX512{CD,VL}
vplzcntq %xmm29, %xmm30{%k7}{z} # AVX512{CD,VL}
vplzcntq (%rcx), %xmm30 # AVX512{CD,VL}
vplzcntq 0x123(%rax,%r14,8), %xmm30 # AVX512{CD,VL}
vplzcntq (%rcx){1to2}, %xmm30 # AVX512{CD,VL}
vplzcntq 2032(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntq 2048(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntq -2048(%rdx), %xmm30 # AVX512{CD,VL} Disp8
vplzcntq -2064(%rdx), %xmm30 # AVX512{CD,VL}
vplzcntq 1016(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntq 1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vplzcntq -1024(%rdx){1to2}, %xmm30 # AVX512{CD,VL} Disp8
vplzcntq -1032(%rdx){1to2}, %xmm30 # AVX512{CD,VL}
vplzcntq %ymm29, %ymm30 # AVX512{CD,VL}
vplzcntq %ymm29, %ymm30{%k7} # AVX512{CD,VL}
vplzcntq %ymm29, %ymm30{%k7}{z} # AVX512{CD,VL}
vplzcntq (%rcx), %ymm30 # AVX512{CD,VL}
vplzcntq 0x123(%rax,%r14,8), %ymm30 # AVX512{CD,VL}
vplzcntq (%rcx){1to4}, %ymm30 # AVX512{CD,VL}
vplzcntq 4064(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntq 4096(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntq -4096(%rdx), %ymm30 # AVX512{CD,VL} Disp8
vplzcntq -4128(%rdx), %ymm30 # AVX512{CD,VL}
vplzcntq 1016(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntq 1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vplzcntq -1024(%rdx){1to4}, %ymm30 # AVX512{CD,VL} Disp8
vplzcntq -1032(%rdx){1to4}, %ymm30 # AVX512{CD,VL}
vpbroadcastmw2d %k6, %xmm30 # AVX512{CD,VL}
vpbroadcastmw2d %k6, %ymm30 # AVX512{CD,VL}
vpbroadcastmb2q %k6, %xmm30 # AVX512{CD,VL}
vpbroadcastmb2q %k6, %ymm30 # AVX512{CD,VL}
# Same coverage again, Intel syntax.
.intel_syntax noprefix
vpconflictd xmm30, xmm29 # AVX512{CD,VL}
vpconflictd xmm30{k7}, xmm29 # AVX512{CD,VL}
vpconflictd xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictd xmm30, [rcx]{1to4} # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vpconflictd xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vpconflictd xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vpconflictd xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vpconflictd xmm30, [rdx+508]{1to4} # AVX512{CD,VL} Disp8
vpconflictd xmm30, [rdx+512]{1to4} # AVX512{CD,VL}
vpconflictd xmm30, [rdx-512]{1to4} # AVX512{CD,VL} Disp8
vpconflictd xmm30, [rdx-516]{1to4} # AVX512{CD,VL}
vpconflictd ymm30, ymm29 # AVX512{CD,VL}
vpconflictd ymm30{k7}, ymm29 # AVX512{CD,VL}
vpconflictd ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictd ymm30, [rcx]{1to8} # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vpconflictd ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vpconflictd ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vpconflictd ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vpconflictd ymm30, [rdx+508]{1to8} # AVX512{CD,VL} Disp8
vpconflictd ymm30, [rdx+512]{1to8} # AVX512{CD,VL}
vpconflictd ymm30, [rdx-512]{1to8} # AVX512{CD,VL} Disp8
vpconflictd ymm30, [rdx-516]{1to8} # AVX512{CD,VL}
vpconflictq xmm30, xmm29 # AVX512{CD,VL}
vpconflictq xmm30{k7}, xmm29 # AVX512{CD,VL}
vpconflictq xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictq xmm30, [rcx]{1to2} # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vpconflictq xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vpconflictq xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vpconflictq xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vpconflictq xmm30, [rdx+1016]{1to2} # AVX512{CD,VL} Disp8
vpconflictq xmm30, [rdx+1024]{1to2} # AVX512{CD,VL}
vpconflictq xmm30, [rdx-1024]{1to2} # AVX512{CD,VL} Disp8
vpconflictq xmm30, [rdx-1032]{1to2} # AVX512{CD,VL}
vpconflictq ymm30, ymm29 # AVX512{CD,VL}
vpconflictq ymm30{k7}, ymm29 # AVX512{CD,VL}
vpconflictq ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vpconflictq ymm30, [rcx]{1to4} # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vpconflictq ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vpconflictq ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vpconflictq ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vpconflictq ymm30, [rdx+1016]{1to4} # AVX512{CD,VL} Disp8
vpconflictq ymm30, [rdx+1024]{1to4} # AVX512{CD,VL}
vpconflictq ymm30, [rdx-1024]{1to4} # AVX512{CD,VL} Disp8
vpconflictq ymm30, [rdx-1032]{1to4} # AVX512{CD,VL}
vplzcntd xmm30, xmm29 # AVX512{CD,VL}
vplzcntd xmm30{k7}, xmm29 # AVX512{CD,VL}
vplzcntd xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntd xmm30, [rcx]{1to4} # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vplzcntd xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vplzcntd xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vplzcntd xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vplzcntd xmm30, [rdx+508]{1to4} # AVX512{CD,VL} Disp8
vplzcntd xmm30, [rdx+512]{1to4} # AVX512{CD,VL}
vplzcntd xmm30, [rdx-512]{1to4} # AVX512{CD,VL} Disp8
vplzcntd xmm30, [rdx-516]{1to4} # AVX512{CD,VL}
vplzcntd ymm30, ymm29 # AVX512{CD,VL}
vplzcntd ymm30{k7}, ymm29 # AVX512{CD,VL}
vplzcntd ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntd ymm30, [rcx]{1to8} # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vplzcntd ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vplzcntd ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vplzcntd ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vplzcntd ymm30, [rdx+508]{1to8} # AVX512{CD,VL} Disp8
vplzcntd ymm30, [rdx+512]{1to8} # AVX512{CD,VL}
vplzcntd ymm30, [rdx-512]{1to8} # AVX512{CD,VL} Disp8
vplzcntd ymm30, [rdx-516]{1to8} # AVX512{CD,VL}
vplzcntq xmm30, xmm29 # AVX512{CD,VL}
vplzcntq xmm30{k7}, xmm29 # AVX512{CD,VL}
vplzcntq xmm30{k7}{z}, xmm29 # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntq xmm30, [rcx]{1to2} # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rdx+2032] # AVX512{CD,VL} Disp8
vplzcntq xmm30, XMMWORD PTR [rdx+2048] # AVX512{CD,VL}
vplzcntq xmm30, XMMWORD PTR [rdx-2048] # AVX512{CD,VL} Disp8
vplzcntq xmm30, XMMWORD PTR [rdx-2064] # AVX512{CD,VL}
vplzcntq xmm30, [rdx+1016]{1to2} # AVX512{CD,VL} Disp8
vplzcntq xmm30, [rdx+1024]{1to2} # AVX512{CD,VL}
vplzcntq xmm30, [rdx-1024]{1to2} # AVX512{CD,VL} Disp8
vplzcntq xmm30, [rdx-1032]{1to2} # AVX512{CD,VL}
vplzcntq ymm30, ymm29 # AVX512{CD,VL}
vplzcntq ymm30{k7}, ymm29 # AVX512{CD,VL}
vplzcntq ymm30{k7}{z}, ymm29 # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rcx] # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{CD,VL}
vplzcntq ymm30, [rcx]{1to4} # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rdx+4064] # AVX512{CD,VL} Disp8
vplzcntq ymm30, YMMWORD PTR [rdx+4096] # AVX512{CD,VL}
vplzcntq ymm30, YMMWORD PTR [rdx-4096] # AVX512{CD,VL} Disp8
vplzcntq ymm30, YMMWORD PTR [rdx-4128] # AVX512{CD,VL}
vplzcntq ymm30, [rdx+1016]{1to4} # AVX512{CD,VL} Disp8
vplzcntq ymm30, [rdx+1024]{1to4} # AVX512{CD,VL}
vplzcntq ymm30, [rdx-1024]{1to4} # AVX512{CD,VL} Disp8
vplzcntq ymm30, [rdx-1032]{1to4} # AVX512{CD,VL}
vpbroadcastmw2d xmm30, k6 # AVX512{CD,VL}
vpbroadcastmw2d ymm30, k6 # AVX512{CD,VL}
vpbroadcastmb2q xmm30, k6 # AVX512{CD,VL}
vpbroadcastmb2q ymm30, k6 # AVX512{CD,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 5,228
|
gas/testsuite/gas/i386/avx512vnni_vl.s
|
# Assembler test input (gas testsuite) for the 32-bit AVX512{VNNI,VL}
# instructions vpdpwssd, vpdpwssds, vpdpbusd and vpdpbusds at 128/256-bit
# vector lengths, exercising register forms, write-masking ({kN}),
# zero-masking ({z}), memory operands, embedded broadcast ({1toN}) and
# EVEX compressed Disp8.  The AT&T half is mirrored in Intel syntax below.
# NOTE(review): matched by an expected-output (.d) file — instruction text
# must stay byte-exact.
# Check 32bit AVX512{VNNI,VL} instructions
.allow_index_reg
.text
_start:
vpdpwssd %xmm2, %xmm4, %xmm2{%k3} # AVX512{VNNI,VL}
vpdpwssd %xmm2, %xmm4, %xmm2{%k3}{z} # AVX512{VNNI,VL}
vpdpwssd -123456(%esp,%esi,8), %xmm4, %xmm2{%k1} # AVX512{VNNI,VL}
vpdpwssd 2032(%edx), %xmm4, %xmm2{%k1} # AVX512{VNNI,VL} Disp8
vpdpwssd 508(%edx){1to4}, %xmm4, %xmm2{%k1} # AVX512{VNNI,VL} Disp8
vpdpwssd %ymm1, %ymm3, %ymm3{%k1} # AVX512{VNNI,VL}
vpdpwssd %ymm1, %ymm3, %ymm3{%k1}{z} # AVX512{VNNI,VL}
vpdpwssd -123456(%esp,%esi,8), %ymm3, %ymm3{%k4} # AVX512{VNNI,VL}
vpdpwssd 4064(%edx), %ymm3, %ymm3{%k4} # AVX512{VNNI,VL} Disp8
vpdpwssd 508(%edx){1to8}, %ymm3, %ymm3{%k4} # AVX512{VNNI,VL} Disp8
vpdpwssds %xmm1, %xmm4, %xmm2{%k1} # AVX512{VNNI,VL}
vpdpwssds %xmm1, %xmm4, %xmm2{%k1}{z} # AVX512{VNNI,VL}
vpdpwssds -123456(%esp,%esi,8), %xmm4, %xmm2{%k4} # AVX512{VNNI,VL}
vpdpwssds 2032(%edx), %xmm4, %xmm2{%k4} # AVX512{VNNI,VL} Disp8
vpdpwssds 508(%edx){1to4}, %xmm4, %xmm2{%k4} # AVX512{VNNI,VL} Disp8
vpdpwssds %ymm4, %ymm1, %ymm4{%k7} # AVX512{VNNI,VL}
vpdpwssds %ymm4, %ymm1, %ymm4{%k7}{z} # AVX512{VNNI,VL}
vpdpwssds -123456(%esp,%esi,8), %ymm1, %ymm4{%k3} # AVX512{VNNI,VL}
vpdpwssds 4064(%edx), %ymm1, %ymm4{%k3} # AVX512{VNNI,VL} Disp8
vpdpwssds 508(%edx){1to8}, %ymm1, %ymm4{%k3} # AVX512{VNNI,VL} Disp8
vpdpbusd %xmm1, %xmm3, %xmm2{%k4} # AVX512{VNNI,VL}
vpdpbusd %xmm1, %xmm3, %xmm2{%k4}{z} # AVX512{VNNI,VL}
vpdpbusd -123456(%esp,%esi,8), %xmm3, %xmm2{%k2} # AVX512{VNNI,VL}
vpdpbusd 2032(%edx), %xmm3, %xmm2{%k2} # AVX512{VNNI,VL} Disp8
vpdpbusd 508(%edx){1to4}, %xmm3, %xmm2{%k2} # AVX512{VNNI,VL} Disp8
vpdpbusd %ymm2, %ymm2, %ymm2{%k5} # AVX512{VNNI,VL}
vpdpbusd %ymm2, %ymm2, %ymm2{%k5}{z} # AVX512{VNNI,VL}
vpdpbusd -123456(%esp,%esi,8), %ymm2, %ymm2{%k7} # AVX512{VNNI,VL}
vpdpbusd 4064(%edx), %ymm2, %ymm2{%k7} # AVX512{VNNI,VL} Disp8
vpdpbusd 508(%edx){1to8}, %ymm2, %ymm2{%k7} # AVX512{VNNI,VL} Disp8
vpdpbusds %xmm4, %xmm2, %xmm6{%k6} # AVX512{VNNI,VL}
vpdpbusds %xmm4, %xmm2, %xmm6{%k6}{z} # AVX512{VNNI,VL}
vpdpbusds -123456(%esp,%esi,8), %xmm2, %xmm6{%k4} # AVX512{VNNI,VL}
vpdpbusds 2032(%edx), %xmm2, %xmm6{%k4} # AVX512{VNNI,VL} Disp8
vpdpbusds 508(%edx){1to4}, %xmm2, %xmm6{%k4} # AVX512{VNNI,VL} Disp8
vpdpbusds %ymm1, %ymm3, %ymm4{%k7} # AVX512{VNNI,VL}
vpdpbusds %ymm1, %ymm3, %ymm4{%k7}{z} # AVX512{VNNI,VL}
vpdpbusds -123456(%esp,%esi,8), %ymm3, %ymm4{%k1} # AVX512{VNNI,VL}
vpdpbusds 4064(%edx), %ymm3, %ymm4{%k1} # AVX512{VNNI,VL} Disp8
vpdpbusds 508(%edx){1to8}, %ymm3, %ymm4{%k1} # AVX512{VNNI,VL} Disp8
# Same coverage again, Intel syntax.
.intel_syntax noprefix
vpdpwssd xmm5{k1}, xmm2, xmm2 # AVX512{VNNI,VL}
vpdpwssd xmm5{k1}{z}, xmm2, xmm2 # AVX512{VNNI,VL}
vpdpwssd xmm5{k6}, xmm2, XMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpwssd xmm5{k6}, xmm2, XMMWORD PTR [edx+2032] # AVX512{VNNI,VL} Disp8
vpdpwssd xmm5{k6}, xmm2, [edx+508]{1to4} # AVX512{VNNI,VL} Disp8
vpdpwssd ymm1{k7}, ymm2, ymm4 # AVX512{VNNI,VL}
vpdpwssd ymm1{k7}{z}, ymm2, ymm4 # AVX512{VNNI,VL}
vpdpwssd ymm1{k6}, ymm2, YMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpwssd ymm1{k6}, ymm2, YMMWORD PTR [edx+4064] # AVX512{VNNI,VL} Disp8
vpdpwssd ymm1{k6}, ymm2, [edx+508]{1to8} # AVX512{VNNI,VL} Disp8
vpdpwssds xmm1{k2}, xmm4, xmm1 # AVX512{VNNI,VL}
vpdpwssds xmm1{k2}{z}, xmm4, xmm1 # AVX512{VNNI,VL}
vpdpwssds xmm1{k6}, xmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpwssds xmm1{k6}, xmm4, XMMWORD PTR [edx+2032] # AVX512{VNNI,VL} Disp8
vpdpwssds xmm1{k6}, xmm4, [edx+508]{1to4} # AVX512{VNNI,VL} Disp8
vpdpwssds ymm3{k4}, ymm2, ymm4 # AVX512{VNNI,VL}
vpdpwssds ymm3{k4}{z}, ymm2, ymm4 # AVX512{VNNI,VL}
vpdpwssds ymm3{k5}, ymm2, YMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpwssds ymm3{k5}, ymm2, YMMWORD PTR [edx+4064] # AVX512{VNNI,VL} Disp8
vpdpwssds ymm3{k5}, ymm2, [edx+508]{1to8} # AVX512{VNNI,VL} Disp8
vpdpbusd xmm3{k7}, xmm4, xmm4 # AVX512{VNNI,VL}
vpdpbusd xmm3{k7}{z}, xmm4, xmm4 # AVX512{VNNI,VL}
vpdpbusd xmm3{k1}, xmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpbusd xmm3{k1}, xmm4, XMMWORD PTR [edx+2032] # AVX512{VNNI,VL} Disp8
vpdpbusd xmm3{k1}, xmm4, [edx+508]{1to4} # AVX512{VNNI,VL} Disp8
vpdpbusd ymm6{k5}, ymm2, ymm4 # AVX512{VNNI,VL}
vpdpbusd ymm6{k5}{z}, ymm2, ymm4 # AVX512{VNNI,VL}
vpdpbusd ymm6{k5}, ymm2, YMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpbusd ymm6{k5}, ymm2, YMMWORD PTR [edx+4064] # AVX512{VNNI,VL} Disp8
vpdpbusd ymm6{k5}, ymm2, [edx+508]{1to8} # AVX512{VNNI,VL} Disp8
vpdpbusds xmm3{k5}, xmm3, xmm4 # AVX512{VNNI,VL}
vpdpbusds xmm3{k5}{z}, xmm3, xmm4 # AVX512{VNNI,VL}
vpdpbusds xmm3{k4}, xmm3, XMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpbusds xmm3{k4}, xmm3, XMMWORD PTR [edx+2032] # AVX512{VNNI,VL} Disp8
vpdpbusds xmm3{k4}, xmm3, [edx+508]{1to4} # AVX512{VNNI,VL} Disp8
vpdpbusds ymm2{k4}, ymm3, ymm4 # AVX512{VNNI,VL}
vpdpbusds ymm2{k4}{z}, ymm3, ymm4 # AVX512{VNNI,VL}
vpdpbusds ymm2{k1}, ymm3, YMMWORD PTR [esp+esi*8-123456] # AVX512{VNNI,VL}
vpdpbusds ymm2{k1}, ymm3, YMMWORD PTR [edx+4064] # AVX512{VNNI,VL} Disp8
vpdpbusds ymm2{k1}, ymm3, [edx+508]{1to8} # AVX512{VNNI,VL} Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 1,527
|
gas/testsuite/gas/i386/x86-64-avx512f_vaes.s
|
# Assembler test input (gas testsuite) for the 64-bit 512-bit-wide VAES
# instructions (vaesdec, vaesdeclast, vaesenc, vaesenclast with zmm
# operands), covering register, indexed-memory and compressed-Disp8
# memory forms in both AT&T and Intel syntax.  NOTE(review): matched by
# an expected-output (.d) file — instruction text must stay byte-exact.
# Check 64bit AVX512F,VAES instructions
.allow_index_reg
.text
_start:
vaesdec %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesdec 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesdec 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesdeclast %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesdeclast 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesdeclast 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenc %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesenc 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesenc 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenclast %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesenclast 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesenclast 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
# Same coverage again, Intel syntax.
.intel_syntax noprefix
vaesdec zmm30, zmm29, zmm28 # AVX512F,VAES
vaesdec zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesdeclast zmm30, zmm29, zmm28 # AVX512F,VAES
vaesdeclast zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesenc zmm30, zmm29, zmm28 # AVX512F,VAES
vaesenc zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesenclast zmm30, zmm29, zmm28 # AVX512F,VAES
vaesenclast zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 12,047
|
gas/testsuite/gas/i386/avx512er.s
|
# Check 32bit AVX512ER instructions
.allow_index_reg
.text
_start:
vexp2ps %zmm5, %zmm6 # AVX512ER
vexp2ps {sae}, %zmm5, %zmm6 # AVX512ER
vexp2ps (%ecx), %zmm6 # AVX512ER
vexp2ps -123456(%esp,%esi,8), %zmm6 # AVX512ER
vexp2ps (%eax){1to16}, %zmm6 # AVX512ER
vexp2ps 8128(%edx), %zmm6 # AVX512ER Disp8
vexp2ps 8192(%edx), %zmm6 # AVX512ER
vexp2ps -8192(%edx), %zmm6 # AVX512ER Disp8
vexp2ps -8256(%edx), %zmm6 # AVX512ER
vexp2ps 508(%edx){1to16}, %zmm6 # AVX512ER Disp8
vexp2ps 512(%edx){1to16}, %zmm6 # AVX512ER
vexp2ps -512(%edx){1to16}, %zmm6 # AVX512ER Disp8
vexp2ps -516(%edx){1to16}, %zmm6 # AVX512ER
vexp2pd %zmm5, %zmm6 # AVX512ER
vexp2pd {sae}, %zmm5, %zmm6 # AVX512ER
vexp2pd (%ecx), %zmm6 # AVX512ER
vexp2pd -123456(%esp,%esi,8), %zmm6 # AVX512ER
vexp2pd (%eax){1to8}, %zmm6 # AVX512ER
vexp2pd 8128(%edx), %zmm6 # AVX512ER Disp8
vexp2pd 8192(%edx), %zmm6 # AVX512ER
vexp2pd -8192(%edx), %zmm6 # AVX512ER Disp8
vexp2pd -8256(%edx), %zmm6 # AVX512ER
vexp2pd 1016(%edx){1to8}, %zmm6 # AVX512ER Disp8
vexp2pd 1024(%edx){1to8}, %zmm6 # AVX512ER
vexp2pd -1024(%edx){1to8}, %zmm6 # AVX512ER Disp8
vexp2pd -1032(%edx){1to8}, %zmm6 # AVX512ER
vrcp28ps %zmm5, %zmm6 # AVX512ER
vrcp28ps %zmm5, %zmm6{%k7} # AVX512ER
vrcp28ps %zmm5, %zmm6{%k7}{z} # AVX512ER
vrcp28ps {sae}, %zmm5, %zmm6 # AVX512ER
vrcp28ps (%ecx), %zmm6 # AVX512ER
vrcp28ps -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrcp28ps (%eax){1to16}, %zmm6 # AVX512ER
vrcp28ps 8128(%edx), %zmm6 # AVX512ER Disp8
vrcp28ps 8192(%edx), %zmm6 # AVX512ER
vrcp28ps -8192(%edx), %zmm6 # AVX512ER Disp8
vrcp28ps -8256(%edx), %zmm6 # AVX512ER
vrcp28ps 508(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrcp28ps 512(%edx){1to16}, %zmm6 # AVX512ER
vrcp28ps -512(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrcp28ps -516(%edx){1to16}, %zmm6 # AVX512ER
vrcp28pd %zmm5, %zmm6 # AVX512ER
vrcp28pd %zmm5, %zmm6{%k7} # AVX512ER
vrcp28pd %zmm5, %zmm6{%k7}{z} # AVX512ER
vrcp28pd {sae}, %zmm5, %zmm6 # AVX512ER
vrcp28pd (%ecx), %zmm6 # AVX512ER
vrcp28pd -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrcp28pd (%eax){1to8}, %zmm6 # AVX512ER
vrcp28pd 8128(%edx), %zmm6 # AVX512ER Disp8
vrcp28pd 8192(%edx), %zmm6 # AVX512ER
vrcp28pd -8192(%edx), %zmm6 # AVX512ER Disp8
vrcp28pd -8256(%edx), %zmm6 # AVX512ER
vrcp28pd 1016(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrcp28pd 1024(%edx){1to8}, %zmm6 # AVX512ER
vrcp28pd -1024(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrcp28pd -1032(%edx){1to8}, %zmm6 # AVX512ER
vrcp28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrcp28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrcp28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ps %zmm5, %zmm6 # AVX512ER
vrsqrt28ps %zmm5, %zmm6{%k7} # AVX512ER
vrsqrt28ps %zmm5, %zmm6{%k7}{z} # AVX512ER
vrsqrt28ps {sae}, %zmm5, %zmm6 # AVX512ER
vrsqrt28ps (%ecx), %zmm6 # AVX512ER
vrsqrt28ps -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrsqrt28ps (%eax){1to16}, %zmm6 # AVX512ER
vrsqrt28ps 8128(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28ps 8192(%edx), %zmm6 # AVX512ER
vrsqrt28ps -8192(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28ps -8256(%edx), %zmm6 # AVX512ER
vrsqrt28ps 508(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrsqrt28ps 512(%edx){1to16}, %zmm6 # AVX512ER
vrsqrt28ps -512(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrsqrt28ps -516(%edx){1to16}, %zmm6 # AVX512ER
vrsqrt28pd %zmm5, %zmm6 # AVX512ER
vrsqrt28pd %zmm5, %zmm6{%k7} # AVX512ER
vrsqrt28pd %zmm5, %zmm6{%k7}{z} # AVX512ER
vrsqrt28pd {sae}, %zmm5, %zmm6 # AVX512ER
vrsqrt28pd (%ecx), %zmm6 # AVX512ER
vrsqrt28pd -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrsqrt28pd (%eax){1to8}, %zmm6 # AVX512ER
vrsqrt28pd 8128(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28pd 8192(%edx), %zmm6 # AVX512ER
vrsqrt28pd -8192(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28pd -8256(%edx), %zmm6 # AVX512ER
vrsqrt28pd 1016(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrsqrt28pd 1024(%edx){1to8}, %zmm6 # AVX512ER
vrsqrt28pd -1024(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrsqrt28pd -1032(%edx){1to8}, %zmm6 # AVX512ER
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrsqrt28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrsqrt28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512ER
.intel_syntax noprefix
vexp2ps zmm6, zmm5 # AVX512ER
vexp2ps zmm6, zmm5{sae} # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [ecx] # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vexp2ps zmm6, [eax]{1to16} # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vexp2ps zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vexp2ps zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vexp2ps zmm6, [edx+508]{1to16} # AVX512ER Disp8
vexp2ps zmm6, [edx+512]{1to16} # AVX512ER
vexp2ps zmm6, [edx-512]{1to16} # AVX512ER Disp8
vexp2ps zmm6, [edx-516]{1to16} # AVX512ER
vexp2pd zmm6, zmm5 # AVX512ER
vexp2pd zmm6, zmm5{sae} # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [ecx] # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vexp2pd zmm6, [eax]{1to8} # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vexp2pd zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vexp2pd zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vexp2pd zmm6, [edx+1016]{1to8} # AVX512ER Disp8
vexp2pd zmm6, [edx+1024]{1to8} # AVX512ER
vexp2pd zmm6, [edx-1024]{1to8} # AVX512ER Disp8
vexp2pd zmm6, [edx-1032]{1to8} # AVX512ER
vrcp28ps zmm6, zmm5 # AVX512ER
vrcp28ps zmm6{k7}, zmm5 # AVX512ER
vrcp28ps zmm6{k7}{z}, zmm5 # AVX512ER
vrcp28ps zmm6, zmm5{sae} # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28ps zmm6, [eax]{1to16} # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrcp28ps zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrcp28ps zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrcp28ps zmm6, [edx+508]{1to16} # AVX512ER Disp8
vrcp28ps zmm6, [edx+512]{1to16} # AVX512ER
vrcp28ps zmm6, [edx-512]{1to16} # AVX512ER Disp8
vrcp28ps zmm6, [edx-516]{1to16} # AVX512ER
vrcp28pd zmm6, zmm5 # AVX512ER
vrcp28pd zmm6{k7}, zmm5 # AVX512ER
vrcp28pd zmm6{k7}{z}, zmm5 # AVX512ER
vrcp28pd zmm6, zmm5{sae} # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28pd zmm6, [eax]{1to8} # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrcp28pd zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrcp28pd zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrcp28pd zmm6, [edx+1016]{1to8} # AVX512ER Disp8
vrcp28pd zmm6, [edx+1024]{1to8} # AVX512ER
vrcp28pd zmm6, [edx-1024]{1to8} # AVX512ER Disp8
vrcp28pd zmm6, [edx-1032]{1to8} # AVX512ER
vrcp28ss xmm6{k7}, xmm5, xmm4 # AVX512ER
vrcp28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrcp28ss xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512ER Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512ER Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, xmm4 # AVX512ER
vrcp28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrcp28sd xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512ER Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512ER Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512ER
vrsqrt28ps zmm6, zmm5 # AVX512ER
vrsqrt28ps zmm6{k7}, zmm5 # AVX512ER
vrsqrt28ps zmm6{k7}{z}, zmm5 # AVX512ER
vrsqrt28ps zmm6, zmm5{sae} # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28ps zmm6, [eax]{1to16} # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrsqrt28ps zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrsqrt28ps zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrsqrt28ps zmm6, [edx+508]{1to16} # AVX512ER Disp8
vrsqrt28ps zmm6, [edx+512]{1to16} # AVX512ER
vrsqrt28ps zmm6, [edx-512]{1to16} # AVX512ER Disp8
vrsqrt28ps zmm6, [edx-516]{1to16} # AVX512ER
vrsqrt28pd zmm6, zmm5 # AVX512ER
vrsqrt28pd zmm6{k7}, zmm5 # AVX512ER
vrsqrt28pd zmm6{k7}{z}, zmm5 # AVX512ER
vrsqrt28pd zmm6, zmm5{sae} # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28pd zmm6, [eax]{1to8} # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrsqrt28pd zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrsqrt28pd zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrsqrt28pd zmm6, [edx+1016]{1to8} # AVX512ER Disp8
vrsqrt28pd zmm6, [edx+1024]{1to8} # AVX512ER
vrsqrt28pd zmm6, [edx-1024]{1to8} # AVX512ER Disp8
vrsqrt28pd zmm6, [edx-1032]{1to8} # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, xmm4 # AVX512ER
vrsqrt28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512ER Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512ER Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, xmm4 # AVX512ER
vrsqrt28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512ER Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512ER Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512ER
# ----------------------------------------------------------------------
# next concatenated file
# repo: tactcomplabs/xbgas-binutils-gdb
# size: 1,421 bytes
# path: gas/testsuite/gas/i386/align-branch-9.s
# ----------------------------------------------------------------------
.text
.globl foo
.p2align 4
foo:
movl %eax, %gs:0x1
pushl %ebp
pushl %ebp
pushl %ebp
pushl %ebp
movl %esp, %ebp
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
cmp %eax, %ebp
jo label2
movl %esi, -12(%ebx)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
popl %ebp
popl %ebp
je label2
popl %ebp
je label2
movl %eax, -4(%esp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
jmp label3
jmp label3
jmp label3
movl %eax, -4(%ebp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
popl %ebp
popl %ebp
inc %eax
jc label2
movl %eax, -4(%ebp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
and %eax, %ebx
jl label3
label2:
movl -12(%ebp), %eax
movl %eax, -4(%ebp)
label3:
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, 12(%ebp)
jmp bar
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, (%ebp)
je label3
je label3
# ----------------------------------------------------------------------
# next concatenated file
# repo: tactcomplabs/xbgas-binutils-gdb
# size: 1,228 bytes
# path: gas/testsuite/gas/i386/x86-64-avx512_fp16-inval-bcast.s
# ----------------------------------------------------------------------
# Check error for invalid {1toXX} and {2toXX} broadcasts.
.allow_index_reg
.text
_start:
vcvtpd2ph (%ecx){1to16}, %xmm30
vcvtuqq2ph -1024(%edx){1to32}, %xmm30
vcvtdq2ph (%ecx){1to8}, %ymm30
vcvtudq2ph -512(%edx){1to32}, %ymm30
vcmpph $123, (%ecx){1to16}, %zmm29, %k5
vcmpph $123, (%ecx){1to64}, %zmm29, %k5
vfmadd132ph (%ecx){1to8}, %zmm29, %zmm3
vfcmaddcph (%ecx){1to8}, %zmm29, %zmm3
vfcmulcph (%ecx){1to32}, %zmm29, %zmm3
vcvtdq2ph (%ecx){1to8}, %ymm30
vfmaddcph (%ecx){1to8}, %zmm29, %zmm3
vfmulcph -512(%edx){1to32}, %zmm29, %zmm3
vfmulcph -512(%edx){1to4}, %zmm29, %zmm3
.intel_syntax noprefix
vcvtpd2ph xmm30, QWORD PTR [ecx]{1to16}
vcvtuqq2ph xmm30, QWORD PTR [edx-1024]{1to32}
vcvtdq2ph ymm30, DWORD PTR [ecx]{1to8}
vcvtudq2ph ymm30, DWORD PTR [edx-512]{1to32}
vcmpph k5, zmm29, WORD PTR [edx-256]{1to16}, 123
vcmpph k5, zmm29, WORD PTR [edx-256]{1to64}, 123
vfmsubadd231ph zmm30, zmm2, WORD PTR [edx-256]{1to8}
vfcmaddcph zmm3, zmm29, DWORD PTR [ecx]{1to8}
vfcmulcph zmm3, zmm29, DWORD PTR [ecx]{1to32}
vcvtdq2ph ymm30, DWORD PTR [ecx]{1to8}
vfcmaddcph zmm30, zmm2, DWORD PTR [ecx]{1to8}
vfmulcph zmm30, zmm2, DWORD PTR [edx-512]{1to32}
vfmulcph zmm30, zmm2, DWORD PTR [edx-512]{1to4}
# ----------------------------------------------------------------------
# next concatenated file
# repo: tactcomplabs/xbgas-binutils-gdb
# size: 7,796 bytes
# path: gas/testsuite/gas/i386/evex-wig.s
# ----------------------------------------------------------------------
# Check EVEX WIG instructions
.allow_index_reg
.text
_start:
{evex} vcvtsi2ss %eax, %xmm0, %xmm0
{evex} vcvtsi2ss 4(%eax), %xmm0, %xmm0
{evex} vcvtsi2sd %eax, %xmm0, %xmm0
{evex} vcvtsi2sd 4(%eax), %xmm0, %xmm0
{evex} vcvtss2si %xmm0, %eax
{evex} vcvtsd2si %xmm0, %eax
{evex} vcvttss2si %xmm0, %eax
{evex} vcvttsd2si %xmm0, %eax
vcvtusi2ss %eax, %xmm0, %xmm0
vcvtusi2ss 4(%eax), %xmm0, %xmm0
vcvtusi2sd %eax, %xmm0, %xmm0
vcvtusi2sd 4(%eax), %xmm0, %xmm0
vcvtss2usi %xmm0, %eax
vcvtsd2usi %xmm0, %eax
vcvttss2usi %xmm0, %eax
vcvttsd2usi %xmm0, %eax
{evex} vextractps $0, %xmm0, %eax
{evex} vextractps $0, %xmm0, 4(%eax)
{evex} vmovd %eax, %xmm0
{evex} vmovd 4(%eax), %xmm0
{evex} vmovd %xmm0, %eax
{evex} vmovd %xmm0, 4(%eax)
vpbroadcastd %eax, %xmm0
{evex} vpextrb $0, %xmm0, %eax
{evex} vpextrb $0, %xmm0, 1(%eax)
{evex} vpextrd $0, %xmm0, %eax
{evex} vpextrd $0, %xmm0, 4(%eax)
{evex} vpextrw $0, %xmm0, %eax
{evex} {store} vpextrw $0, %xmm0, %eax
{evex} vpextrw $0, %xmm0, 2(%eax)
{evex} vpinsrb $0, %eax, %xmm0, %xmm0
{evex} vpinsrb $0, 1(%eax), %xmm0, %xmm0
{evex} vpinsrd $0, %eax, %xmm0, %xmm0
{evex} vpinsrd $0, 4(%eax), %xmm0, %xmm0
{evex} vpinsrw $0, %eax, %xmm0, %xmm0
{evex} vpinsrw $0, 2(%eax), %xmm0, %xmm0
vmovss %xmm0, %xmm0, %xmm0{%k7}
vmovss (%eax), %xmm0{%k7}
vmovss %xmm0, (%eax){%k7}
vmovsd %xmm0, %xmm0, %xmm0{%k7}
vmovsd (%eax), %xmm0{%k7}
vmovsd %xmm0, (%eax){%k7}
vmovsh %xmm0, %xmm0, %xmm0{%k7}
vmovsh (%eax), %xmm0{%k7}
vmovsh %xmm0, (%eax){%k7}
vpmovsxbd %xmm5, %zmm6{%k7} # AVX512
vpmovsxbd %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxbd (%ecx), %zmm6{%k7} # AVX512
vpmovsxbd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxbd 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbd 2048(%edx), %zmm6{%k7} # AVX512
vpmovsxbd -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbd -2064(%edx), %zmm6{%k7} # AVX512
vpmovsxbq %xmm5, %zmm6{%k7} # AVX512
vpmovsxbq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxbq (%ecx), %zmm6{%k7} # AVX512
vpmovsxbq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxbq 1016(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbq 1024(%edx), %zmm6{%k7} # AVX512
vpmovsxbq -1024(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxbq -1032(%edx), %zmm6{%k7} # AVX512
vpmovsxwd %ymm5, %zmm6{%k7} # AVX512
vpmovsxwd %ymm5, %zmm6{%k7}{z} # AVX512
vpmovsxwd (%ecx), %zmm6{%k7} # AVX512
vpmovsxwd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxwd 4064(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwd 4096(%edx), %zmm6{%k7} # AVX512
vpmovsxwd -4096(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwd -4128(%edx), %zmm6{%k7} # AVX512
vpmovsxwq %xmm5, %zmm6{%k7} # AVX512
vpmovsxwq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovsxwq (%ecx), %zmm6{%k7} # AVX512
vpmovsxwq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovsxwq 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwq 2048(%edx), %zmm6{%k7} # AVX512
vpmovsxwq -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovsxwq -2064(%edx), %zmm6{%k7} # AVX512
vpmovzxbd %xmm5, %zmm6{%k7} # AVX512
vpmovzxbd %xmm5, %zmm6{%k7}{z} # AVX512
vpmovzxbd (%ecx), %zmm6{%k7} # AVX512
vpmovzxbd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxbd 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbd 2048(%edx), %zmm6{%k7} # AVX512
vpmovzxbd -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbd -2064(%edx), %zmm6{%k7} # AVX512
vpmovzxbq %xmm5, %zmm6{%k7} # AVX512
vpmovzxbq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovzxbq (%ecx), %zmm6{%k7} # AVX512
vpmovzxbq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxbq 1016(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbq 1024(%edx), %zmm6{%k7} # AVX512
vpmovzxbq -1024(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxbq -1032(%edx), %zmm6{%k7} # AVX512
vpmovzxwd %ymm5, %zmm6{%k7} # AVX512
vpmovzxwd %ymm5, %zmm6{%k7}{z} # AVX512
vpmovzxwd (%ecx), %zmm6{%k7} # AVX512
vpmovzxwd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxwd 4064(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwd 4096(%edx), %zmm6{%k7} # AVX512
vpmovzxwd -4096(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwd -4128(%edx), %zmm6{%k7} # AVX512
vpmovzxwq %xmm5, %zmm6{%k7} # AVX512
vpmovzxwq %xmm5, %zmm6{%k7}{z} # AVX512
vpmovzxwq (%ecx), %zmm6{%k7} # AVX512
vpmovzxwq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512
vpmovzxwq 2032(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwq 2048(%edx), %zmm6{%k7} # AVX512
vpmovzxwq -2048(%edx), %zmm6{%k7} # AVX512 Disp8
vpmovzxwq -2064(%edx), %zmm6{%k7} # AVX512
.intel_syntax noprefix
vpmovsxbd zmm6{k7}, xmm5 # AVX512
vpmovsxbd zmm6{k7}{z}, xmm5 # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
vpmovsxbq zmm6{k7}, xmm5 # AVX512
vpmovsxbq zmm6{k7}{z}, xmm5 # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [ecx] # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512 Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512 Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512
vpmovsxwd zmm6{k7}, ymm5 # AVX512
vpmovsxwd zmm6{k7}{z}, ymm5 # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512 Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512 Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512
vpmovsxwq zmm6{k7}, xmm5 # AVX512
vpmovsxwq zmm6{k7}{z}, xmm5 # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
vpmovzxbd zmm6{k7}, xmm5 # AVX512
vpmovzxbd zmm6{k7}{z}, xmm5 # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
vpmovzxbq zmm6{k7}, xmm5 # AVX512
vpmovzxbq zmm6{k7}{z}, xmm5 # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [ecx] # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512 Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512 Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512
vpmovzxwd zmm6{k7}, ymm5 # AVX512
vpmovzxwd zmm6{k7}{z}, ymm5 # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512 Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512 Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512
vpmovzxwq zmm6{k7}, xmm5 # AVX512
vpmovzxwq zmm6{k7}{z}, xmm5 # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512 Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512 Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512
# ----------------------------------------------------------------------
# next concatenated file
# repo: tactcomplabs/xbgas-binutils-gdb
# size: 1,620 bytes
# path: gas/testsuite/gas/i386/x86-64-lockbad-1.s
# ----------------------------------------------------------------------
# 64bit unlockable Instructions
.text
foo:
lock mov %ecx, %eax
lock mov (%rbx), %eax
lock add %ebx, %eax
lock add $0x64, %ebx
lock adc %ebx, %eax
lock adc $0x64, %ebx
lock and %ebx, %eax
lock and $0x64, %ebx
lock btc %eax, %ebx
lock btc $0x64, %ebx
lock btr %eax, %ebx
lock btr $0x64, %ebx
lock bts %eax, %ebx
lock bts $0x64, %ebx
lock cmpxchg %eax,%ebx
lock decl %ebx
lock incl %ebx
lock negl %ebx
lock notl %ebx
lock or %ebx, %eax
lock or $0x64, %ebx
lock sbb %ebx, %eax
lock sbb $0x64, %ebx
lock sub %ebx, %eax
lock sub $0x64, %ebx
lock xadd %eax, %ebx
lock xchg %ebx, %eax
lock xchg %eax, %ebx
lock xor %ebx, %eax
lock xor $0x64, %ebx
lock add (%rbx), %eax
lock adc (%rbx), %eax
lock and (%rbx), %eax
lock or (%rbx), %eax
lock sbb (%rbx), %eax
lock sub (%rbx), %eax
lock xor (%rbx), %eax
.intel_syntax noprefix
lock mov eax,ebx
lock mov eax,DWORD PTR [rbx]
lock add eax,ebx
lock add ebx,0x64
lock adc eax,ebx
lock adc ebx,0x64
lock and eax,ebx
lock and ebx,0x64
lock btc ebx,eax
lock btc ebx,0x64
lock btr ebx,eax
lock btr ebx,0x64
lock bts ebx,eax
lock bts ebx,0x64
lock cmpxchg ebx,eax
lock dec ebx
lock inc ebx
lock neg ebx
lock not ebx
lock or eax,ebx
lock or ebx,0x64
lock sbb eax,ebx
lock sbb ebx,0x64
lock sub eax,ebx
lock sub ebx,0x64
lock xadd ebx,eax
lock xchg ebx,eax
lock xchg ebx,eax
lock xor eax,ebx
lock xor ebx,0x64
lock add eax,DWORD PTR [rbx]
lock adc eax,DWORD PTR [rbx]
lock and eax,DWORD PTR [rbx]
lock or eax,DWORD PTR [rbx]
lock sbb eax,DWORD PTR [rbx]
lock sub eax,DWORD PTR [rbx]
lock xor eax,DWORD PTR [rbx]
# ----------------------------------------------------------------------
# next concatenated file
# repo: tactcomplabs/xbgas-binutils-gdb
# size: 4,969 bytes
# path: gas/testsuite/gas/i386/x86-64-avx512_bf16_vl.s
# ----------------------------------------------------------------------
# Check 64bit AVX512{BF16,VL} instructions
.allow_index_reg
.text
_start:
vcvtne2ps2bf16 %ymm28, %ymm29, %ymm30 #AVX512{BF16,VL}
vcvtne2ps2bf16 %xmm28, %xmm29, %xmm30 #AVX512{BF16,VL}
vcvtne2ps2bf16 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 (%r9){1to8}, %ymm29, %ymm30 #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 4064(%rcx), %ymm29, %ymm30 #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 -4096(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtne2ps2bf16 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 (%r9){1to4}, %xmm29, %xmm30 #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 2032(%rcx), %xmm29, %xmm30 #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 -2048(%rdx){1to4}, %xmm29, %xmm28{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 %xmm29, %xmm30 #AVX512{BF16,VL}
vcvtneps2bf16 %ymm29, %xmm30 #AVX512{BF16,VL}
vcvtneps2bf16x 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512{BF16,VL} MASK_ENABLING
vcvtneps2bf16 (%r9){1to4}, %xmm21 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16x (%rcx){1to4}, %xmm1 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16x 2032(%rcx), %xmm30 #AVX512{BF16,VL} Disp8
vcvtneps2bf16 -2048(%rdx){1to4}, %xmm29{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 (%r9){1to8}, %xmm22 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16y (%rcx){1to8}, %xmm2 #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16y 4064(%rcx), %xmm23 #AVX512{BF16,VL} Disp8
vcvtneps2bf16 -4096(%rdx){1to8}, %xmm27{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps %ymm28, %ymm29, %ymm30 #AVX512{BF16,VL}
vdpbf16ps %xmm28, %xmm29, %xmm30 #AVX512{BF16,VL}
vdpbf16ps 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps (%r9){1to8}, %ymm29, %ymm30 #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps 4064(%rcx), %ymm29, %ymm30 #AVX512{BF16,VL} Disp8
vdpbf16ps -4096(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps (%r9){1to4}, %xmm29, %xmm30 #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps 2032(%rcx), %xmm29, %xmm30 #AVX512{BF16,VL} Disp8
vdpbf16ps -2048(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
.intel_syntax noprefix
vcvtne2ps2bf16 ymm30, ymm29, ymm28 #AVX512{BF16,VL}
vcvtne2ps2bf16 xmm30, xmm29, xmm28 #AVX512{BF16,VL}
vcvtne2ps2bf16 ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 ymm30, ymm29, DWORD BCST [r9] #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 ymm30{k7}{z}, ymm29, DWORD BCST [rdx-4096] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtne2ps2bf16 xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vcvtne2ps2bf16 xmm30, xmm29, DWORD BCST [r9] #AVX512{BF16,VL} BROADCAST_EN
vcvtne2ps2bf16 xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512{BF16,VL} Disp8
vcvtne2ps2bf16 xmm30{k7}{z}, xmm29, DWORD BCST [rdx-2048] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 xmm30, xmm29 #AVX512{BF16,VL}
vcvtneps2bf16 xmm30, ymm29 #AVX512{BF16,VL}
vcvtneps2bf16 xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vcvtneps2bf16 xmm5, [rcx]{1to4} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm25, DWORD BCST [r9]{1to4} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm30, XMMWORD PTR [rcx+2032] #AVX512{BF16,VL} Disp8
vcvtneps2bf16 xmm30{k7}{z}, DWORD BCST [rdx-2048]{1to4} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 xmm4, [rcx]{1to8} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm24, DWORD BCST [r9]{1to8} #AVX512{BF16,VL} BROADCAST_EN
vcvtneps2bf16 xmm30, YMMWORD PTR [rcx+4064] #AVX512{BF16,VL} Disp8
vcvtneps2bf16 xmm30{k7}{z}, DWORD BCST [rdx-4096]{1to8} #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps ymm30, ymm29, ymm28 #AVX512{BF16,VL}
vdpbf16ps xmm30, xmm29, xmm28 #AVX512{BF16,VL}
vdpbf16ps ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps ymm30, ymm29, DWORD BCST [r9] #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512{BF16,VL} Disp8
vdpbf16ps ymm30{k7}{z}, ymm29, DWORD BCST [rdx-4096] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512{BF16,VL} MASK_ENABLING
vdpbf16ps xmm30, xmm29, DWORD BCST [r9] #AVX512{BF16,VL} BROADCAST_EN
vdpbf16ps xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512{BF16,VL} Disp8
vdpbf16ps xmm30{k7}{z}, xmm29, DWORD BCST [rdx-2048] #AVX512{BF16,VL} Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# ----------------------------------------------------------------------
# next concatenated file
# repo: tactcomplabs/xbgas-binutils-gdb
# size: 116,029 bytes
# path: gas/testsuite/gas/i386/x86-64-avx.s
# ----------------------------------------------------------------------
# Check 64bit AVX instructions
.allow_index_reg
.text
_start:
# Tests for op
vzeroall
vzeroupper
# Tests for op mem64
vldmxcsr (%rcx)
vstmxcsr (%rcx)
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd (%rcx),%ymm4,%ymm6
vmaskmovpd %ymm4,%ymm6,(%rcx)
vmaskmovps (%rcx),%ymm4,%ymm6
vmaskmovps %ymm4,%ymm6,(%rcx)
# Tests for op imm8, ymm/mem256, ymm
vpermilpd $7,%ymm6,%ymm2
vpermilpd $7,(%rcx),%ymm6
vpermilps $7,%ymm6,%ymm2
vpermilps $7,(%rcx),%ymm6
vroundpd $7,%ymm6,%ymm2
vroundpd $7,(%rcx),%ymm6
vroundps $7,%ymm6,%ymm2
vroundps $7,(%rcx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vaddpd %ymm4,%ymm6,%ymm2
vaddpd (%rcx),%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddps (%rcx),%ymm6,%ymm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubpd (%rcx),%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaddsubps (%rcx),%ymm6,%ymm2
vandnpd %ymm4,%ymm6,%ymm2
vandnpd (%rcx),%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandnps (%rcx),%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandpd (%rcx),%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vandps (%rcx),%ymm6,%ymm2
vdivpd %ymm4,%ymm6,%ymm2
vdivpd (%rcx),%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivps (%rcx),%ymm6,%ymm2
vhaddpd %ymm4,%ymm6,%ymm2
vhaddpd (%rcx),%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhaddps (%rcx),%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubpd (%rcx),%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vhsubps (%rcx),%ymm6,%ymm2
vmaxpd %ymm4,%ymm6,%ymm2
vmaxpd (%rcx),%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxps (%rcx),%ymm6,%ymm2
vminpd %ymm4,%ymm6,%ymm2
vminpd (%rcx),%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminps (%rcx),%ymm6,%ymm2
vmulpd %ymm4,%ymm6,%ymm2
vmulpd (%rcx),%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulps (%rcx),%ymm6,%ymm2
vorpd %ymm4,%ymm6,%ymm2
vorpd (%rcx),%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vorps (%rcx),%ymm6,%ymm2
vpermilpd %ymm4,%ymm6,%ymm2
vpermilpd (%rcx),%ymm6,%ymm2
vpermilps %ymm4,%ymm6,%ymm2
vpermilps (%rcx),%ymm6,%ymm2
vsubpd %ymm4,%ymm6,%ymm2
vsubpd (%rcx),%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubps (%rcx),%ymm6,%ymm2
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhpd (%rcx),%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpckhps (%rcx),%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklpd (%rcx),%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vunpcklps (%rcx),%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorpd (%rcx),%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vxorps (%rcx),%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqpd (%rcx),%ymm6,%ymm2
vcmpltpd %ymm4,%ymm6,%ymm2
vcmpltpd (%rcx),%ymm6,%ymm2
vcmplepd %ymm4,%ymm6,%ymm2
vcmplepd (%rcx),%ymm6,%ymm2
vcmpunordpd %ymm4,%ymm6,%ymm2
vcmpunordpd (%rcx),%ymm6,%ymm2
vcmpneqpd %ymm4,%ymm6,%ymm2
vcmpneqpd (%rcx),%ymm6,%ymm2
vcmpnltpd %ymm4,%ymm6,%ymm2
vcmpnltpd (%rcx),%ymm6,%ymm2
vcmpnlepd %ymm4,%ymm6,%ymm2
vcmpnlepd (%rcx),%ymm6,%ymm2
vcmpordpd %ymm4,%ymm6,%ymm2
vcmpordpd (%rcx),%ymm6,%ymm2
vcmpeq_uqpd %ymm4,%ymm6,%ymm2
vcmpeq_uqpd (%rcx),%ymm6,%ymm2
vcmpngepd %ymm4,%ymm6,%ymm2
vcmpngepd (%rcx),%ymm6,%ymm2
vcmpngtpd %ymm4,%ymm6,%ymm2
vcmpngtpd (%rcx),%ymm6,%ymm2
vcmpfalsepd %ymm4,%ymm6,%ymm2
vcmpfalsepd (%rcx),%ymm6,%ymm2
vcmpneq_oqpd %ymm4,%ymm6,%ymm2
vcmpneq_oqpd (%rcx),%ymm6,%ymm2
vcmpgepd %ymm4,%ymm6,%ymm2
vcmpgepd (%rcx),%ymm6,%ymm2
vcmpgtpd %ymm4,%ymm6,%ymm2
vcmpgtpd (%rcx),%ymm6,%ymm2
vcmptruepd %ymm4,%ymm6,%ymm2
vcmptruepd (%rcx),%ymm6,%ymm2
vcmpeq_ospd %ymm4,%ymm6,%ymm2
vcmpeq_ospd (%rcx),%ymm6,%ymm2
vcmplt_oqpd %ymm4,%ymm6,%ymm2
vcmplt_oqpd (%rcx),%ymm6,%ymm2
vcmple_oqpd %ymm4,%ymm6,%ymm2
vcmple_oqpd (%rcx),%ymm6,%ymm2
vcmpunord_spd %ymm4,%ymm6,%ymm2
vcmpunord_spd (%rcx),%ymm6,%ymm2
vcmpneq_uspd %ymm4,%ymm6,%ymm2
vcmpneq_uspd (%rcx),%ymm6,%ymm2
vcmpnlt_uqpd %ymm4,%ymm6,%ymm2
vcmpnlt_uqpd (%rcx),%ymm6,%ymm2
vcmpnle_uqpd %ymm4,%ymm6,%ymm2
vcmpnle_uqpd (%rcx),%ymm6,%ymm2
vcmpord_spd %ymm4,%ymm6,%ymm2
vcmpord_spd (%rcx),%ymm6,%ymm2
vcmpeq_uspd %ymm4,%ymm6,%ymm2
vcmpeq_uspd (%rcx),%ymm6,%ymm2
vcmpnge_uqpd %ymm4,%ymm6,%ymm2
vcmpnge_uqpd (%rcx),%ymm6,%ymm2
vcmpngt_uqpd %ymm4,%ymm6,%ymm2
vcmpngt_uqpd (%rcx),%ymm6,%ymm2
vcmpfalse_ospd %ymm4,%ymm6,%ymm2
vcmpfalse_ospd (%rcx),%ymm6,%ymm2
vcmpneq_ospd %ymm4,%ymm6,%ymm2
vcmpneq_ospd (%rcx),%ymm6,%ymm2
vcmpge_oqpd %ymm4,%ymm6,%ymm2
vcmpge_oqpd (%rcx),%ymm6,%ymm2
vcmpgt_oqpd %ymm4,%ymm6,%ymm2
vcmpgt_oqpd (%rcx),%ymm6,%ymm2
vcmptrue_uspd %ymm4,%ymm6,%ymm2
vcmptrue_uspd (%rcx),%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqps (%rcx),%ymm6,%ymm2
vcmpltps %ymm4,%ymm6,%ymm2
vcmpltps (%rcx),%ymm6,%ymm2
vcmpleps %ymm4,%ymm6,%ymm2
vcmpleps (%rcx),%ymm6,%ymm2
vcmpunordps %ymm4,%ymm6,%ymm2
vcmpunordps (%rcx),%ymm6,%ymm2
vcmpneqps %ymm4,%ymm6,%ymm2
vcmpneqps (%rcx),%ymm6,%ymm2
vcmpnltps %ymm4,%ymm6,%ymm2
vcmpnltps (%rcx),%ymm6,%ymm2
vcmpnleps %ymm4,%ymm6,%ymm2
vcmpnleps (%rcx),%ymm6,%ymm2
vcmpordps %ymm4,%ymm6,%ymm2
vcmpordps (%rcx),%ymm6,%ymm2
vcmpeq_uqps %ymm4,%ymm6,%ymm2
vcmpeq_uqps (%rcx),%ymm6,%ymm2
vcmpngeps %ymm4,%ymm6,%ymm2
vcmpngeps (%rcx),%ymm6,%ymm2
vcmpngtps %ymm4,%ymm6,%ymm2
vcmpngtps (%rcx),%ymm6,%ymm2
vcmpfalseps %ymm4,%ymm6,%ymm2
vcmpfalseps (%rcx),%ymm6,%ymm2
vcmpneq_oqps %ymm4,%ymm6,%ymm2
vcmpneq_oqps (%rcx),%ymm6,%ymm2
vcmpgeps %ymm4,%ymm6,%ymm2
vcmpgeps (%rcx),%ymm6,%ymm2
vcmpgtps %ymm4,%ymm6,%ymm2
vcmpgtps (%rcx),%ymm6,%ymm2
vcmptrueps %ymm4,%ymm6,%ymm2
vcmptrueps (%rcx),%ymm6,%ymm2
vcmpeq_osps %ymm4,%ymm6,%ymm2
vcmpeq_osps (%rcx),%ymm6,%ymm2
vcmplt_oqps %ymm4,%ymm6,%ymm2
vcmplt_oqps (%rcx),%ymm6,%ymm2
vcmple_oqps %ymm4,%ymm6,%ymm2
vcmple_oqps (%rcx),%ymm6,%ymm2
vcmpunord_sps %ymm4,%ymm6,%ymm2
vcmpunord_sps (%rcx),%ymm6,%ymm2
vcmpneq_usps %ymm4,%ymm6,%ymm2
vcmpneq_usps (%rcx),%ymm6,%ymm2
vcmpnlt_uqps %ymm4,%ymm6,%ymm2
vcmpnlt_uqps (%rcx),%ymm6,%ymm2
vcmpnle_uqps %ymm4,%ymm6,%ymm2
vcmpnle_uqps (%rcx),%ymm6,%ymm2
vcmpord_sps %ymm4,%ymm6,%ymm2
vcmpord_sps (%rcx),%ymm6,%ymm2
vcmpeq_usps %ymm4,%ymm6,%ymm2
vcmpeq_usps (%rcx),%ymm6,%ymm2
vcmpnge_uqps %ymm4,%ymm6,%ymm2
vcmpnge_uqps (%rcx),%ymm6,%ymm2
vcmpngt_uqps %ymm4,%ymm6,%ymm2
vcmpngt_uqps (%rcx),%ymm6,%ymm2
vcmpfalse_osps %ymm4,%ymm6,%ymm2
vcmpfalse_osps (%rcx),%ymm6,%ymm2
vcmpneq_osps %ymm4,%ymm6,%ymm2
vcmpneq_osps (%rcx),%ymm6,%ymm2
vcmpge_oqps %ymm4,%ymm6,%ymm2
vcmpge_oqps (%rcx),%ymm6,%ymm2
vcmpgt_oqps %ymm4,%ymm6,%ymm2
vcmpgt_oqps (%rcx),%ymm6,%ymm2
vcmptrue_usps %ymm4,%ymm6,%ymm2
vcmptrue_usps (%rcx),%ymm6,%ymm2
vgf2p8mulb %ymm4, %ymm5, %ymm6
vgf2p8mulb (%rcx), %ymm5, %ymm6
vgf2p8mulb -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8mulb 4064(%rdx), %ymm5, %ymm6
vgf2p8mulb 4096(%rdx), %ymm5, %ymm6
vgf2p8mulb -4096(%rdx), %ymm5, %ymm6
vgf2p8mulb -4128(%rdx), %ymm5, %ymm6
# Tests for op ymm/mem256, xmm
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqy (%rcx),%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psy (%rcx),%xmm4
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqy (%rcx),%xmm4
# Tests for op ymm/mem256, ymm
vcvtdq2ps %ymm4,%ymm6
vcvtdq2ps (%rcx),%ymm4
vcvtps2dq %ymm4,%ymm6
vcvtps2dq (%rcx),%ymm4
vcvttps2dq %ymm4,%ymm6
vcvttps2dq (%rcx),%ymm4
vmovapd %ymm4,%ymm6
vmovapd (%rcx),%ymm4
vmovaps %ymm4,%ymm6
vmovaps (%rcx),%ymm4
vmovdqa %ymm4,%ymm6
vmovdqa (%rcx),%ymm4
vmovdqu %ymm4,%ymm6
vmovdqu (%rcx),%ymm4
vmovddup %ymm4,%ymm6
vmovddup (%rcx),%ymm4
vmovshdup %ymm4,%ymm6
vmovshdup (%rcx),%ymm4
vmovsldup %ymm4,%ymm6
vmovsldup (%rcx),%ymm4
vmovupd %ymm4,%ymm6
vmovupd (%rcx),%ymm4
vmovups %ymm4,%ymm6
vmovups (%rcx),%ymm4
vptest %ymm4,%ymm6
vptest (%rcx),%ymm4
vrcpps %ymm4,%ymm6
vrcpps (%rcx),%ymm4
vrsqrtps %ymm4,%ymm6
vrsqrtps (%rcx),%ymm4
vsqrtpd %ymm4,%ymm6
vsqrtpd (%rcx),%ymm4
vsqrtps %ymm4,%ymm6
vsqrtps (%rcx),%ymm4
vtestpd %ymm4,%ymm6
vtestpd (%rcx),%ymm4
vtestps %ymm4,%ymm6
vtestps (%rcx),%ymm4
# Tests for op ymm, ymm/mem256
vmovapd %ymm4,%ymm6
vmovapd %ymm4,(%rcx)
vmovaps %ymm4,%ymm6
vmovaps %ymm4,(%rcx)
vmovdqa %ymm4,%ymm6
vmovdqa %ymm4,(%rcx)
vmovdqu %ymm4,%ymm6
vmovdqu %ymm4,(%rcx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%rcx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%rcx)
# Tests for op mem256, ymm
vlddqu (%rcx),%ymm4
# Tests for op ymm, mem256
vmovntdq %ymm4,(%rcx)
vmovntpd %ymm4,(%rcx)
vmovntps %ymm4,(%rcx)
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendpd $7,(%rcx),%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vblendps $7,(%rcx),%ymm6,%ymm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmppd $7,(%rcx),%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpps $7,(%rcx),%ymm6,%ymm2
vdpps $7,%ymm4,%ymm6,%ymm2
vdpps $7,(%rcx),%ymm6,%ymm2
vperm2f128 $7,%ymm4,%ymm6,%ymm2
vperm2f128 $7,(%rcx),%ymm6,%ymm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufpd $7,(%rcx),%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vshufps $7,(%rcx),%ymm6,%ymm2
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, (%rcx), %ymm5, %ymm6
vgf2p8affineqb $123, -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8affineqb $123, 4064(%rdx), %ymm5, %ymm6
vgf2p8affineqb $123, 4096(%rdx), %ymm5, %ymm6
vgf2p8affineqb $123, -4096(%rdx), %ymm5, %ymm6
vgf2p8affineqb $123, -4128(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, (%rcx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4064(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4096(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4096(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4128(%rdx), %ymm5, %ymm6
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd %ymm4,%ymm6,%ymm2,%ymm7
vblendvpd %ymm4,(%rcx),%ymm2,%ymm7
vblendvps %ymm4,%ymm6,%ymm2,%ymm7
vblendvps %ymm4,(%rcx),%ymm2,%ymm7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 $7,%xmm4,%ymm4,%ymm6
vinsertf128 $7,(%rcx),%ymm4,%ymm6
# Tests for op imm8, ymm, xmm/mem128
vextractf128 $7,%ymm4,%xmm4
vextractf128 $7,%ymm4,(%rcx)
# Tests for op mem128, ymm
vbroadcastf128 (%rcx),%ymm4
# Tests for op xmm/mem128, xmm
vcvtdq2ps %xmm4,%xmm6
vcvtdq2ps (%rcx),%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqx (%rcx),%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psx (%rcx),%xmm4
vcvtps2dq %xmm4,%xmm6
vcvtps2dq (%rcx),%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqx (%rcx),%xmm4
vcvttps2dq %xmm4,%xmm6
vcvttps2dq (%rcx),%xmm4
vmovapd %xmm4,%xmm6
vmovapd (%rcx),%xmm4
vmovaps %xmm4,%xmm6
vmovaps (%rcx),%xmm4
vmovdqa %xmm4,%xmm6
vmovdqa (%rcx),%xmm4
vmovdqu %xmm4,%xmm6
vmovdqu (%rcx),%xmm4
vmovshdup %xmm4,%xmm6
vmovshdup (%rcx),%xmm4
vmovsldup %xmm4,%xmm6
vmovsldup (%rcx),%xmm4
vmovupd %xmm4,%xmm6
vmovupd (%rcx),%xmm4
vmovups %xmm4,%xmm6
vmovups (%rcx),%xmm4
vpabsb %xmm4,%xmm6
vpabsb (%rcx),%xmm4
vpabsw %xmm4,%xmm6
vpabsw (%rcx),%xmm4
vpabsd %xmm4,%xmm6
vpabsd (%rcx),%xmm4
vphminposuw %xmm4,%xmm6
vphminposuw (%rcx),%xmm4
vptest %xmm4,%xmm6
vptest (%rcx),%xmm4
vtestps %xmm4,%xmm6
vtestps (%rcx),%xmm4
vtestpd %xmm4,%xmm6
vtestpd (%rcx),%xmm4
vrcpps %xmm4,%xmm6
vrcpps (%rcx),%xmm4
vrsqrtps %xmm4,%xmm6
vrsqrtps (%rcx),%xmm4
vsqrtpd %xmm4,%xmm6
vsqrtpd (%rcx),%xmm4
vsqrtps %xmm4,%xmm6
vsqrtps (%rcx),%xmm4
vaesimc %xmm4,%xmm6
vaesimc (%rcx),%xmm4
# Tests for op xmm, xmm/mem128
vmovapd %xmm4,%xmm6
vmovapd %xmm4,(%rcx)
vmovaps %xmm4,%xmm6
vmovaps %xmm4,(%rcx)
vmovdqa %xmm4,%xmm6
vmovdqa %xmm4,(%rcx)
vmovdqu %xmm4,%xmm6
vmovdqu %xmm4,(%rcx)
vmovupd %xmm4,%xmm6
vmovupd %xmm4,(%rcx)
vmovups %xmm4,%xmm6
vmovups %xmm4,(%rcx)
# Tests for op mem128, xmm
vlddqu (%rcx),%xmm4
vmovntdqa (%rcx),%xmm4
# Tests for op xmm, mem128
vmovntdq %xmm4,(%rcx)
vmovntpd %xmm4,(%rcx)
vmovntps %xmm4,(%rcx)
# Tests for op xmm/mem128, ymm
vcvtdq2pd %xmm4,%ymm4
vcvtdq2pd (%rcx),%ymm4
vcvtps2pd %xmm4,%ymm4
vcvtps2pd (%rcx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vaddpd %xmm4,%xmm6,%xmm2
vaddpd (%rcx),%xmm6,%xmm7
vaddps %xmm4,%xmm6,%xmm2
vaddps (%rcx),%xmm6,%xmm7
vaddsubpd %xmm4,%xmm6,%xmm2
vaddsubpd (%rcx),%xmm6,%xmm7
vaddsubps %xmm4,%xmm6,%xmm2
vaddsubps (%rcx),%xmm6,%xmm7
vandnpd %xmm4,%xmm6,%xmm2
vandnpd (%rcx),%xmm6,%xmm7
vandnps %xmm4,%xmm6,%xmm2
vandnps (%rcx),%xmm6,%xmm7
vandpd %xmm4,%xmm6,%xmm2
vandpd (%rcx),%xmm6,%xmm7
vandps %xmm4,%xmm6,%xmm2
vandps (%rcx),%xmm6,%xmm7
vdivpd %xmm4,%xmm6,%xmm2
vdivpd (%rcx),%xmm6,%xmm7
vdivps %xmm4,%xmm6,%xmm2
vdivps (%rcx),%xmm6,%xmm7
vhaddpd %xmm4,%xmm6,%xmm2
vhaddpd (%rcx),%xmm6,%xmm7
vhaddps %xmm4,%xmm6,%xmm2
vhaddps (%rcx),%xmm6,%xmm7
vhsubpd %xmm4,%xmm6,%xmm2
vhsubpd (%rcx),%xmm6,%xmm7
vhsubps %xmm4,%xmm6,%xmm2
vhsubps (%rcx),%xmm6,%xmm7
vmaxpd %xmm4,%xmm6,%xmm2
vmaxpd (%rcx),%xmm6,%xmm7
vmaxps %xmm4,%xmm6,%xmm2
vmaxps (%rcx),%xmm6,%xmm7
vminpd %xmm4,%xmm6,%xmm2
vminpd (%rcx),%xmm6,%xmm7
vminps %xmm4,%xmm6,%xmm2
vminps (%rcx),%xmm6,%xmm7
vmulpd %xmm4,%xmm6,%xmm2
vmulpd (%rcx),%xmm6,%xmm7
vmulps %xmm4,%xmm6,%xmm2
vmulps (%rcx),%xmm6,%xmm7
vorpd %xmm4,%xmm6,%xmm2
vorpd (%rcx),%xmm6,%xmm7
vorps %xmm4,%xmm6,%xmm2
vorps (%rcx),%xmm6,%xmm7
vpacksswb %xmm4,%xmm6,%xmm2
vpacksswb (%rcx),%xmm6,%xmm7
vpackssdw %xmm4,%xmm6,%xmm2
vpackssdw (%rcx),%xmm6,%xmm7
vpackuswb %xmm4,%xmm6,%xmm2
vpackuswb (%rcx),%xmm6,%xmm7
vpackusdw %xmm4,%xmm6,%xmm2
vpackusdw (%rcx),%xmm6,%xmm7
vpaddb %xmm4,%xmm6,%xmm2
vpaddb (%rcx),%xmm6,%xmm7
vpaddw %xmm4,%xmm6,%xmm2
vpaddw (%rcx),%xmm6,%xmm7
vpaddd %xmm4,%xmm6,%xmm2
vpaddd (%rcx),%xmm6,%xmm7
vpaddq %xmm4,%xmm6,%xmm2
vpaddq (%rcx),%xmm6,%xmm7
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsb (%rcx),%xmm6,%xmm7
vpaddsw %xmm4,%xmm6,%xmm2
vpaddsw (%rcx),%xmm6,%xmm7
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusb (%rcx),%xmm6,%xmm7
vpaddusw %xmm4,%xmm6,%xmm2
vpaddusw (%rcx),%xmm6,%xmm7
vpand %xmm4,%xmm6,%xmm2
vpand (%rcx),%xmm6,%xmm7
vpandn %xmm4,%xmm6,%xmm2
vpandn (%rcx),%xmm6,%xmm7
vpavgb %xmm4,%xmm6,%xmm2
vpavgb (%rcx),%xmm6,%xmm7
vpavgw %xmm4,%xmm6,%xmm2
vpavgw (%rcx),%xmm6,%xmm7
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq (%rcx),%xmm6,%xmm7
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq (%rcx),%xmm6,%xmm7
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq (%rcx),%xmm6,%xmm7
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqhqdq (%rcx),%xmm6,%xmm7
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqb (%rcx),%xmm6,%xmm7
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpeqw (%rcx),%xmm6,%xmm7
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqd (%rcx),%xmm6,%xmm7
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqq (%rcx),%xmm6,%xmm7
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtb (%rcx),%xmm6,%xmm7
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpgtw (%rcx),%xmm6,%xmm7
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtd (%rcx),%xmm6,%xmm7
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtq (%rcx),%xmm6,%xmm7
vpermilpd %xmm4,%xmm6,%xmm2
vpermilpd (%rcx),%xmm6,%xmm7
vpermilps %xmm4,%xmm6,%xmm2
vpermilps (%rcx),%xmm6,%xmm7
vphaddw %xmm4,%xmm6,%xmm2
vphaddw (%rcx),%xmm6,%xmm7
vphaddd %xmm4,%xmm6,%xmm2
vphaddd (%rcx),%xmm6,%xmm7
vphaddsw %xmm4,%xmm6,%xmm2
vphaddsw (%rcx),%xmm6,%xmm7
vphsubw %xmm4,%xmm6,%xmm2
vphsubw (%rcx),%xmm6,%xmm7
vphsubd %xmm4,%xmm6,%xmm2
vphsubd (%rcx),%xmm6,%xmm7
vphsubsw %xmm4,%xmm6,%xmm2
vphsubsw (%rcx),%xmm6,%xmm7
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaddwd (%rcx),%xmm6,%xmm7
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddubsw (%rcx),%xmm6,%xmm7
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsb (%rcx),%xmm6,%xmm7
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxsw (%rcx),%xmm6,%xmm7
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsd (%rcx),%xmm6,%xmm7
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxub (%rcx),%xmm6,%xmm7
vpmaxuw %xmm4,%xmm6,%xmm2
vpmaxuw (%rcx),%xmm6,%xmm7
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxud (%rcx),%xmm6,%xmm7
vpminsb %xmm4,%xmm6,%xmm2
vpminsb (%rcx),%xmm6,%xmm7
vpminsw %xmm4,%xmm6,%xmm2
vpminsw (%rcx),%xmm6,%xmm7
vpminsd %xmm4,%xmm6,%xmm2
vpminsd (%rcx),%xmm6,%xmm7
vpminub %xmm4,%xmm6,%xmm2
vpminub (%rcx),%xmm6,%xmm7
vpminuw %xmm4,%xmm6,%xmm2
vpminuw (%rcx),%xmm6,%xmm7
vpminud %xmm4,%xmm6,%xmm2
vpminud (%rcx),%xmm6,%xmm7
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhuw (%rcx),%xmm6,%xmm7
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhrsw (%rcx),%xmm6,%xmm7
vpmulhw %xmm4,%xmm6,%xmm2
vpmulhw (%rcx),%xmm6,%xmm7
vpmullw %xmm4,%xmm6,%xmm2
vpmullw (%rcx),%xmm6,%xmm7
vpmulld %xmm4,%xmm6,%xmm2
vpmulld (%rcx),%xmm6,%xmm7
vpmuludq %xmm4,%xmm6,%xmm2
vpmuludq (%rcx),%xmm6,%xmm7
vpmuldq %xmm4,%xmm6,%xmm2
vpmuldq (%rcx),%xmm6,%xmm7
vpor %xmm4,%xmm6,%xmm2
vpor (%rcx),%xmm6,%xmm7
vpsadbw %xmm4,%xmm6,%xmm2
vpsadbw (%rcx),%xmm6,%xmm7
vpshufb %xmm4,%xmm6,%xmm2
vpshufb (%rcx),%xmm6,%xmm7
vpsignb %xmm4,%xmm6,%xmm2
vpsignb (%rcx),%xmm6,%xmm7
vpsignw %xmm4,%xmm6,%xmm2
vpsignw (%rcx),%xmm6,%xmm7
vpsignd %xmm4,%xmm6,%xmm2
vpsignd (%rcx),%xmm6,%xmm7
vpsllw %xmm4,%xmm6,%xmm2
vpsllw (%rcx),%xmm6,%xmm7
vpslld %xmm4,%xmm6,%xmm2
vpslld (%rcx),%xmm6,%xmm7
vpsllq %xmm4,%xmm6,%xmm2
vpsllq (%rcx),%xmm6,%xmm7
vpsraw %xmm4,%xmm6,%xmm2
vpsraw (%rcx),%xmm6,%xmm7
vpsrad %xmm4,%xmm6,%xmm2
vpsrad (%rcx),%xmm6,%xmm7
vpsrlw %xmm4,%xmm6,%xmm2
vpsrlw (%rcx),%xmm6,%xmm7
vpsrld %xmm4,%xmm6,%xmm2
vpsrld (%rcx),%xmm6,%xmm7
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlq (%rcx),%xmm6,%xmm7
vpsubb %xmm4,%xmm6,%xmm2
vpsubb (%rcx),%xmm6,%xmm7
vpsubw %xmm4,%xmm6,%xmm2
vpsubw (%rcx),%xmm6,%xmm7
vpsubd %xmm4,%xmm6,%xmm2
vpsubd (%rcx),%xmm6,%xmm7
vpsubq %xmm4,%xmm6,%xmm2
vpsubq (%rcx),%xmm6,%xmm7
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsb (%rcx),%xmm6,%xmm7
vpsubsw %xmm4,%xmm6,%xmm2
vpsubsw (%rcx),%xmm6,%xmm7
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusb (%rcx),%xmm6,%xmm7
vpsubusw %xmm4,%xmm6,%xmm2
vpsubusw (%rcx),%xmm6,%xmm7
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhbw (%rcx),%xmm6,%xmm7
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpckhwd (%rcx),%xmm6,%xmm7
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhdq (%rcx),%xmm6,%xmm7
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhqdq (%rcx),%xmm6,%xmm7
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpcklbw (%rcx),%xmm6,%xmm7
vpunpcklwd %xmm4,%xmm6,%xmm2
vpunpcklwd (%rcx),%xmm6,%xmm7
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpckldq (%rcx),%xmm6,%xmm7
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklqdq (%rcx),%xmm6,%xmm7
vpxor %xmm4,%xmm6,%xmm2
vpxor (%rcx),%xmm6,%xmm7
vsubpd %xmm4,%xmm6,%xmm2
vsubpd (%rcx),%xmm6,%xmm7
vsubps %xmm4,%xmm6,%xmm2
vsubps (%rcx),%xmm6,%xmm7
vunpckhpd %xmm4,%xmm6,%xmm2
vunpckhpd (%rcx),%xmm6,%xmm7
vunpckhps %xmm4,%xmm6,%xmm2
vunpckhps (%rcx),%xmm6,%xmm7
vunpcklpd %xmm4,%xmm6,%xmm2
vunpcklpd (%rcx),%xmm6,%xmm7
vunpcklps %xmm4,%xmm6,%xmm2
vunpcklps (%rcx),%xmm6,%xmm7
vxorpd %xmm4,%xmm6,%xmm2
vxorpd (%rcx),%xmm6,%xmm7
vxorps %xmm4,%xmm6,%xmm2
vxorps (%rcx),%xmm6,%xmm7
vaesenc %xmm4,%xmm6,%xmm2
vaesenc (%rcx),%xmm6,%xmm7
vaesenclast %xmm4,%xmm6,%xmm2
vaesenclast (%rcx),%xmm6,%xmm7
vaesdec %xmm4,%xmm6,%xmm2
vaesdec (%rcx),%xmm6,%xmm7
vaesdeclast %xmm4,%xmm6,%xmm2
vaesdeclast (%rcx),%xmm6,%xmm7
vcmpeqpd %xmm4,%xmm6,%xmm2
vcmpeqpd (%rcx),%xmm6,%xmm7
vcmpltpd %xmm4,%xmm6,%xmm2
vcmpltpd (%rcx),%xmm6,%xmm7
vcmplepd %xmm4,%xmm6,%xmm2
vcmplepd (%rcx),%xmm6,%xmm7
vcmpunordpd %xmm4,%xmm6,%xmm2
vcmpunordpd (%rcx),%xmm6,%xmm7
vcmpneqpd %xmm4,%xmm6,%xmm2
vcmpneqpd (%rcx),%xmm6,%xmm7
vcmpnltpd %xmm4,%xmm6,%xmm2
vcmpnltpd (%rcx),%xmm6,%xmm7
vcmpnlepd %xmm4,%xmm6,%xmm2
vcmpnlepd (%rcx),%xmm6,%xmm7
vcmpordpd %xmm4,%xmm6,%xmm2
vcmpordpd (%rcx),%xmm6,%xmm7
vcmpeq_uqpd %xmm4,%xmm6,%xmm2
vcmpeq_uqpd (%rcx),%xmm6,%xmm7
vcmpngepd %xmm4,%xmm6,%xmm2
vcmpngepd (%rcx),%xmm6,%xmm7
vcmpngtpd %xmm4,%xmm6,%xmm2
vcmpngtpd (%rcx),%xmm6,%xmm7
vcmpfalsepd %xmm4,%xmm6,%xmm2
vcmpfalsepd (%rcx),%xmm6,%xmm7
vcmpneq_oqpd %xmm4,%xmm6,%xmm2
vcmpneq_oqpd (%rcx),%xmm6,%xmm7
vcmpgepd %xmm4,%xmm6,%xmm2
vcmpgepd (%rcx),%xmm6,%xmm7
vcmpgtpd %xmm4,%xmm6,%xmm2
vcmpgtpd (%rcx),%xmm6,%xmm7
vcmptruepd %xmm4,%xmm6,%xmm2
vcmptruepd (%rcx),%xmm6,%xmm7
vcmpeq_ospd %xmm4,%xmm6,%xmm2
vcmpeq_ospd (%rcx),%xmm6,%xmm7
vcmplt_oqpd %xmm4,%xmm6,%xmm2
vcmplt_oqpd (%rcx),%xmm6,%xmm7
vcmple_oqpd %xmm4,%xmm6,%xmm2
vcmple_oqpd (%rcx),%xmm6,%xmm7
vcmpunord_spd %xmm4,%xmm6,%xmm2
vcmpunord_spd (%rcx),%xmm6,%xmm7
vcmpneq_uspd %xmm4,%xmm6,%xmm2
vcmpneq_uspd (%rcx),%xmm6,%xmm7
vcmpnlt_uqpd %xmm4,%xmm6,%xmm2
vcmpnlt_uqpd (%rcx),%xmm6,%xmm7
vcmpnle_uqpd %xmm4,%xmm6,%xmm2
vcmpnle_uqpd (%rcx),%xmm6,%xmm7
vcmpord_spd %xmm4,%xmm6,%xmm2
vcmpord_spd (%rcx),%xmm6,%xmm7
vcmpeq_uspd %xmm4,%xmm6,%xmm2
vcmpeq_uspd (%rcx),%xmm6,%xmm7
vcmpnge_uqpd %xmm4,%xmm6,%xmm2
vcmpnge_uqpd (%rcx),%xmm6,%xmm7
vcmpngt_uqpd %xmm4,%xmm6,%xmm2
vcmpngt_uqpd (%rcx),%xmm6,%xmm7
vcmpfalse_ospd %xmm4,%xmm6,%xmm2
vcmpfalse_ospd (%rcx),%xmm6,%xmm7
vcmpneq_ospd %xmm4,%xmm6,%xmm2
vcmpneq_ospd (%rcx),%xmm6,%xmm7
vcmpge_oqpd %xmm4,%xmm6,%xmm2
vcmpge_oqpd (%rcx),%xmm6,%xmm7
vcmpgt_oqpd %xmm4,%xmm6,%xmm2
vcmpgt_oqpd (%rcx),%xmm6,%xmm7
vcmptrue_uspd %xmm4,%xmm6,%xmm2
vcmptrue_uspd (%rcx),%xmm6,%xmm7
vcmpeqps %xmm4,%xmm6,%xmm2
vcmpeqps (%rcx),%xmm6,%xmm7
vcmpltps %xmm4,%xmm6,%xmm2
vcmpltps (%rcx),%xmm6,%xmm7
vcmpleps %xmm4,%xmm6,%xmm2
vcmpleps (%rcx),%xmm6,%xmm7
vcmpunordps %xmm4,%xmm6,%xmm2
vcmpunordps (%rcx),%xmm6,%xmm7
vcmpneqps %xmm4,%xmm6,%xmm2
vcmpneqps (%rcx),%xmm6,%xmm7
vcmpnltps %xmm4,%xmm6,%xmm2
vcmpnltps (%rcx),%xmm6,%xmm7
vcmpnleps %xmm4,%xmm6,%xmm2
vcmpnleps (%rcx),%xmm6,%xmm7
vcmpordps %xmm4,%xmm6,%xmm2
vcmpordps (%rcx),%xmm6,%xmm7
vcmpeq_uqps %xmm4,%xmm6,%xmm2
vcmpeq_uqps (%rcx),%xmm6,%xmm7
vcmpngeps %xmm4,%xmm6,%xmm2
vcmpngeps (%rcx),%xmm6,%xmm7
vcmpngtps %xmm4,%xmm6,%xmm2
vcmpngtps (%rcx),%xmm6,%xmm7
vcmpfalseps %xmm4,%xmm6,%xmm2
vcmpfalseps (%rcx),%xmm6,%xmm7
vcmpneq_oqps %xmm4,%xmm6,%xmm2
vcmpneq_oqps (%rcx),%xmm6,%xmm7
vcmpgeps %xmm4,%xmm6,%xmm2
vcmpgeps (%rcx),%xmm6,%xmm7
vcmpgtps %xmm4,%xmm6,%xmm2
vcmpgtps (%rcx),%xmm6,%xmm7
vcmptrueps %xmm4,%xmm6,%xmm2
vcmptrueps (%rcx),%xmm6,%xmm7
vcmpeq_osps %xmm4,%xmm6,%xmm2
vcmpeq_osps (%rcx),%xmm6,%xmm7
vcmplt_oqps %xmm4,%xmm6,%xmm2
vcmplt_oqps (%rcx),%xmm6,%xmm7
vcmple_oqps %xmm4,%xmm6,%xmm2
vcmple_oqps (%rcx),%xmm6,%xmm7
vcmpunord_sps %xmm4,%xmm6,%xmm2
vcmpunord_sps (%rcx),%xmm6,%xmm7
vcmpneq_usps %xmm4,%xmm6,%xmm2
vcmpneq_usps (%rcx),%xmm6,%xmm7
vcmpnlt_uqps %xmm4,%xmm6,%xmm2
vcmpnlt_uqps (%rcx),%xmm6,%xmm7
vcmpnle_uqps %xmm4,%xmm6,%xmm2
vcmpnle_uqps (%rcx),%xmm6,%xmm7
vcmpord_sps %xmm4,%xmm6,%xmm2
vcmpord_sps (%rcx),%xmm6,%xmm7
vcmpeq_usps %xmm4,%xmm6,%xmm2
vcmpeq_usps (%rcx),%xmm6,%xmm7
vcmpnge_uqps %xmm4,%xmm6,%xmm2
vcmpnge_uqps (%rcx),%xmm6,%xmm7
vcmpngt_uqps %xmm4,%xmm6,%xmm2
vcmpngt_uqps (%rcx),%xmm6,%xmm7
vcmpfalse_osps %xmm4,%xmm6,%xmm2
vcmpfalse_osps (%rcx),%xmm6,%xmm7
vcmpneq_osps %xmm4,%xmm6,%xmm2
vcmpneq_osps (%rcx),%xmm6,%xmm7
vcmpge_oqps %xmm4,%xmm6,%xmm2
vcmpge_oqps (%rcx),%xmm6,%xmm7
vcmpgt_oqps %xmm4,%xmm6,%xmm2
vcmpgt_oqps (%rcx),%xmm6,%xmm7
vcmptrue_usps %xmm4,%xmm6,%xmm2
vcmptrue_usps (%rcx),%xmm6,%xmm7
vgf2p8mulb %xmm4, %xmm5, %xmm6
vgf2p8mulb (%rcx), %xmm5, %xmm6
vgf2p8mulb -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8mulb 2032(%rdx), %xmm5, %xmm6
vgf2p8mulb 2048(%rdx), %xmm5, %xmm6
vgf2p8mulb -2048(%rdx), %xmm5, %xmm6
vgf2p8mulb -2064(%rdx), %xmm5, %xmm6
# Tests for op mem128, xmm, xmm
vmaskmovps (%rcx),%xmm4,%xmm6
vmaskmovpd (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist $7,%xmm4,%xmm6
vaeskeygenassist $7,(%rcx),%xmm6
vpcmpestri $7,%xmm4,%xmm6
vpcmpestri $7,(%rcx),%xmm6
vpcmpestriq $7,%xmm4,%xmm6
vpcmpestril $7,(%rcx),%xmm6
vpcmpestrm $7,%xmm4,%xmm6
vpcmpestrm $7,(%rcx),%xmm6
vpcmpestrmq $7,%xmm4,%xmm6
vpcmpestrml $7,(%rcx),%xmm6
vpcmpistri $7,%xmm4,%xmm6
vpcmpistri $7,(%rcx),%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpcmpistrm $7,(%rcx),%xmm6
vpermilpd $7,%xmm4,%xmm6
vpermilpd $7,(%rcx),%xmm6
vpermilps $7,%xmm4,%xmm6
vpermilps $7,(%rcx),%xmm6
vpshufd $7,%xmm4,%xmm6
vpshufd $7,(%rcx),%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshufhw $7,(%rcx),%xmm6
vpshuflw $7,%xmm4,%xmm6
vpshuflw $7,(%rcx),%xmm6
vroundpd $7,%xmm4,%xmm6
vroundpd $7,(%rcx),%xmm6
vroundps $7,%xmm4,%xmm6
vroundps $7,(%rcx),%xmm6
# Tests for op xmm, xmm, mem128
vmaskmovps %xmm4,%xmm6,(%rcx)
vmaskmovpd %xmm4,%xmm6,(%rcx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd $7,%xmm4,%xmm6,%xmm2
vblendpd $7,(%rcx),%xmm6,%xmm2
vblendps $7,%xmm4,%xmm6,%xmm2
vblendps $7,(%rcx),%xmm6,%xmm2
vcmppd $7,%xmm4,%xmm6,%xmm2
vcmppd $7,(%rcx),%xmm6,%xmm2
vcmpps $7,%xmm4,%xmm6,%xmm2
vcmpps $7,(%rcx),%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdppd $7,(%rcx),%xmm6,%xmm2
vdpps $7,%xmm4,%xmm6,%xmm2
vdpps $7,(%rcx),%xmm6,%xmm2
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmpsadbw $7,(%rcx),%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpalignr $7,(%rcx),%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpblendw $7,(%rcx),%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpclmulqdq $7,(%rcx),%xmm6,%xmm2
vshufpd $7,%xmm4,%xmm6,%xmm2
vshufpd $7,(%rcx),%xmm6,%xmm2
vshufps $7,%xmm4,%xmm6,%xmm2
vshufps $7,(%rcx),%xmm6,%xmm2
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, (%rcx), %xmm5, %xmm6
vgf2p8affineqb $123, -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8affineqb $123, 2032(%rdx), %xmm5, %xmm6
vgf2p8affineqb $123, 2048(%rdx), %xmm5, %xmm6
vgf2p8affineqb $123, -2048(%rdx), %xmm5, %xmm6
vgf2p8affineqb $123, -2064(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, (%rcx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2032(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2048(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2048(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2064(%rdx), %xmm5, %xmm6
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd %xmm4,%xmm6,%xmm2,%xmm7
vblendvpd %xmm4,(%rcx),%xmm2,%xmm7
vblendvps %xmm4,%xmm6,%xmm2,%xmm7
vblendvps %xmm4,(%rcx),%xmm2,%xmm7
vpblendvb %xmm4,%xmm6,%xmm2,%xmm7
vpblendvb %xmm4,(%rcx),%xmm2,%xmm7
# Tests for op mem64, ymm
vbroadcastsd (%rcx),%ymm4
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%rcx),%xmm4
vcvtdq2pd %xmm4,%xmm6
vcvtdq2pd (%rcx),%xmm4
vcvtps2pd %xmm4,%xmm6
vcvtps2pd (%rcx),%xmm4
vmovddup %xmm4,%xmm6
vmovddup (%rcx),%xmm4
vpmovsxbw %xmm4,%xmm6
vpmovsxbw (%rcx),%xmm4
vpmovsxwd %xmm4,%xmm6
vpmovsxwd (%rcx),%xmm4
vpmovsxdq %xmm4,%xmm6
vpmovsxdq (%rcx),%xmm4
vpmovzxbw %xmm4,%xmm6
vpmovzxbw (%rcx),%xmm4
vpmovzxwd %xmm4,%xmm6
vpmovzxwd (%rcx),%xmm4
vpmovzxdq %xmm4,%xmm6
vpmovzxdq (%rcx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%rcx),%xmm4
# Tests for op mem64, xmm
vmovsd (%rcx),%xmm4
# Tests for op xmm, mem64
vmovlpd %xmm4,(%rcx)
vmovlps %xmm4,(%rcx)
vmovhpd %xmm4,(%rcx)
vmovhps %xmm4,(%rcx)
vmovsd %xmm4,(%rcx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovd %xmm4,%rcx
vmovd %rcx,%xmm4
vmovq %xmm4,%rcx
vmovq %rcx,%xmm4
vmovq %xmm4,(%rcx)
vmovq (%rcx),%xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%rcx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%rcx),%ecx
# Tests for op xmm/mem64, regq
vcvtsd2si %xmm4,%rcx
vcvtsd2si (%rcx),%rcx
vcvttsd2si %xmm4,%rcx
vcvttsd2si (%rcx),%rcx
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq %rcx,%xmm4,%xmm6
vcvtsi2sdq (%rcx),%xmm4,%xmm6
vcvtsi2ssq %rcx,%xmm4,%xmm6
vcvtsi2ssq (%rcx),%xmm4,%xmm6
# Tests for op imm8, regq/mem64, xmm, xmm
vpinsrq $7,%rcx,%xmm4,%xmm6
vpinsrq $7,(%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm, regq/mem64
vpextrq $7,%xmm4,%rcx
vpextrq $7,%xmm4,(%rcx)
# Tests for op mem64, xmm, xmm
vmovlpd (%rcx),%xmm4,%xmm6
vmovlps (%rcx),%xmm4,%xmm6
vmovhpd (%rcx),%xmm4,%xmm6
vmovhps (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%rcx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%rcx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%rcx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%rcx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%rcx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%rcx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%rcx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%rcx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%rcx),%xmm6,%xmm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%rcx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%rcx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%rcx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%rcx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%rcx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%rcx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%rcx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%rcx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%rcx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%rcx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%rcx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%rcx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%rcx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%rcx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%rcx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%rcx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%rcx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%rcx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%rcx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%rcx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%rcx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%rcx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%rcx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%rcx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%rcx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%rcx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%rcx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%rcx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%rcx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%rcx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%rcx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%rcx),%xmm6,%xmm2
# Tests for op mem64
vldmxcsr (%rcx)
vstmxcsr (%rcx)
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%rcx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%rcx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%rcx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%rcx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%rcx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%rcx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%rcx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%rcx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%rcx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%rcx),%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%rcx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%rcx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%rcx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%rcx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%rcx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%rcx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%rcx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%rcx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%rcx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%rcx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%rcx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%rcx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%rcx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%rcx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%rcx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%rcx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%rcx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%rcx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%rcx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%rcx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%rcx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%rcx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%rcx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%rcx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%rcx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%rcx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%rcx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%rcx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%rcx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%rcx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%rcx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%rcx),%xmm6,%xmm2
# Tests for op mem32, ymm
vbroadcastss (%rcx),%ymm4
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%rcx),%xmm4
vpmovsxbd %xmm4,%xmm6
vpmovsxbd (%rcx),%xmm4
vpmovsxwq %xmm4,%xmm6
vpmovsxwq (%rcx),%xmm4
vpmovzxbd %xmm4,%xmm6
vpmovzxbd (%rcx),%xmm4
vpmovzxwq %xmm4,%xmm6
vpmovzxwq (%rcx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%rcx),%xmm4
# Tests for op mem32, xmm
vbroadcastss (%rcx),%xmm4
vmovss (%rcx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%rcx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd %xmm4,%ecx
vmovd %xmm4,(%rcx)
vmovd %ecx,%xmm4
vmovd (%rcx),%xmm4
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%rcx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%rcx),%ecx
# Tests for op xmm/mem32, regq
vcvtss2si %xmm4,%rcx
vcvtss2si (%rcx),%rcx
vcvttss2si %xmm4,%rcx
vcvttss2si (%rcx),%rcx
# Tests for op xmm, regq
vmovmskpd %xmm4,%rcx
vmovmskps %xmm4,%rcx
vpmovmskb %xmm4,%rcx
# Tests for op imm8, xmm, regq/mem32
vextractps $7,%xmm4,%rcx
vextractps $7,%xmm4,(%rcx)
# Tests for op imm8, xmm, regl/mem32
vpextrd $7,%xmm4,%ecx
vpextrd $7,%xmm4,(%rcx)
vextractps $7,%xmm4,%ecx
vextractps $7,%xmm4,(%rcx)
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd $7,%ecx,%xmm4,%xmm6
vpinsrd $7,(%rcx),%xmm4,%xmm6
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sdl (%rcx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ssl (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%rcx),%xmm6,%xmm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vinsertps $7,(%rcx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/m16, xmm
vpmovsxbq %xmm4,%xmm6
vpmovsxbq (%rcx),%xmm4
vpmovzxbq %xmm4,%xmm6
vpmovzxbq (%rcx),%xmm4
# Tests for op imm8, xmm, regl/mem16
vpextrw $7,%xmm4,%ecx
vpextrw $7,%xmm4,(%rcx)
# Tests for op imm8, xmm, regq/mem16
vpextrw $7,%xmm4,%rcx
vpextrw $7,%xmm4,(%rcx)
# Tests for op imm8, regl/mem16, xmm, xmm
vpinsrw $7,%ecx,%xmm4,%xmm6
vpinsrw $7,(%rcx),%xmm4,%xmm6
vpinsrw $7,%rcx,%xmm4,%xmm6
vpinsrw $7,(%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm, regl/mem8
vpextrb $7,%xmm4,%ecx
vpextrb $7,%xmm4,(%rcx)
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb $7,%ecx,%xmm4,%xmm6
vpinsrb $7,(%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm, regq
vpextrw $7,%xmm4,%rcx
# Tests for op imm8, xmm, regq/mem8
vpextrb $7,%xmm4,%rcx
vpextrb $7,%xmm4,(%rcx)
# Tests for op xmm, xmm
vmaskmovdqu %xmm4,%xmm6
vmovq %xmm4,%xmm6
# Tests for op xmm, regl
vmovmskpd %xmm4,%ecx
vmovmskps %xmm4,%ecx
vpmovmskb %xmm4,%ecx
# Tests for op xmm, xmm, xmm
vmovhlps %xmm4,%xmm6,%xmm2
vmovlhps %xmm4,%xmm6,%xmm2
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
# Tests for op imm8, xmm, xmm
vpslld $7,%xmm4,%xmm6
vpslldq $7,%xmm4,%xmm6
vpsllq $7,%xmm4,%xmm6
vpsllw $7,%xmm4,%xmm6
vpsrad $7,%xmm4,%xmm6
vpsraw $7,%xmm4,%xmm6
vpsrld $7,%xmm4,%xmm6
vpsrldq $7,%xmm4,%xmm6
vpsrlq $7,%xmm4,%xmm6
vpsrlw $7,%xmm4,%xmm6
# Tests for op imm8, xmm, regl
vpextrw $7,%xmm4,%ecx
# Tests for op ymm, regl
vmovmskpd %ymm4,%ecx
vmovmskps %ymm4,%ecx
# Tests for op ymm, regq
vmovmskpd %ymm4,%rcx
vmovmskps %ymm4,%rcx
# Default instructions without suffixes.
vcvtpd2dq %xmm4,%xmm6
vcvtpd2dq %ymm4,%xmm6
vcvtpd2ps %xmm4,%xmm6
vcvtpd2ps %ymm4,%xmm6
vcvttpd2dq %xmm4,%xmm6
vcvttpd2dq %ymm4,%xmm6
# Tests with different memory and register operands.
vldmxcsr 0x12345678
vmovdqa 0x12345678,%xmm8
vmovdqa %xmm8,0x12345678
vmovd %xmm8,0x12345678
vcvtsd2si 0x12345678,%r8d
vcvtdq2pd 0x12345678,%ymm8
vcvtpd2psy 0x12345678,%xmm8
vpavgb 0x12345678,%xmm8,%xmm15
vaeskeygenassist $7,0x12345678,%xmm8
vpextrb $7,%xmm8,0x12345678
vcvtsi2sdl 0x12345678,%xmm8,%xmm15
vpclmulqdq $7,0x12345678,%xmm8,%xmm15
vblendvps %xmm8,0x12345678,%xmm12,%xmm14
vpinsrb $7,0x12345678,%xmm8,%xmm15
vmovdqa 0x12345678,%ymm8
vmovdqa %ymm8,0x12345678
vpermilpd 0x12345678,%ymm8,%ymm15
vroundpd $7,0x12345678,%ymm8
vextractf128 $7,%ymm8,0x12345678
vperm2f128 $7,0x12345678,%ymm8,%ymm15
vblendvpd %ymm8,0x12345678,%ymm12,%ymm14
vldmxcsr (%rbp)
vmovdqa (%rbp),%xmm8
vmovdqa %xmm8,(%rbp)
vmovd %xmm8,(%rbp)
vcvtsd2si (%rbp),%r8d
vcvtdq2pd (%rbp),%ymm8
vcvtpd2psy (%rbp),%xmm8
vpavgb (%rbp),%xmm8,%xmm15
vaeskeygenassist $7,(%rbp),%xmm8
vpextrb $7,%xmm8,(%rbp)
vcvtsi2sdl (%rbp),%xmm8,%xmm15
vpclmulqdq $7,(%rbp),%xmm8,%xmm15
vblendvps %xmm8,(%rbp),%xmm12,%xmm14
vpinsrb $7,(%rbp),%xmm8,%xmm15
vmovdqa (%rbp),%ymm8
vmovdqa %ymm8,(%rbp)
vpermilpd (%rbp),%ymm8,%ymm15
vroundpd $7,(%rbp),%ymm8
vextractf128 $7,%ymm8,(%rbp)
vperm2f128 $7,(%rbp),%ymm8,%ymm15
vblendvpd %ymm8,(%rbp),%ymm12,%ymm14
vldmxcsr (%rsp)
vmovdqa (%rsp),%xmm8
vmovdqa %xmm8,(%rsp)
vmovd %xmm8,(%rsp)
vcvtsd2si (%rsp),%r8d
vcvtdq2pd (%rsp),%ymm8
vcvtpd2psy (%rsp),%xmm8
vpavgb (%rsp),%xmm8,%xmm15
vaeskeygenassist $7,(%rsp),%xmm8
vpextrb $7,%xmm8,(%rsp)
vcvtsi2sdl (%rsp),%xmm8,%xmm15
vpclmulqdq $7,(%rsp),%xmm8,%xmm15
vblendvps %xmm8,(%rsp),%xmm12,%xmm14
vpinsrb $7,(%rsp),%xmm8,%xmm15
vmovdqa (%rsp),%ymm8
vmovdqa %ymm8,(%rsp)
vpermilpd (%rsp),%ymm8,%ymm15
vroundpd $7,(%rsp),%ymm8
vextractf128 $7,%ymm8,(%rsp)
vperm2f128 $7,(%rsp),%ymm8,%ymm15
vblendvpd %ymm8,(%rsp),%ymm12,%ymm14
vldmxcsr 0x99(%rbp)
vmovdqa 0x99(%rbp),%xmm8
vmovdqa %xmm8,0x99(%rbp)
vmovd %xmm8,0x99(%rbp)
vcvtsd2si 0x99(%rbp),%r8d
vcvtdq2pd 0x99(%rbp),%ymm8
vcvtpd2psy 0x99(%rbp),%xmm8
vpavgb 0x99(%rbp),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%rbp),%xmm8
vpextrb $7,%xmm8,0x99(%rbp)
vcvtsi2sdl 0x99(%rbp),%xmm8,%xmm15
vpclmulqdq $7,0x99(%rbp),%xmm8,%xmm15
vblendvps %xmm8,0x99(%rbp),%xmm12,%xmm14
vpinsrb $7,0x99(%rbp),%xmm8,%xmm15
vmovdqa 0x99(%rbp),%ymm8
vmovdqa %ymm8,0x99(%rbp)
vpermilpd 0x99(%rbp),%ymm8,%ymm15
vroundpd $7,0x99(%rbp),%ymm8
vextractf128 $7,%ymm8,0x99(%rbp)
vperm2f128 $7,0x99(%rbp),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%rbp),%ymm12,%ymm14
vldmxcsr 0x99(%r15)
vmovdqa 0x99(%r15),%xmm8
vmovdqa %xmm8,0x99(%r15)
vmovd %xmm8,0x99(%r15)
vcvtsd2si 0x99(%r15),%r8d
vcvtdq2pd 0x99(%r15),%ymm8
vcvtpd2psy 0x99(%r15),%xmm8
vpavgb 0x99(%r15),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%r15),%xmm8
vpextrb $7,%xmm8,0x99(%r15)
vcvtsi2sdl 0x99(%r15),%xmm8,%xmm15
vpclmulqdq $7,0x99(%r15),%xmm8,%xmm15
vblendvps %xmm8,0x99(%r15),%xmm12,%xmm14
vpinsrb $7,0x99(%r15),%xmm8,%xmm15
vmovdqa 0x99(%r15),%ymm8
vmovdqa %ymm8,0x99(%r15)
vpermilpd 0x99(%r15),%ymm8,%ymm15
vroundpd $7,0x99(%r15),%ymm8
vextractf128 $7,%ymm8,0x99(%r15)
vperm2f128 $7,0x99(%r15),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%r15),%ymm12,%ymm14
vldmxcsr 0x99(%rip)
vmovdqa 0x99(%rip),%xmm8
vmovdqa %xmm8,0x99(%rip)
vmovd %xmm8,0x99(%rip)
vcvtsd2si 0x99(%rip),%r8d
vcvtdq2pd 0x99(%rip),%ymm8
vcvtpd2psy 0x99(%rip),%xmm8
vpavgb 0x99(%rip),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%rip),%xmm8
vpextrb $7,%xmm8,0x99(%rip)
vcvtsi2sdl 0x99(%rip),%xmm8,%xmm15
vpclmulqdq $7,0x99(%rip),%xmm8,%xmm15
vblendvps %xmm8,0x99(%rip),%xmm12,%xmm14
vpinsrb $7,0x99(%rip),%xmm8,%xmm15
vmovdqa 0x99(%rip),%ymm8
vmovdqa %ymm8,0x99(%rip)
vpermilpd 0x99(%rip),%ymm8,%ymm15
vroundpd $7,0x99(%rip),%ymm8
vextractf128 $7,%ymm8,0x99(%rip)
vperm2f128 $7,0x99(%rip),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%rip),%ymm12,%ymm14
vldmxcsr 0x99(%rsp)
vmovdqa 0x99(%rsp),%xmm8
vmovdqa %xmm8,0x99(%rsp)
vmovd %xmm8,0x99(%rsp)
vcvtsd2si 0x99(%rsp),%r8d
vcvtdq2pd 0x99(%rsp),%ymm8
vcvtpd2psy 0x99(%rsp),%xmm8
vpavgb 0x99(%rsp),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%rsp),%xmm8
vpextrb $7,%xmm8,0x99(%rsp)
vcvtsi2sdl 0x99(%rsp),%xmm8,%xmm15
# Addressing-mode sweep: the same instruction mix is repeated over different
# base/index/scale combinations to exercise ModRM/SIB and REX.B/REX.X encoding.
# NOTE(review): instruction text must stay byte-stable — the paired .d dump
# file in the testsuite matches the exact disassembly of these lines.
# (continuation of the 0x99(%rsp) group started earlier in the file)
vpclmulqdq $7,0x99(%rsp),%xmm8,%xmm15
vblendvps %xmm8,0x99(%rsp),%xmm12,%xmm14
vpinsrb $7,0x99(%rsp),%xmm8,%xmm15
vmovdqa 0x99(%rsp),%ymm8
vmovdqa %ymm8,0x99(%rsp)
vpermilpd 0x99(%rsp),%ymm8,%ymm15
vroundpd $7,0x99(%rsp),%ymm8
vextractf128 $7,%ymm8,0x99(%rsp)
vperm2f128 $7,0x99(%rsp),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%rsp),%ymm12,%ymm14
# base %r12 (extended base register)
vldmxcsr 0x99(%r12)
vmovdqa 0x99(%r12),%xmm8
vmovdqa %xmm8,0x99(%r12)
vmovd %xmm8,0x99(%r12)
vcvtsd2si 0x99(%r12),%r8d
vcvtdq2pd 0x99(%r12),%ymm8
vcvtpd2psy 0x99(%r12),%xmm8
vpavgb 0x99(%r12),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%r12),%xmm8
vpextrb $7,%xmm8,0x99(%r12)
vcvtsi2sdl 0x99(%r12),%xmm8,%xmm15
vpclmulqdq $7,0x99(%r12),%xmm8,%xmm15
vblendvps %xmm8,0x99(%r12),%xmm12,%xmm14
vpinsrb $7,0x99(%r12),%xmm8,%xmm15
vmovdqa 0x99(%r12),%ymm8
vmovdqa %ymm8,0x99(%r12)
vpermilpd 0x99(%r12),%ymm8,%ymm15
vroundpd $7,0x99(%r12),%ymm8
vextractf128 $7,%ymm8,0x99(%r12)
vperm2f128 $7,0x99(%r12),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%r12),%ymm12,%ymm14
# no base, %riz pseudo-index (gas notation for "no index encoded"), scale 1
vldmxcsr -0x99(,%riz)
vmovdqa -0x99(,%riz),%xmm8
vmovdqa %xmm8,-0x99(,%riz)
vmovd %xmm8,-0x99(,%riz)
vcvtsd2si -0x99(,%riz),%r8d
vcvtdq2pd -0x99(,%riz),%ymm8
vcvtpd2psy -0x99(,%riz),%xmm8
vpavgb -0x99(,%riz),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(,%riz),%xmm8
vpextrb $7,%xmm8,-0x99(,%riz)
vcvtsi2sdl -0x99(,%riz),%xmm8,%xmm15
vpclmulqdq $7,-0x99(,%riz),%xmm8,%xmm15
vblendvps %xmm8,-0x99(,%riz),%xmm12,%xmm14
vpinsrb $7,-0x99(,%riz),%xmm8,%xmm15
vmovdqa -0x99(,%riz),%ymm8
vmovdqa %ymm8,-0x99(,%riz)
vpermilpd -0x99(,%riz),%ymm8,%ymm15
vroundpd $7,-0x99(,%riz),%ymm8
vextractf128 $7,%ymm8,-0x99(,%riz)
vperm2f128 $7,-0x99(,%riz),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(,%riz),%ymm12,%ymm14
# no base, %riz with explicit scale 2
vldmxcsr -0x99(,%riz,2)
vmovdqa -0x99(,%riz,2),%xmm8
vmovdqa %xmm8,-0x99(,%riz,2)
vmovd %xmm8,-0x99(,%riz,2)
vcvtsd2si -0x99(,%riz,2),%r8d
vcvtdq2pd -0x99(,%riz,2),%ymm8
vcvtpd2psy -0x99(,%riz,2),%xmm8
vpavgb -0x99(,%riz,2),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(,%riz,2),%xmm8
vpextrb $7,%xmm8,-0x99(,%riz,2)
vcvtsi2sdl -0x99(,%riz,2),%xmm8,%xmm15
vpclmulqdq $7,-0x99(,%riz,2),%xmm8,%xmm15
vblendvps %xmm8,-0x99(,%riz,2),%xmm12,%xmm14
vpinsrb $7,-0x99(,%riz,2),%xmm8,%xmm15
vmovdqa -0x99(,%riz,2),%ymm8
vmovdqa %ymm8,-0x99(,%riz,2)
vpermilpd -0x99(,%riz,2),%ymm8,%ymm15
vroundpd $7,-0x99(,%riz,2),%ymm8
vextractf128 $7,%ymm8,-0x99(,%riz,2)
vperm2f128 $7,-0x99(,%riz,2),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(,%riz,2),%ymm12,%ymm14
# base %rbx with %riz pseudo-index
vldmxcsr -0x99(%rbx,%riz)
vmovdqa -0x99(%rbx,%riz),%xmm8
vmovdqa %xmm8,-0x99(%rbx,%riz)
vmovd %xmm8,-0x99(%rbx,%riz)
vcvtsd2si -0x99(%rbx,%riz),%r8d
vcvtdq2pd -0x99(%rbx,%riz),%ymm8
vcvtpd2psy -0x99(%rbx,%riz),%xmm8
vpavgb -0x99(%rbx,%riz),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rbx,%riz),%xmm8
vpextrb $7,%xmm8,-0x99(%rbx,%riz)
vcvtsi2sdl -0x99(%rbx,%riz),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rbx,%riz),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rbx,%riz),%xmm12,%xmm14
vpinsrb $7,-0x99(%rbx,%riz),%xmm8,%xmm15
vmovdqa -0x99(%rbx,%riz),%ymm8
vmovdqa %ymm8,-0x99(%rbx,%riz)
vpermilpd -0x99(%rbx,%riz),%ymm8,%ymm15
vroundpd $7,-0x99(%rbx,%riz),%ymm8
vextractf128 $7,%ymm8,-0x99(%rbx,%riz)
vperm2f128 $7,-0x99(%rbx,%riz),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rbx,%riz),%ymm12,%ymm14
# base %rbx, %riz with scale 2
vldmxcsr -0x99(%rbx,%riz,2)
vmovdqa -0x99(%rbx,%riz,2),%xmm8
vmovdqa %xmm8,-0x99(%rbx,%riz,2)
vmovd %xmm8,-0x99(%rbx,%riz,2)
vcvtsd2si -0x99(%rbx,%riz,2),%r8d
vcvtdq2pd -0x99(%rbx,%riz,2),%ymm8
vcvtpd2psy -0x99(%rbx,%riz,2),%xmm8
vpavgb -0x99(%rbx,%riz,2),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rbx,%riz,2),%xmm8
vpextrb $7,%xmm8,-0x99(%rbx,%riz,2)
vcvtsi2sdl -0x99(%rbx,%riz,2),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rbx,%riz,2),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rbx,%riz,2),%xmm12,%xmm14
vpinsrb $7,-0x99(%rbx,%riz,2),%xmm8,%xmm15
vmovdqa -0x99(%rbx,%riz,2),%ymm8
vmovdqa %ymm8,-0x99(%rbx,%riz,2)
vpermilpd -0x99(%rbx,%riz,2),%ymm8,%ymm15
vroundpd $7,-0x99(%rbx,%riz,2),%ymm8
vextractf128 $7,%ymm8,-0x99(%rbx,%riz,2)
vperm2f128 $7,-0x99(%rbx,%riz,2),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rbx,%riz,2),%ymm12,%ymm14
# extended base + extended index: %r12 base, %r15 index, scale 4
vldmxcsr -0x99(%r12,%r15,4)
vmovdqa -0x99(%r12,%r15,4),%xmm8
vmovdqa %xmm8,-0x99(%r12,%r15,4)
vmovd %xmm8,-0x99(%r12,%r15,4)
vcvtsd2si -0x99(%r12,%r15,4),%r8d
vcvtdq2pd -0x99(%r12,%r15,4),%ymm8
vcvtpd2psy -0x99(%r12,%r15,4),%xmm8
vpavgb -0x99(%r12,%r15,4),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%r12,%r15,4),%xmm8
vpextrb $7,%xmm8,-0x99(%r12,%r15,4)
vcvtsi2sdl -0x99(%r12,%r15,4),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%r12,%r15,4),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%r12,%r15,4),%xmm12,%xmm14
vpinsrb $7,-0x99(%r12,%r15,4),%xmm8,%xmm15
vmovdqa -0x99(%r12,%r15,4),%ymm8
vmovdqa %ymm8,-0x99(%r12,%r15,4)
vpermilpd -0x99(%r12,%r15,4),%ymm8,%ymm15
vroundpd $7,-0x99(%r12,%r15,4),%ymm8
vextractf128 $7,%ymm8,-0x99(%r12,%r15,4)
vperm2f128 $7,-0x99(%r12,%r15,4),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%r12,%r15,4),%ymm12,%ymm14
# %r8 base, %r15 index, scale 8
vldmxcsr -0x99(%r8,%r15,8)
vmovdqa -0x99(%r8,%r15,8),%xmm8
vmovdqa %xmm8,-0x99(%r8,%r15,8)
vmovd %xmm8,-0x99(%r8,%r15,8)
vcvtsd2si -0x99(%r8,%r15,8),%r8d
vcvtdq2pd -0x99(%r8,%r15,8),%ymm8
vcvtpd2psy -0x99(%r8,%r15,8),%xmm8
vpavgb -0x99(%r8,%r15,8),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%r8,%r15,8),%xmm8
vpextrb $7,%xmm8,-0x99(%r8,%r15,8)
vcvtsi2sdl -0x99(%r8,%r15,8),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%r8,%r15,8),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%r8,%r15,8),%xmm12,%xmm14
vpinsrb $7,-0x99(%r8,%r15,8),%xmm8,%xmm15
vmovdqa -0x99(%r8,%r15,8),%ymm8
vmovdqa %ymm8,-0x99(%r8,%r15,8)
vpermilpd -0x99(%r8,%r15,8),%ymm8,%ymm15
vroundpd $7,-0x99(%r8,%r15,8),%ymm8
vextractf128 $7,%ymm8,-0x99(%r8,%r15,8)
vperm2f128 $7,-0x99(%r8,%r15,8),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%r8,%r15,8),%ymm12,%ymm14
# %rbp base, %r13 index, scale 4
vldmxcsr -0x99(%rbp,%r13,4)
vmovdqa -0x99(%rbp,%r13,4),%xmm8
vmovdqa %xmm8,-0x99(%rbp,%r13,4)
vmovd %xmm8,-0x99(%rbp,%r13,4)
vcvtsd2si -0x99(%rbp,%r13,4),%r8d
vcvtdq2pd -0x99(%rbp,%r13,4),%ymm8
vcvtpd2psy -0x99(%rbp,%r13,4),%xmm8
vpavgb -0x99(%rbp,%r13,4),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rbp,%r13,4),%xmm8
vpextrb $7,%xmm8,-0x99(%rbp,%r13,4)
vcvtsi2sdl -0x99(%rbp,%r13,4),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rbp,%r13,4),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rbp,%r13,4),%xmm12,%xmm14
vpinsrb $7,-0x99(%rbp,%r13,4),%xmm8,%xmm15
vmovdqa -0x99(%rbp,%r13,4),%ymm8
vmovdqa %ymm8,-0x99(%rbp,%r13,4)
vpermilpd -0x99(%rbp,%r13,4),%ymm8,%ymm15
vroundpd $7,-0x99(%rbp,%r13,4),%ymm8
vextractf128 $7,%ymm8,-0x99(%rbp,%r13,4)
vperm2f128 $7,-0x99(%rbp,%r13,4),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rbp,%r13,4),%ymm12,%ymm14
# %rsp base, %r12 index, scale 1
vldmxcsr -0x99(%rsp,%r12,1)
vmovdqa -0x99(%rsp,%r12,1),%xmm8
vmovdqa %xmm8,-0x99(%rsp,%r12,1)
vmovd %xmm8,-0x99(%rsp,%r12,1)
vcvtsd2si -0x99(%rsp,%r12,1),%r8d
vcvtdq2pd -0x99(%rsp,%r12,1),%ymm8
vcvtpd2psy -0x99(%rsp,%r12,1),%xmm8
vpavgb -0x99(%rsp,%r12,1),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rsp,%r12,1),%xmm8
vpextrb $7,%xmm8,-0x99(%rsp,%r12,1)
vcvtsi2sdl -0x99(%rsp,%r12,1),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rsp,%r12,1),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rsp,%r12,1),%xmm12,%xmm14
vpinsrb $7,-0x99(%rsp,%r12,1),%xmm8,%xmm15
vmovdqa -0x99(%rsp,%r12,1),%ymm8
vmovdqa %ymm8,-0x99(%rsp,%r12,1)
vpermilpd -0x99(%rsp,%r12,1),%ymm8,%ymm15
vroundpd $7,-0x99(%rsp,%r12,1),%ymm8
vextractf128 $7,%ymm8,-0x99(%rsp,%r12,1)
vperm2f128 $7,-0x99(%rsp,%r12,1),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rsp,%r12,1),%ymm12,%ymm14
# Tests for all register operands.
# Every operand is a register (no memory form), using high (REX-extended)
# registers to exercise VEX.R/VEX.B/VEX.X inversion bits in the encoder.
vmovmskpd %xmm8,%r8d
vpslld $7,%xmm8,%xmm15
vmovmskps %ymm8,%r8d
vmovdqa %xmm8,%xmm15
vmovd %xmm8,%r8d
vcvtsd2si %xmm8,%r8d
vcvtdq2pd %xmm8,%ymm8
vcvtpd2psy %ymm8,%xmm8
vaeskeygenassist $7,%xmm8,%xmm15
vpextrb $7,%xmm8,%r8d
vcvtsi2sdl %r8d,%xmm8,%xmm15
vpclmulqdq $7,%xmm8,%xmm15,%xmm12
vblendvps %xmm8,%xmm8,%xmm12,%xmm14
vpinsrb $7,%r8d,%xmm8,%xmm15
vmovdqa %ymm8,%ymm15
vpermilpd %ymm8,%ymm15,%ymm12
vroundpd $7,%ymm8,%ymm15
vextractf128 $7,%ymm8,%xmm8
vperm2f128 $7,%ymm8,%ymm15,%ymm12
vblendvpd %ymm8,%ymm15,%ymm12,%ymm14
vinsertf128 $7,%xmm8,%ymm8,%ymm15
# Tests for different memory/register operand
# 64-bit GPR destinations/sources (VEX.W=1 forms of the conversions/extracts).
vcvtsd2si (%rcx),%r8
vextractps $10,%xmm8,%r8
vcvtss2si (%rcx),%r8
vpinsrw $7,%r8,%xmm15,%xmm8
# Switch to Intel operand order/notation; the remainder of this region tests
# that gas accepts both explicit size keywords (DWORD/YMMWORD PTR) and the
# size-inferred form without them.
.intel_syntax noprefix
# Tests for op mem64
vldmxcsr DWORD PTR [rcx]
vldmxcsr [rcx]
vstmxcsr DWORD PTR [rcx]
vstmxcsr [rcx]
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd ymm6,ymm4,YMMWORD PTR [rcx]
vmaskmovpd YMMWORD PTR [rcx],ymm6,ymm4
vmaskmovpd ymm6,ymm4,[rcx]
vmaskmovpd [rcx],ymm6,ymm4
vmaskmovps ymm6,ymm4,YMMWORD PTR [rcx]
vmaskmovps YMMWORD PTR [rcx],ymm6,ymm4
vmaskmovps ymm6,ymm4,[rcx]
vmaskmovps [rcx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermilpd ymm2,ymm6,7
vpermilpd ymm6,YMMWORD PTR [rcx],7
vpermilpd ymm6,[rcx],7
vpermilps ymm2,ymm6,7
vpermilps ymm6,YMMWORD PTR [rcx],7
vpermilps ymm6,[rcx],7
vroundpd ymm2,ymm6,7
vroundpd ymm6,YMMWORD PTR [rcx],7
vroundpd ymm6,[rcx],7
vroundps ymm2,ymm6,7
vroundps ymm6,YMMWORD PTR [rcx],7
vroundps ymm6,[rcx],7
# Tests for op ymm/mem256, ymm, ymm
# Three-operand 256-bit packed ops: each mnemonic is tried with a register
# source, an explicitly sized memory source, and an unsized memory source.
# The vcmp* group walks all 32 predicate aliases (imm8 0..31) for pd then ps.
vaddpd ymm2,ymm6,ymm4
vaddpd ymm2,ymm6,YMMWORD PTR [rcx]
vaddpd ymm2,ymm6,[rcx]
vaddps ymm2,ymm6,ymm4
vaddps ymm2,ymm6,YMMWORD PTR [rcx]
vaddps ymm2,ymm6,[rcx]
vaddsubpd ymm2,ymm6,ymm4
vaddsubpd ymm2,ymm6,YMMWORD PTR [rcx]
vaddsubpd ymm2,ymm6,[rcx]
vaddsubps ymm2,ymm6,ymm4
vaddsubps ymm2,ymm6,YMMWORD PTR [rcx]
vaddsubps ymm2,ymm6,[rcx]
vandnpd ymm2,ymm6,ymm4
vandnpd ymm2,ymm6,YMMWORD PTR [rcx]
vandnpd ymm2,ymm6,[rcx]
vandnps ymm2,ymm6,ymm4
vandnps ymm2,ymm6,YMMWORD PTR [rcx]
vandnps ymm2,ymm6,[rcx]
vandpd ymm2,ymm6,ymm4
vandpd ymm2,ymm6,YMMWORD PTR [rcx]
vandpd ymm2,ymm6,[rcx]
vandps ymm2,ymm6,ymm4
vandps ymm2,ymm6,YMMWORD PTR [rcx]
vandps ymm2,ymm6,[rcx]
vdivpd ymm2,ymm6,ymm4
vdivpd ymm2,ymm6,YMMWORD PTR [rcx]
vdivpd ymm2,ymm6,[rcx]
vdivps ymm2,ymm6,ymm4
vdivps ymm2,ymm6,YMMWORD PTR [rcx]
vdivps ymm2,ymm6,[rcx]
vhaddpd ymm2,ymm6,ymm4
vhaddpd ymm2,ymm6,YMMWORD PTR [rcx]
vhaddpd ymm2,ymm6,[rcx]
vhaddps ymm2,ymm6,ymm4
vhaddps ymm2,ymm6,YMMWORD PTR [rcx]
vhaddps ymm2,ymm6,[rcx]
vhsubpd ymm2,ymm6,ymm4
vhsubpd ymm2,ymm6,YMMWORD PTR [rcx]
vhsubpd ymm2,ymm6,[rcx]
vhsubps ymm2,ymm6,ymm4
vhsubps ymm2,ymm6,YMMWORD PTR [rcx]
vhsubps ymm2,ymm6,[rcx]
vmaxpd ymm2,ymm6,ymm4
vmaxpd ymm2,ymm6,YMMWORD PTR [rcx]
vmaxpd ymm2,ymm6,[rcx]
vmaxps ymm2,ymm6,ymm4
vmaxps ymm2,ymm6,YMMWORD PTR [rcx]
vmaxps ymm2,ymm6,[rcx]
vminpd ymm2,ymm6,ymm4
vminpd ymm2,ymm6,YMMWORD PTR [rcx]
vminpd ymm2,ymm6,[rcx]
vminps ymm2,ymm6,ymm4
vminps ymm2,ymm6,YMMWORD PTR [rcx]
vminps ymm2,ymm6,[rcx]
vmulpd ymm2,ymm6,ymm4
vmulpd ymm2,ymm6,YMMWORD PTR [rcx]
vmulpd ymm2,ymm6,[rcx]
vmulps ymm2,ymm6,ymm4
vmulps ymm2,ymm6,YMMWORD PTR [rcx]
vmulps ymm2,ymm6,[rcx]
vorpd ymm2,ymm6,ymm4
vorpd ymm2,ymm6,YMMWORD PTR [rcx]
vorpd ymm2,ymm6,[rcx]
vorps ymm2,ymm6,ymm4
vorps ymm2,ymm6,YMMWORD PTR [rcx]
vorps ymm2,ymm6,[rcx]
vpermilpd ymm2,ymm6,ymm4
vpermilpd ymm2,ymm6,YMMWORD PTR [rcx]
vpermilpd ymm2,ymm6,[rcx]
vpermilps ymm2,ymm6,ymm4
vpermilps ymm2,ymm6,YMMWORD PTR [rcx]
vpermilps ymm2,ymm6,[rcx]
vsubpd ymm2,ymm6,ymm4
vsubpd ymm2,ymm6,YMMWORD PTR [rcx]
vsubpd ymm2,ymm6,[rcx]
vsubps ymm2,ymm6,ymm4
vsubps ymm2,ymm6,YMMWORD PTR [rcx]
vsubps ymm2,ymm6,[rcx]
vunpckhpd ymm2,ymm6,ymm4
vunpckhpd ymm2,ymm6,YMMWORD PTR [rcx]
vunpckhpd ymm2,ymm6,[rcx]
vunpckhps ymm2,ymm6,ymm4
vunpckhps ymm2,ymm6,YMMWORD PTR [rcx]
vunpckhps ymm2,ymm6,[rcx]
vunpcklpd ymm2,ymm6,ymm4
vunpcklpd ymm2,ymm6,YMMWORD PTR [rcx]
vunpcklpd ymm2,ymm6,[rcx]
vunpcklps ymm2,ymm6,ymm4
vunpcklps ymm2,ymm6,YMMWORD PTR [rcx]
vunpcklps ymm2,ymm6,[rcx]
vxorpd ymm2,ymm6,ymm4
vxorpd ymm2,ymm6,YMMWORD PTR [rcx]
vxorpd ymm2,ymm6,[rcx]
vxorps ymm2,ymm6,ymm4
vxorps ymm2,ymm6,YMMWORD PTR [rcx]
vxorps ymm2,ymm6,[rcx]
# vcmp predicate-alias sweep, packed double.
vcmpeqpd ymm2,ymm6,ymm4
vcmpeqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeqpd ymm2,ymm6,[rcx]
vcmpltpd ymm2,ymm6,ymm4
vcmpltpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpltpd ymm2,ymm6,[rcx]
vcmplepd ymm2,ymm6,ymm4
vcmplepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmplepd ymm2,ymm6,[rcx]
vcmpunordpd ymm2,ymm6,ymm4
vcmpunordpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunordpd ymm2,ymm6,[rcx]
vcmpneqpd ymm2,ymm6,ymm4
vcmpneqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneqpd ymm2,ymm6,[rcx]
vcmpnltpd ymm2,ymm6,ymm4
vcmpnltpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnltpd ymm2,ymm6,[rcx]
vcmpnlepd ymm2,ymm6,ymm4
vcmpnlepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnlepd ymm2,ymm6,[rcx]
vcmpordpd ymm2,ymm6,ymm4
vcmpordpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpordpd ymm2,ymm6,[rcx]
vcmpeq_uqpd ymm2,ymm6,ymm4
vcmpeq_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_uqpd ymm2,ymm6,[rcx]
vcmpngepd ymm2,ymm6,ymm4
vcmpngepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngepd ymm2,ymm6,[rcx]
vcmpngtpd ymm2,ymm6,ymm4
vcmpngtpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngtpd ymm2,ymm6,[rcx]
vcmpfalsepd ymm2,ymm6,ymm4
vcmpfalsepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalsepd ymm2,ymm6,[rcx]
vcmpneq_oqpd ymm2,ymm6,ymm4
vcmpneq_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_oqpd ymm2,ymm6,[rcx]
vcmpgepd ymm2,ymm6,ymm4
vcmpgepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgepd ymm2,ymm6,[rcx]
vcmpgtpd ymm2,ymm6,ymm4
vcmpgtpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgtpd ymm2,ymm6,[rcx]
vcmptruepd ymm2,ymm6,ymm4
vcmptruepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmptruepd ymm2,ymm6,[rcx]
vcmpeq_ospd ymm2,ymm6,ymm4
vcmpeq_ospd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_ospd ymm2,ymm6,[rcx]
vcmplt_oqpd ymm2,ymm6,ymm4
vcmplt_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmplt_oqpd ymm2,ymm6,[rcx]
vcmple_oqpd ymm2,ymm6,ymm4
vcmple_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmple_oqpd ymm2,ymm6,[rcx]
vcmpunord_spd ymm2,ymm6,ymm4
vcmpunord_spd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunord_spd ymm2,ymm6,[rcx]
vcmpneq_uspd ymm2,ymm6,ymm4
vcmpneq_uspd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_uspd ymm2,ymm6,[rcx]
vcmpnlt_uqpd ymm2,ymm6,ymm4
vcmpnlt_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnlt_uqpd ymm2,ymm6,[rcx]
vcmpnle_uqpd ymm2,ymm6,ymm4
vcmpnle_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnle_uqpd ymm2,ymm6,[rcx]
vcmpord_spd ymm2,ymm6,ymm4
vcmpord_spd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpord_spd ymm2,ymm6,[rcx]
vcmpeq_uspd ymm2,ymm6,ymm4
vcmpeq_uspd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_uspd ymm2,ymm6,[rcx]
vcmpnge_uqpd ymm2,ymm6,ymm4
vcmpnge_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnge_uqpd ymm2,ymm6,[rcx]
vcmpngt_uqpd ymm2,ymm6,ymm4
vcmpngt_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngt_uqpd ymm2,ymm6,[rcx]
vcmpfalse_ospd ymm2,ymm6,ymm4
vcmpfalse_ospd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalse_ospd ymm2,ymm6,[rcx]
vcmpneq_ospd ymm2,ymm6,ymm4
vcmpneq_ospd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_ospd ymm2,ymm6,[rcx]
vcmpge_oqpd ymm2,ymm6,ymm4
vcmpge_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpge_oqpd ymm2,ymm6,[rcx]
vcmpgt_oqpd ymm2,ymm6,ymm4
vcmpgt_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgt_oqpd ymm2,ymm6,[rcx]
vcmptrue_uspd ymm2,ymm6,ymm4
vcmptrue_uspd ymm2,ymm6,YMMWORD PTR [rcx]
vcmptrue_uspd ymm2,ymm6,[rcx]
# vcmp predicate-alias sweep, packed single.
vcmpeqps ymm2,ymm6,ymm4
vcmpeqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeqps ymm2,ymm6,[rcx]
vcmpltps ymm2,ymm6,ymm4
vcmpltps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpltps ymm2,ymm6,[rcx]
vcmpleps ymm2,ymm6,ymm4
vcmpleps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpleps ymm2,ymm6,[rcx]
vcmpunordps ymm2,ymm6,ymm4
vcmpunordps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunordps ymm2,ymm6,[rcx]
vcmpneqps ymm2,ymm6,ymm4
vcmpneqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneqps ymm2,ymm6,[rcx]
vcmpnltps ymm2,ymm6,ymm4
vcmpnltps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnltps ymm2,ymm6,[rcx]
vcmpnleps ymm2,ymm6,ymm4
vcmpnleps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnleps ymm2,ymm6,[rcx]
vcmpordps ymm2,ymm6,ymm4
vcmpordps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpordps ymm2,ymm6,[rcx]
vcmpeq_uqps ymm2,ymm6,ymm4
vcmpeq_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_uqps ymm2,ymm6,[rcx]
vcmpngeps ymm2,ymm6,ymm4
vcmpngeps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngeps ymm2,ymm6,[rcx]
vcmpngtps ymm2,ymm6,ymm4
vcmpngtps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngtps ymm2,ymm6,[rcx]
vcmpfalseps ymm2,ymm6,ymm4
vcmpfalseps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalseps ymm2,ymm6,[rcx]
vcmpneq_oqps ymm2,ymm6,ymm4
vcmpneq_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_oqps ymm2,ymm6,[rcx]
vcmpgeps ymm2,ymm6,ymm4
vcmpgeps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgeps ymm2,ymm6,[rcx]
vcmpgtps ymm2,ymm6,ymm4
vcmpgtps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgtps ymm2,ymm6,[rcx]
vcmptrueps ymm2,ymm6,ymm4
vcmptrueps ymm2,ymm6,YMMWORD PTR [rcx]
vcmptrueps ymm2,ymm6,[rcx]
vcmpeq_osps ymm2,ymm6,ymm4
vcmpeq_osps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_osps ymm2,ymm6,[rcx]
vcmplt_oqps ymm2,ymm6,ymm4
vcmplt_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmplt_oqps ymm2,ymm6,[rcx]
vcmple_oqps ymm2,ymm6,ymm4
vcmple_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmple_oqps ymm2,ymm6,[rcx]
vcmpunord_sps ymm2,ymm6,ymm4
vcmpunord_sps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunord_sps ymm2,ymm6,[rcx]
vcmpneq_usps ymm2,ymm6,ymm4
vcmpneq_usps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_usps ymm2,ymm6,[rcx]
vcmpnlt_uqps ymm2,ymm6,ymm4
vcmpnlt_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnlt_uqps ymm2,ymm6,[rcx]
vcmpnle_uqps ymm2,ymm6,ymm4
vcmpnle_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnle_uqps ymm2,ymm6,[rcx]
vcmpord_sps ymm2,ymm6,ymm4
vcmpord_sps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpord_sps ymm2,ymm6,[rcx]
vcmpeq_usps ymm2,ymm6,ymm4
vcmpeq_usps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_usps ymm2,ymm6,[rcx]
vcmpnge_uqps ymm2,ymm6,ymm4
vcmpnge_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnge_uqps ymm2,ymm6,[rcx]
vcmpngt_uqps ymm2,ymm6,ymm4
vcmpngt_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngt_uqps ymm2,ymm6,[rcx]
vcmpfalse_osps ymm2,ymm6,ymm4
vcmpfalse_osps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalse_osps ymm2,ymm6,[rcx]
vcmpneq_osps ymm2,ymm6,ymm4
vcmpneq_osps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_osps ymm2,ymm6,[rcx]
vcmpge_oqps ymm2,ymm6,ymm4
vcmpge_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpge_oqps ymm2,ymm6,[rcx]
vcmpgt_oqps ymm2,ymm6,ymm4
vcmpgt_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgt_oqps ymm2,ymm6,[rcx]
vcmptrue_usps ymm2,ymm6,ymm4
vcmptrue_usps ymm2,ymm6,YMMWORD PTR [rcx]
vcmptrue_usps ymm2,ymm6,[rcx]
# GFNI 256-bit form; the rdx+/-constant operands probe the disp8 vs disp32
# encoding boundary around +/-4 KiB.
vgf2p8mulb ymm6, ymm5, ymm4
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rcx]
vgf2p8mulb ymm6, ymm5, [rcx]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx+4064]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx+4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx-4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx-4128]
# Tests for op ymm/mem256, xmm
# Narrowing conversions (256-bit source, 128-bit destination).
vcvtpd2dq xmm4,ymm4
vcvtpd2dq xmm4,YMMWORD PTR [rcx]
vcvtpd2ps xmm4,ymm4
vcvtpd2ps xmm4,YMMWORD PTR [rcx]
vcvttpd2dq xmm4,ymm4
vcvttpd2dq xmm4,YMMWORD PTR [rcx]
# Tests for op ymm/mem256, ymm
# Two-operand 256-bit loads/conversions/unary ops.
vcvtdq2ps ymm6,ymm4
vcvtdq2ps ymm4,YMMWORD PTR [rcx]
vcvtdq2ps ymm4,[rcx]
vcvtps2dq ymm6,ymm4
vcvtps2dq ymm4,YMMWORD PTR [rcx]
vcvtps2dq ymm4,[rcx]
vcvttps2dq ymm6,ymm4
vcvttps2dq ymm4,YMMWORD PTR [rcx]
vcvttps2dq ymm4,[rcx]
vmovapd ymm6,ymm4
vmovapd ymm4,YMMWORD PTR [rcx]
vmovapd ymm4,[rcx]
vmovaps ymm6,ymm4
vmovaps ymm4,YMMWORD PTR [rcx]
vmovaps ymm4,[rcx]
vmovdqa ymm6,ymm4
vmovdqa ymm4,YMMWORD PTR [rcx]
vmovdqa ymm4,[rcx]
vmovdqu ymm6,ymm4
vmovdqu ymm4,YMMWORD PTR [rcx]
vmovdqu ymm4,[rcx]
vmovddup ymm6,ymm4
vmovddup ymm4,YMMWORD PTR [rcx]
vmovddup ymm4,[rcx]
vmovshdup ymm6,ymm4
vmovshdup ymm4,YMMWORD PTR [rcx]
vmovshdup ymm4,[rcx]
vmovsldup ymm6,ymm4
vmovsldup ymm4,YMMWORD PTR [rcx]
vmovsldup ymm4,[rcx]
vmovupd ymm6,ymm4
vmovupd ymm4,YMMWORD PTR [rcx]
vmovupd ymm4,[rcx]
vmovups ymm6,ymm4
vmovups ymm4,YMMWORD PTR [rcx]
vmovups ymm4,[rcx]
vptest ymm6,ymm4
vptest ymm4,YMMWORD PTR [rcx]
vptest ymm4,[rcx]
vrcpps ymm6,ymm4
vrcpps ymm4,YMMWORD PTR [rcx]
vrcpps ymm4,[rcx]
vrsqrtps ymm6,ymm4
vrsqrtps ymm4,YMMWORD PTR [rcx]
vrsqrtps ymm4,[rcx]
vsqrtpd ymm6,ymm4
vsqrtpd ymm4,YMMWORD PTR [rcx]
vsqrtpd ymm4,[rcx]
vsqrtps ymm6,ymm4
vsqrtps ymm4,YMMWORD PTR [rcx]
vsqrtps ymm4,[rcx]
vtestpd ymm6,ymm4
vtestpd ymm4,YMMWORD PTR [rcx]
vtestpd ymm4,[rcx]
vtestps ymm6,ymm4
vtestps ymm4,YMMWORD PTR [rcx]
vtestps ymm4,[rcx]
# Tests for op ymm, ymm/mem256
# Store direction (memory destination) of the same move family.
vmovapd ymm6,ymm4
vmovapd YMMWORD PTR [rcx],ymm4
vmovapd [rcx],ymm4
vmovaps ymm6,ymm4
vmovaps YMMWORD PTR [rcx],ymm4
vmovaps [rcx],ymm4
vmovdqa ymm6,ymm4
vmovdqa YMMWORD PTR [rcx],ymm4
vmovdqa [rcx],ymm4
vmovdqu ymm6,ymm4
vmovdqu YMMWORD PTR [rcx],ymm4
vmovdqu [rcx],ymm4
vmovupd ymm6,ymm4
vmovupd YMMWORD PTR [rcx],ymm4
vmovupd [rcx],ymm4
vmovups ymm6,ymm4
vmovups YMMWORD PTR [rcx],ymm4
vmovups [rcx],ymm4
# Tests for op mem256, ymm
vlddqu ymm4,YMMWORD PTR [rcx]
vlddqu ymm4,[rcx]
# Tests for op ymm, mem256
# Non-temporal stores (memory destination only).
vmovntdq YMMWORD PTR [rcx],ymm4
vmovntdq [rcx],ymm4
vmovntpd YMMWORD PTR [rcx],ymm4
vmovntpd [rcx],ymm4
vmovntps YMMWORD PTR [rcx],ymm4
vmovntps [rcx],ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
# Three sources plus an immediate; GFNI variants again probe the disp8/disp32
# boundary around +/-4 KiB with the rdx-based operands.
vblendpd ymm2,ymm6,ymm4,7
vblendpd ymm2,ymm6,YMMWORD PTR [rcx],7
vblendpd ymm2,ymm6,[rcx],7
vblendps ymm2,ymm6,ymm4,7
vblendps ymm2,ymm6,YMMWORD PTR [rcx],7
vblendps ymm2,ymm6,[rcx],7
vcmppd ymm2,ymm6,ymm4,7
vcmppd ymm2,ymm6,YMMWORD PTR [rcx],7
vcmppd ymm2,ymm6,[rcx],7
vcmpps ymm2,ymm6,ymm4,7
vcmpps ymm2,ymm6,YMMWORD PTR [rcx],7
vcmpps ymm2,ymm6,[rcx],7
vdpps ymm2,ymm6,ymm4,7
vdpps ymm2,ymm6,YMMWORD PTR [rcx],7
vdpps ymm2,ymm6,[rcx],7
vperm2f128 ymm2,ymm6,ymm4,7
vperm2f128 ymm2,ymm6,YMMWORD PTR [rcx],7
vperm2f128 ymm2,ymm6,[rcx],7
vshufpd ymm2,ymm6,ymm4,7
vshufpd ymm2,ymm6,YMMWORD PTR [rcx],7
vshufpd ymm2,ymm6,[rcx],7
vshufps ymm2,ymm6,ymm4,7
vshufps ymm2,ymm6,YMMWORD PTR [rcx],7
vshufps ymm2,ymm6,[rcx],7
vgf2p8affineqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineqb ymm6, ymm5, ymm4, 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rcx], 123
vgf2p8affineqb ymm6, ymm5, [rcx], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx+4064], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx+4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx-4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx-4128], 123
vgf2p8affineinvqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineinvqb ymm6, ymm5, ymm4, 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rcx], 123
vgf2p8affineinvqb ymm6, ymm5, [rcx], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx+4064], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx+4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx-4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx-4128], 123
# Tests for op ymm, ymm/mem256, ymm, ymm
# Register-selected blend (fourth operand is a register, encoded in imm8[7:4]).
vblendvpd ymm7,ymm2,ymm6,ymm4
vblendvpd ymm7,ymm2,YMMWORD PTR [rcx],ymm4
vblendvpd ymm7,ymm2,[rcx],ymm4
vblendvps ymm7,ymm2,ymm6,ymm4
vblendvps ymm7,ymm2,YMMWORD PTR [rcx],ymm4
vblendvps ymm7,ymm2,[rcx],ymm4
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 ymm6,ymm4,xmm4,7
vinsertf128 ymm6,ymm4,XMMWORD PTR [rcx],7
vinsertf128 ymm6,ymm4,[rcx],7
# Tests for op imm8, ymm, xmm/mem128
vextractf128 xmm4,ymm4,7
vextractf128 XMMWORD PTR [rcx],ymm4,7
vextractf128 [rcx],ymm4,7
# Tests for op mem128, ymm
vbroadcastf128 ymm4,XMMWORD PTR [rcx]
vbroadcastf128 ymm4,[rcx]
# Tests for op xmm/mem128, xmm
# 128-bit two-operand forms (VEX.L=0); the memory-less size-inferred variant
# is omitted where the mnemonic would be ambiguous without a size keyword
# (e.g. vcvtpd2dq, vcvtpd2ps, vcvttpd2dq accept 128- or 256-bit memory).
vcvtdq2ps xmm6,xmm4
vcvtdq2ps xmm4,XMMWORD PTR [rcx]
vcvtdq2ps xmm4,[rcx]
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm4,XMMWORD PTR [rcx]
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm4,XMMWORD PTR [rcx]
vcvtps2dq xmm6,xmm4
vcvtps2dq xmm4,XMMWORD PTR [rcx]
vcvtps2dq xmm4,[rcx]
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm4,XMMWORD PTR [rcx]
vcvttps2dq xmm6,xmm4
vcvttps2dq xmm4,XMMWORD PTR [rcx]
vcvttps2dq xmm4,[rcx]
vmovapd xmm6,xmm4
vmovapd xmm4,XMMWORD PTR [rcx]
vmovapd xmm4,[rcx]
vmovaps xmm6,xmm4
vmovaps xmm4,XMMWORD PTR [rcx]
vmovaps xmm4,[rcx]
vmovdqa xmm6,xmm4
vmovdqa xmm4,XMMWORD PTR [rcx]
vmovdqa xmm4,[rcx]
vmovdqu xmm6,xmm4
vmovdqu xmm4,XMMWORD PTR [rcx]
vmovdqu xmm4,[rcx]
vmovshdup xmm6,xmm4
vmovshdup xmm4,XMMWORD PTR [rcx]
vmovshdup xmm4,[rcx]
vmovsldup xmm6,xmm4
vmovsldup xmm4,XMMWORD PTR [rcx]
vmovsldup xmm4,[rcx]
vmovupd xmm6,xmm4
vmovupd xmm4,XMMWORD PTR [rcx]
vmovupd xmm4,[rcx]
vmovups xmm6,xmm4
vmovups xmm4,XMMWORD PTR [rcx]
vmovups xmm4,[rcx]
vpabsb xmm6,xmm4
vpabsb xmm4,XMMWORD PTR [rcx]
vpabsb xmm4,[rcx]
vpabsw xmm6,xmm4
vpabsw xmm4,XMMWORD PTR [rcx]
vpabsw xmm4,[rcx]
vpabsd xmm6,xmm4
vpabsd xmm4,XMMWORD PTR [rcx]
vpabsd xmm4,[rcx]
vphminposuw xmm6,xmm4
vphminposuw xmm4,XMMWORD PTR [rcx]
vphminposuw xmm4,[rcx]
vptest xmm6,xmm4
vptest xmm4,XMMWORD PTR [rcx]
vptest xmm4,[rcx]
vtestps xmm6,xmm4
vtestps xmm4,XMMWORD PTR [rcx]
vtestps xmm4,[rcx]
vtestpd xmm6,xmm4
vtestpd xmm4,XMMWORD PTR [rcx]
vtestpd xmm4,[rcx]
vrcpps xmm6,xmm4
vrcpps xmm4,XMMWORD PTR [rcx]
vrcpps xmm4,[rcx]
vrsqrtps xmm6,xmm4
vrsqrtps xmm4,XMMWORD PTR [rcx]
vrsqrtps xmm4,[rcx]
vsqrtpd xmm6,xmm4
vsqrtpd xmm4,XMMWORD PTR [rcx]
vsqrtpd xmm4,[rcx]
vsqrtps xmm6,xmm4
vsqrtps xmm4,XMMWORD PTR [rcx]
vsqrtps xmm4,[rcx]
vaesimc xmm6,xmm4
vaesimc xmm4,XMMWORD PTR [rcx]
vaesimc xmm4,[rcx]
# Tests for op xmm, xmm/mem128
# Store direction (memory destination) of the 128-bit move family.
vmovapd xmm6,xmm4
vmovapd XMMWORD PTR [rcx],xmm4
vmovapd [rcx],xmm4
vmovaps xmm6,xmm4
vmovaps XMMWORD PTR [rcx],xmm4
vmovaps [rcx],xmm4
vmovdqa xmm6,xmm4
vmovdqa XMMWORD PTR [rcx],xmm4
vmovdqa [rcx],xmm4
vmovdqu xmm6,xmm4
vmovdqu XMMWORD PTR [rcx],xmm4
vmovdqu [rcx],xmm4
vmovupd xmm6,xmm4
vmovupd XMMWORD PTR [rcx],xmm4
vmovupd [rcx],xmm4
vmovups xmm6,xmm4
vmovups XMMWORD PTR [rcx],xmm4
vmovups [rcx],xmm4
# Tests for op mem128, xmm
vlddqu xmm4,XMMWORD PTR [rcx]
vlddqu xmm4,[rcx]
vmovntdqa xmm4,XMMWORD PTR [rcx]
vmovntdqa xmm4,[rcx]
# Tests for op xmm, mem128
# Non-temporal 128-bit stores.
vmovntdq XMMWORD PTR [rcx],xmm4
vmovntdq [rcx],xmm4
vmovntpd XMMWORD PTR [rcx],xmm4
vmovntpd [rcx],xmm4
vmovntps XMMWORD PTR [rcx],xmm4
vmovntps [rcx],xmm4
# Tests for op xmm/mem128, ymm
# Widening conversions (128-bit source, 256-bit destination).
vcvtdq2pd ymm4,xmm4
vcvtdq2pd ymm4,XMMWORD PTR [rcx]
vcvtdq2pd ymm4,[rcx]
vcvtps2pd ymm4,xmm4
vcvtps2pd ymm4,XMMWORD PTR [rcx]
vcvtps2pd ymm4,[rcx]
# Tests for op xmm/mem128, xmm, xmm
# Three-operand 128-bit forms: register source, sized memory source, and
# unsized memory source for each mnemonic. The vcmp* sweep at the end
# continues past this point in the file.
vaddpd xmm2,xmm6,xmm4
vaddpd xmm7,xmm6,XMMWORD PTR [rcx]
vaddpd xmm7,xmm6,[rcx]
vaddps xmm2,xmm6,xmm4
vaddps xmm7,xmm6,XMMWORD PTR [rcx]
vaddps xmm7,xmm6,[rcx]
vaddsubpd xmm2,xmm6,xmm4
vaddsubpd xmm7,xmm6,XMMWORD PTR [rcx]
vaddsubpd xmm7,xmm6,[rcx]
vaddsubps xmm2,xmm6,xmm4
vaddsubps xmm7,xmm6,XMMWORD PTR [rcx]
vaddsubps xmm7,xmm6,[rcx]
vandnpd xmm2,xmm6,xmm4
vandnpd xmm7,xmm6,XMMWORD PTR [rcx]
vandnpd xmm7,xmm6,[rcx]
vandnps xmm2,xmm6,xmm4
vandnps xmm7,xmm6,XMMWORD PTR [rcx]
vandnps xmm7,xmm6,[rcx]
vandpd xmm2,xmm6,xmm4
vandpd xmm7,xmm6,XMMWORD PTR [rcx]
vandpd xmm7,xmm6,[rcx]
vandps xmm2,xmm6,xmm4
vandps xmm7,xmm6,XMMWORD PTR [rcx]
vandps xmm7,xmm6,[rcx]
vdivpd xmm2,xmm6,xmm4
vdivpd xmm7,xmm6,XMMWORD PTR [rcx]
vdivpd xmm7,xmm6,[rcx]
vdivps xmm2,xmm6,xmm4
vdivps xmm7,xmm6,XMMWORD PTR [rcx]
vdivps xmm7,xmm6,[rcx]
vhaddpd xmm2,xmm6,xmm4
vhaddpd xmm7,xmm6,XMMWORD PTR [rcx]
vhaddpd xmm7,xmm6,[rcx]
vhaddps xmm2,xmm6,xmm4
vhaddps xmm7,xmm6,XMMWORD PTR [rcx]
vhaddps xmm7,xmm6,[rcx]
vhsubpd xmm2,xmm6,xmm4
vhsubpd xmm7,xmm6,XMMWORD PTR [rcx]
vhsubpd xmm7,xmm6,[rcx]
vhsubps xmm2,xmm6,xmm4
vhsubps xmm7,xmm6,XMMWORD PTR [rcx]
vhsubps xmm7,xmm6,[rcx]
vmaxpd xmm2,xmm6,xmm4
vmaxpd xmm7,xmm6,XMMWORD PTR [rcx]
vmaxpd xmm7,xmm6,[rcx]
vmaxps xmm2,xmm6,xmm4
vmaxps xmm7,xmm6,XMMWORD PTR [rcx]
vmaxps xmm7,xmm6,[rcx]
vminpd xmm2,xmm6,xmm4
vminpd xmm7,xmm6,XMMWORD PTR [rcx]
vminpd xmm7,xmm6,[rcx]
vminps xmm2,xmm6,xmm4
vminps xmm7,xmm6,XMMWORD PTR [rcx]
vminps xmm7,xmm6,[rcx]
vmulpd xmm2,xmm6,xmm4
vmulpd xmm7,xmm6,XMMWORD PTR [rcx]
vmulpd xmm7,xmm6,[rcx]
vmulps xmm2,xmm6,xmm4
vmulps xmm7,xmm6,XMMWORD PTR [rcx]
vmulps xmm7,xmm6,[rcx]
vorpd xmm2,xmm6,xmm4
vorpd xmm7,xmm6,XMMWORD PTR [rcx]
vorpd xmm7,xmm6,[rcx]
vorps xmm2,xmm6,xmm4
vorps xmm7,xmm6,XMMWORD PTR [rcx]
vorps xmm7,xmm6,[rcx]
vpacksswb xmm2,xmm6,xmm4
vpacksswb xmm7,xmm6,XMMWORD PTR [rcx]
vpacksswb xmm7,xmm6,[rcx]
vpackssdw xmm2,xmm6,xmm4
vpackssdw xmm7,xmm6,XMMWORD PTR [rcx]
vpackssdw xmm7,xmm6,[rcx]
vpackuswb xmm2,xmm6,xmm4
vpackuswb xmm7,xmm6,XMMWORD PTR [rcx]
vpackuswb xmm7,xmm6,[rcx]
vpackusdw xmm2,xmm6,xmm4
vpackusdw xmm7,xmm6,XMMWORD PTR [rcx]
vpackusdw xmm7,xmm6,[rcx]
vpaddb xmm2,xmm6,xmm4
vpaddb xmm7,xmm6,XMMWORD PTR [rcx]
vpaddb xmm7,xmm6,[rcx]
vpaddw xmm2,xmm6,xmm4
vpaddw xmm7,xmm6,XMMWORD PTR [rcx]
vpaddw xmm7,xmm6,[rcx]
vpaddd xmm2,xmm6,xmm4
vpaddd xmm7,xmm6,XMMWORD PTR [rcx]
vpaddd xmm7,xmm6,[rcx]
vpaddq xmm2,xmm6,xmm4
vpaddq xmm7,xmm6,XMMWORD PTR [rcx]
vpaddq xmm7,xmm6,[rcx]
vpaddsb xmm2,xmm6,xmm4
vpaddsb xmm7,xmm6,XMMWORD PTR [rcx]
vpaddsb xmm7,xmm6,[rcx]
vpaddsw xmm2,xmm6,xmm4
vpaddsw xmm7,xmm6,XMMWORD PTR [rcx]
vpaddsw xmm7,xmm6,[rcx]
vpaddusb xmm2,xmm6,xmm4
vpaddusb xmm7,xmm6,XMMWORD PTR [rcx]
vpaddusb xmm7,xmm6,[rcx]
vpaddusw xmm2,xmm6,xmm4
vpaddusw xmm7,xmm6,XMMWORD PTR [rcx]
vpaddusw xmm7,xmm6,[rcx]
vpand xmm2,xmm6,xmm4
vpand xmm7,xmm6,XMMWORD PTR [rcx]
vpand xmm7,xmm6,[rcx]
vpandn xmm2,xmm6,xmm4
vpandn xmm7,xmm6,XMMWORD PTR [rcx]
vpandn xmm7,xmm6,[rcx]
vpavgb xmm2,xmm6,xmm4
vpavgb xmm7,xmm6,XMMWORD PTR [rcx]
vpavgb xmm7,xmm6,[rcx]
vpavgw xmm2,xmm6,xmm4
vpavgw xmm7,xmm6,XMMWORD PTR [rcx]
vpavgw xmm7,xmm6,[rcx]
# vpclmul* quadrant aliases (encode fixed imm8 selector values).
vpclmullqlqdq xmm2,xmm6,xmm4
vpclmullqlqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmullqlqdq xmm7,xmm6,[rcx]
vpclmulhqlqdq xmm2,xmm6,xmm4
vpclmulhqlqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmulhqlqdq xmm7,xmm6,[rcx]
vpclmullqhqdq xmm2,xmm6,xmm4
vpclmullqhqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmullqhqdq xmm7,xmm6,[rcx]
vpclmulhqhqdq xmm2,xmm6,xmm4
vpclmulhqhqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmulhqhqdq xmm7,xmm6,[rcx]
vpcmpeqb xmm2,xmm6,xmm4
vpcmpeqb xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqb xmm7,xmm6,[rcx]
vpcmpeqw xmm2,xmm6,xmm4
vpcmpeqw xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqw xmm7,xmm6,[rcx]
vpcmpeqd xmm2,xmm6,xmm4
vpcmpeqd xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqd xmm7,xmm6,[rcx]
vpcmpeqq xmm2,xmm6,xmm4
vpcmpeqq xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqq xmm7,xmm6,[rcx]
vpcmpgtb xmm2,xmm6,xmm4
vpcmpgtb xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtb xmm7,xmm6,[rcx]
vpcmpgtw xmm2,xmm6,xmm4
vpcmpgtw xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtw xmm7,xmm6,[rcx]
vpcmpgtd xmm2,xmm6,xmm4
vpcmpgtd xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtd xmm7,xmm6,[rcx]
vpcmpgtq xmm2,xmm6,xmm4
vpcmpgtq xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtq xmm7,xmm6,[rcx]
vpermilpd xmm2,xmm6,xmm4
vpermilpd xmm7,xmm6,XMMWORD PTR [rcx]
vpermilpd xmm7,xmm6,[rcx]
vpermilps xmm2,xmm6,xmm4
vpermilps xmm7,xmm6,XMMWORD PTR [rcx]
vpermilps xmm7,xmm6,[rcx]
vphaddw xmm2,xmm6,xmm4
vphaddw xmm7,xmm6,XMMWORD PTR [rcx]
vphaddw xmm7,xmm6,[rcx]
vphaddd xmm2,xmm6,xmm4
vphaddd xmm7,xmm6,XMMWORD PTR [rcx]
vphaddd xmm7,xmm6,[rcx]
vphaddsw xmm2,xmm6,xmm4
vphaddsw xmm7,xmm6,XMMWORD PTR [rcx]
vphaddsw xmm7,xmm6,[rcx]
vphsubw xmm2,xmm6,xmm4
vphsubw xmm7,xmm6,XMMWORD PTR [rcx]
vphsubw xmm7,xmm6,[rcx]
vphsubd xmm2,xmm6,xmm4
vphsubd xmm7,xmm6,XMMWORD PTR [rcx]
vphsubd xmm7,xmm6,[rcx]
vphsubsw xmm2,xmm6,xmm4
vphsubsw xmm7,xmm6,XMMWORD PTR [rcx]
vphsubsw xmm7,xmm6,[rcx]
vpmaddwd xmm2,xmm6,xmm4
vpmaddwd xmm7,xmm6,XMMWORD PTR [rcx]
vpmaddwd xmm7,xmm6,[rcx]
vpmaddubsw xmm2,xmm6,xmm4
vpmaddubsw xmm7,xmm6,XMMWORD PTR [rcx]
vpmaddubsw xmm7,xmm6,[rcx]
vpmaxsb xmm2,xmm6,xmm4
vpmaxsb xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxsb xmm7,xmm6,[rcx]
vpmaxsw xmm2,xmm6,xmm4
vpmaxsw xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxsw xmm7,xmm6,[rcx]
vpmaxsd xmm2,xmm6,xmm4
vpmaxsd xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxsd xmm7,xmm6,[rcx]
vpmaxub xmm2,xmm6,xmm4
vpmaxub xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxub xmm7,xmm6,[rcx]
vpmaxuw xmm2,xmm6,xmm4
vpmaxuw xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxuw xmm7,xmm6,[rcx]
vpmaxud xmm2,xmm6,xmm4
vpmaxud xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxud xmm7,xmm6,[rcx]
vpminsb xmm2,xmm6,xmm4
vpminsb xmm7,xmm6,XMMWORD PTR [rcx]
vpminsb xmm7,xmm6,[rcx]
vpminsw xmm2,xmm6,xmm4
vpminsw xmm7,xmm6,XMMWORD PTR [rcx]
vpminsw xmm7,xmm6,[rcx]
vpminsd xmm2,xmm6,xmm4
vpminsd xmm7,xmm6,XMMWORD PTR [rcx]
vpminsd xmm7,xmm6,[rcx]
vpminub xmm2,xmm6,xmm4
vpminub xmm7,xmm6,XMMWORD PTR [rcx]
vpminub xmm7,xmm6,[rcx]
vpminuw xmm2,xmm6,xmm4
vpminuw xmm7,xmm6,XMMWORD PTR [rcx]
vpminuw xmm7,xmm6,[rcx]
vpminud xmm2,xmm6,xmm4
vpminud xmm7,xmm6,XMMWORD PTR [rcx]
vpminud xmm7,xmm6,[rcx]
vpmulhuw xmm2,xmm6,xmm4
vpmulhuw xmm7,xmm6,XMMWORD PTR [rcx]
vpmulhuw xmm7,xmm6,[rcx]
vpmulhrsw xmm2,xmm6,xmm4
vpmulhrsw xmm7,xmm6,XMMWORD PTR [rcx]
vpmulhrsw xmm7,xmm6,[rcx]
vpmulhw xmm2,xmm6,xmm4
vpmulhw xmm7,xmm6,XMMWORD PTR [rcx]
vpmulhw xmm7,xmm6,[rcx]
vpmullw xmm2,xmm6,xmm4
vpmullw xmm7,xmm6,XMMWORD PTR [rcx]
vpmullw xmm7,xmm6,[rcx]
vpmulld xmm2,xmm6,xmm4
vpmulld xmm7,xmm6,XMMWORD PTR [rcx]
vpmulld xmm7,xmm6,[rcx]
vpmuludq xmm2,xmm6,xmm4
vpmuludq xmm7,xmm6,XMMWORD PTR [rcx]
vpmuludq xmm7,xmm6,[rcx]
vpmuldq xmm2,xmm6,xmm4
vpmuldq xmm7,xmm6,XMMWORD PTR [rcx]
vpmuldq xmm7,xmm6,[rcx]
vpor xmm2,xmm6,xmm4
vpor xmm7,xmm6,XMMWORD PTR [rcx]
vpor xmm7,xmm6,[rcx]
vpsadbw xmm2,xmm6,xmm4
vpsadbw xmm7,xmm6,XMMWORD PTR [rcx]
vpsadbw xmm7,xmm6,[rcx]
vpshufb xmm2,xmm6,xmm4
vpshufb xmm7,xmm6,XMMWORD PTR [rcx]
vpshufb xmm7,xmm6,[rcx]
vpsignb xmm2,xmm6,xmm4
vpsignb xmm7,xmm6,XMMWORD PTR [rcx]
vpsignb xmm7,xmm6,[rcx]
vpsignw xmm2,xmm6,xmm4
vpsignw xmm7,xmm6,XMMWORD PTR [rcx]
vpsignw xmm7,xmm6,[rcx]
vpsignd xmm2,xmm6,xmm4
vpsignd xmm7,xmm6,XMMWORD PTR [rcx]
vpsignd xmm7,xmm6,[rcx]
vpsllw xmm2,xmm6,xmm4
vpsllw xmm7,xmm6,XMMWORD PTR [rcx]
vpsllw xmm7,xmm6,[rcx]
vpslld xmm2,xmm6,xmm4
vpslld xmm7,xmm6,XMMWORD PTR [rcx]
vpslld xmm7,xmm6,[rcx]
vpsllq xmm2,xmm6,xmm4
vpsllq xmm7,xmm6,XMMWORD PTR [rcx]
vpsllq xmm7,xmm6,[rcx]
vpsraw xmm2,xmm6,xmm4
vpsraw xmm7,xmm6,XMMWORD PTR [rcx]
vpsraw xmm7,xmm6,[rcx]
vpsrad xmm2,xmm6,xmm4
vpsrad xmm7,xmm6,XMMWORD PTR [rcx]
vpsrad xmm7,xmm6,[rcx]
vpsrlw xmm2,xmm6,xmm4
vpsrlw xmm7,xmm6,XMMWORD PTR [rcx]
vpsrlw xmm7,xmm6,[rcx]
vpsrld xmm2,xmm6,xmm4
vpsrld xmm7,xmm6,XMMWORD PTR [rcx]
vpsrld xmm7,xmm6,[rcx]
vpsrlq xmm2,xmm6,xmm4
vpsrlq xmm7,xmm6,XMMWORD PTR [rcx]
vpsrlq xmm7,xmm6,[rcx]
vpsubb xmm2,xmm6,xmm4
vpsubb xmm7,xmm6,XMMWORD PTR [rcx]
vpsubb xmm7,xmm6,[rcx]
vpsubw xmm2,xmm6,xmm4
vpsubw xmm7,xmm6,XMMWORD PTR [rcx]
vpsubw xmm7,xmm6,[rcx]
vpsubd xmm2,xmm6,xmm4
vpsubd xmm7,xmm6,XMMWORD PTR [rcx]
vpsubd xmm7,xmm6,[rcx]
vpsubq xmm2,xmm6,xmm4
vpsubq xmm7,xmm6,XMMWORD PTR [rcx]
vpsubq xmm7,xmm6,[rcx]
vpsubsb xmm2,xmm6,xmm4
vpsubsb xmm7,xmm6,XMMWORD PTR [rcx]
vpsubsb xmm7,xmm6,[rcx]
vpsubsw xmm2,xmm6,xmm4
vpsubsw xmm7,xmm6,XMMWORD PTR [rcx]
vpsubsw xmm7,xmm6,[rcx]
vpsubusb xmm2,xmm6,xmm4
vpsubusb xmm7,xmm6,XMMWORD PTR [rcx]
vpsubusb xmm7,xmm6,[rcx]
vpsubusw xmm2,xmm6,xmm4
vpsubusw xmm7,xmm6,XMMWORD PTR [rcx]
vpsubusw xmm7,xmm6,[rcx]
vpunpckhbw xmm2,xmm6,xmm4
vpunpckhbw xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhbw xmm7,xmm6,[rcx]
vpunpckhwd xmm2,xmm6,xmm4
vpunpckhwd xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhwd xmm7,xmm6,[rcx]
vpunpckhdq xmm2,xmm6,xmm4
vpunpckhdq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhdq xmm7,xmm6,[rcx]
vpunpckhqdq xmm2,xmm6,xmm4
vpunpckhqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhqdq xmm7,xmm6,[rcx]
vpunpcklbw xmm2,xmm6,xmm4
vpunpcklbw xmm7,xmm6,XMMWORD PTR [rcx]
vpunpcklbw xmm7,xmm6,[rcx]
vpunpcklwd xmm2,xmm6,xmm4
vpunpcklwd xmm7,xmm6,XMMWORD PTR [rcx]
vpunpcklwd xmm7,xmm6,[rcx]
vpunpckldq xmm2,xmm6,xmm4
vpunpckldq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckldq xmm7,xmm6,[rcx]
vpunpcklqdq xmm2,xmm6,xmm4
vpunpcklqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpcklqdq xmm7,xmm6,[rcx]
vpxor xmm2,xmm6,xmm4
vpxor xmm7,xmm6,XMMWORD PTR [rcx]
vpxor xmm7,xmm6,[rcx]
vsubpd xmm2,xmm6,xmm4
vsubpd xmm7,xmm6,XMMWORD PTR [rcx]
vsubpd xmm7,xmm6,[rcx]
vsubps xmm2,xmm6,xmm4
vsubps xmm7,xmm6,XMMWORD PTR [rcx]
vsubps xmm7,xmm6,[rcx]
vunpckhpd xmm2,xmm6,xmm4
vunpckhpd xmm7,xmm6,XMMWORD PTR [rcx]
vunpckhpd xmm7,xmm6,[rcx]
vunpckhps xmm2,xmm6,xmm4
vunpckhps xmm7,xmm6,XMMWORD PTR [rcx]
vunpckhps xmm7,xmm6,[rcx]
vunpcklpd xmm2,xmm6,xmm4
vunpcklpd xmm7,xmm6,XMMWORD PTR [rcx]
vunpcklpd xmm7,xmm6,[rcx]
vunpcklps xmm2,xmm6,xmm4
vunpcklps xmm7,xmm6,XMMWORD PTR [rcx]
vunpcklps xmm7,xmm6,[rcx]
vxorpd xmm2,xmm6,xmm4
vxorpd xmm7,xmm6,XMMWORD PTR [rcx]
vxorpd xmm7,xmm6,[rcx]
vxorps xmm2,xmm6,xmm4
vxorps xmm7,xmm6,XMMWORD PTR [rcx]
vxorps xmm7,xmm6,[rcx]
# AES round operations (VAES 128-bit forms).
vaesenc xmm2,xmm6,xmm4
vaesenc xmm7,xmm6,XMMWORD PTR [rcx]
vaesenc xmm7,xmm6,[rcx]
vaesenclast xmm2,xmm6,xmm4
vaesenclast xmm7,xmm6,XMMWORD PTR [rcx]
vaesenclast xmm7,xmm6,[rcx]
vaesdec xmm2,xmm6,xmm4
vaesdec xmm7,xmm6,XMMWORD PTR [rcx]
vaesdec xmm7,xmm6,[rcx]
vaesdeclast xmm2,xmm6,xmm4
vaesdeclast xmm7,xmm6,XMMWORD PTR [rcx]
vaesdeclast xmm7,xmm6,[rcx]
# vcmp predicate-alias sweep, 128-bit packed double (continues past this chunk).
vcmpeqpd xmm2,xmm6,xmm4
vcmpeqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeqpd xmm7,xmm6,[rcx]
vcmpltpd xmm2,xmm6,xmm4
vcmpltpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpltpd xmm7,xmm6,[rcx]
vcmplepd xmm2,xmm6,xmm4
vcmplepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmplepd xmm7,xmm6,[rcx]
vcmpunordpd xmm2,xmm6,xmm4
vcmpunordpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunordpd xmm7,xmm6,[rcx]
vcmpneqpd xmm2,xmm6,xmm4
vcmpneqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneqpd xmm7,xmm6,[rcx]
vcmpnltpd xmm2,xmm6,xmm4
vcmpnltpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnltpd xmm7,xmm6,[rcx]
vcmpnlepd xmm2,xmm6,xmm4
vcmpnlepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnlepd xmm7,xmm6,[rcx]
vcmpordpd xmm2,xmm6,xmm4
vcmpordpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpordpd xmm7,xmm6,[rcx]
vcmpeq_uqpd xmm2,xmm6,xmm4
vcmpeq_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_uqpd xmm7,xmm6,[rcx]
vcmpngepd xmm2,xmm6,xmm4
vcmpngepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngepd xmm7,xmm6,[rcx]
vcmpngtpd xmm2,xmm6,xmm4
vcmpngtpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngtpd xmm7,xmm6,[rcx]
vcmpfalsepd xmm2,xmm6,xmm4
vcmpfalsepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalsepd xmm7,xmm6,[rcx]
vcmpneq_oqpd xmm2,xmm6,xmm4
vcmpneq_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_oqpd xmm7,xmm6,[rcx]
vcmpgepd xmm2,xmm6,xmm4
vcmpgepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgepd xmm7,xmm6,[rcx]
vcmpgtpd xmm2,xmm6,xmm4
vcmpgtpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgtpd xmm7,xmm6,[rcx]
vcmptruepd xmm2,xmm6,xmm4
vcmptruepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmptruepd xmm7,xmm6,[rcx]
vcmpeq_ospd xmm2,xmm6,xmm4
vcmpeq_ospd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_ospd xmm7,xmm6,[rcx]
vcmplt_oqpd xmm2,xmm6,xmm4
vcmplt_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmplt_oqpd xmm7,xmm6,[rcx]
vcmple_oqpd xmm2,xmm6,xmm4
vcmple_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmple_oqpd xmm7,xmm6,[rcx]
vcmpunord_spd xmm2,xmm6,xmm4
vcmpunord_spd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunord_spd xmm7,xmm6,[rcx]
vcmpneq_uspd xmm2,xmm6,xmm4
vcmpneq_uspd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_uspd xmm7,xmm6,[rcx]
vcmpnlt_uqpd xmm2,xmm6,xmm4
vcmpnlt_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnlt_uqpd xmm7,xmm6,[rcx]
vcmpnle_uqpd xmm2,xmm6,xmm4
vcmpnle_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnle_uqpd xmm7,xmm6,[rcx]
vcmpord_spd xmm2,xmm6,xmm4
vcmpord_spd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpord_spd xmm7,xmm6,[rcx]
vcmpeq_uspd xmm2,xmm6,xmm4
vcmpeq_uspd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_uspd xmm7,xmm6,[rcx]
vcmpnge_uqpd xmm2,xmm6,xmm4
vcmpnge_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnge_uqpd xmm7,xmm6,[rcx]
vcmpngt_uqpd xmm2,xmm6,xmm4
vcmpngt_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngt_uqpd xmm7,xmm6,[rcx]
vcmpfalse_ospd xmm2,xmm6,xmm4
vcmpfalse_ospd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalse_ospd xmm7,xmm6,[rcx]
vcmpneq_ospd xmm2,xmm6,xmm4
vcmpneq_ospd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_ospd xmm7,xmm6,[rcx]
vcmpge_oqpd xmm2,xmm6,xmm4
vcmpge_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpge_oqpd xmm7,xmm6,[rcx]
vcmpgt_oqpd xmm2,xmm6,xmm4
vcmpgt_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgt_oqpd xmm7,xmm6,[rcx]
vcmptrue_uspd xmm2,xmm6,xmm4
vcmptrue_uspd xmm7,xmm6,XMMWORD PTR [rcx]
vcmptrue_uspd xmm7,xmm6,[rcx]
vcmpeqps xmm2,xmm6,xmm4
vcmpeqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeqps xmm7,xmm6,[rcx]
vcmpltps xmm2,xmm6,xmm4
vcmpltps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpltps xmm7,xmm6,[rcx]
vcmpleps xmm2,xmm6,xmm4
vcmpleps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpleps xmm7,xmm6,[rcx]
vcmpunordps xmm2,xmm6,xmm4
vcmpunordps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunordps xmm7,xmm6,[rcx]
vcmpneqps xmm2,xmm6,xmm4
vcmpneqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneqps xmm7,xmm6,[rcx]
vcmpnltps xmm2,xmm6,xmm4
vcmpnltps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnltps xmm7,xmm6,[rcx]
vcmpnleps xmm2,xmm6,xmm4
vcmpnleps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnleps xmm7,xmm6,[rcx]
vcmpordps xmm2,xmm6,xmm4
vcmpordps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpordps xmm7,xmm6,[rcx]
vcmpeq_uqps xmm2,xmm6,xmm4
vcmpeq_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_uqps xmm7,xmm6,[rcx]
vcmpngeps xmm2,xmm6,xmm4
vcmpngeps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngeps xmm7,xmm6,[rcx]
vcmpngtps xmm2,xmm6,xmm4
vcmpngtps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngtps xmm7,xmm6,[rcx]
vcmpfalseps xmm2,xmm6,xmm4
vcmpfalseps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalseps xmm7,xmm6,[rcx]
vcmpneq_oqps xmm2,xmm6,xmm4
vcmpneq_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_oqps xmm7,xmm6,[rcx]
vcmpgeps xmm2,xmm6,xmm4
vcmpgeps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgeps xmm7,xmm6,[rcx]
vcmpgtps xmm2,xmm6,xmm4
vcmpgtps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgtps xmm7,xmm6,[rcx]
vcmptrueps xmm2,xmm6,xmm4
vcmptrueps xmm7,xmm6,XMMWORD PTR [rcx]
vcmptrueps xmm7,xmm6,[rcx]
vcmpeq_osps xmm2,xmm6,xmm4
vcmpeq_osps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_osps xmm7,xmm6,[rcx]
vcmplt_oqps xmm2,xmm6,xmm4
vcmplt_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmplt_oqps xmm7,xmm6,[rcx]
vcmple_oqps xmm2,xmm6,xmm4
vcmple_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmple_oqps xmm7,xmm6,[rcx]
vcmpunord_sps xmm2,xmm6,xmm4
vcmpunord_sps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunord_sps xmm7,xmm6,[rcx]
vcmpneq_usps xmm2,xmm6,xmm4
vcmpneq_usps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_usps xmm7,xmm6,[rcx]
vcmpnlt_uqps xmm2,xmm6,xmm4
vcmpnlt_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnlt_uqps xmm7,xmm6,[rcx]
vcmpnle_uqps xmm2,xmm6,xmm4
vcmpnle_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnle_uqps xmm7,xmm6,[rcx]
vcmpord_sps xmm2,xmm6,xmm4
vcmpord_sps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpord_sps xmm7,xmm6,[rcx]
vcmpeq_usps xmm2,xmm6,xmm4
vcmpeq_usps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_usps xmm7,xmm6,[rcx]
vcmpnge_uqps xmm2,xmm6,xmm4
vcmpnge_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnge_uqps xmm7,xmm6,[rcx]
vcmpngt_uqps xmm2,xmm6,xmm4
vcmpngt_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngt_uqps xmm7,xmm6,[rcx]
vcmpfalse_osps xmm2,xmm6,xmm4
vcmpfalse_osps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalse_osps xmm7,xmm6,[rcx]
vcmpneq_osps xmm2,xmm6,xmm4
vcmpneq_osps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_osps xmm7,xmm6,[rcx]
vcmpge_oqps xmm2,xmm6,xmm4
vcmpge_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpge_oqps xmm7,xmm6,[rcx]
vcmpgt_oqps xmm2,xmm6,xmm4
vcmpgt_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgt_oqps xmm7,xmm6,[rcx]
vcmptrue_usps xmm2,xmm6,xmm4
vcmptrue_usps xmm7,xmm6,XMMWORD PTR [rcx]
vcmptrue_usps xmm7,xmm6,[rcx]
vgf2p8mulb xmm6, xmm5, xmm4
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rcx]
vgf2p8mulb xmm6, xmm5, [rcx]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx+2032]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx+2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx-2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx-2064]
# Tests for op mem128, xmm, xmm
# NOTE(review): the section headers throughout this file list operands in
# AT&T (src, dst) order, while the instructions themselves are written in
# Intel syntax (dst first).  Each memory form is tested twice: once with an
# explicit size keyword (XMMWORD PTR) and once letting the assembler infer
# the size from the register operands.
vmaskmovps xmm6,xmm4,XMMWORD PTR [rcx]
vmaskmovps xmm6,xmm4,[rcx]
vmaskmovpd xmm6,xmm4,XMMWORD PTR [rcx]
vmaskmovpd xmm6,xmm4,[rcx]
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist xmm6,xmm4,7
vaeskeygenassist xmm6,XMMWORD PTR [rcx],7
vaeskeygenassist xmm6,[rcx],7
vpcmpestri xmm6,xmm4,7
vpcmpestri xmm6,XMMWORD PTR [rcx],7
vpcmpestri xmm6,[rcx],7
vpcmpestrm xmm6,xmm4,7
vpcmpestrm xmm6,XMMWORD PTR [rcx],7
vpcmpestrm xmm6,[rcx],7
vpcmpistri xmm6,xmm4,7
vpcmpistri xmm6,XMMWORD PTR [rcx],7
vpcmpistri xmm6,[rcx],7
vpcmpistrm xmm6,xmm4,7
vpcmpistrm xmm6,XMMWORD PTR [rcx],7
vpcmpistrm xmm6,[rcx],7
vpermilpd xmm6,xmm4,7
vpermilpd xmm6,XMMWORD PTR [rcx],7
vpermilpd xmm6,[rcx],7
vpermilps xmm6,xmm4,7
vpermilps xmm6,XMMWORD PTR [rcx],7
vpermilps xmm6,[rcx],7
vpshufd xmm6,xmm4,7
vpshufd xmm6,XMMWORD PTR [rcx],7
vpshufd xmm6,[rcx],7
vpshufhw xmm6,xmm4,7
vpshufhw xmm6,XMMWORD PTR [rcx],7
vpshufhw xmm6,[rcx],7
vpshuflw xmm6,xmm4,7
vpshuflw xmm6,XMMWORD PTR [rcx],7
vpshuflw xmm6,[rcx],7
vroundpd xmm6,xmm4,7
vroundpd xmm6,XMMWORD PTR [rcx],7
vroundpd xmm6,[rcx],7
vroundps xmm6,xmm4,7
vroundps xmm6,XMMWORD PTR [rcx],7
vroundps xmm6,[rcx],7
# Tests for op xmm, xmm, mem128
# Store forms of the masked moves: destination is the memory operand.
vmaskmovps XMMWORD PTR [rcx],xmm6,xmm4
vmaskmovps [rcx],xmm6,xmm4
vmaskmovpd XMMWORD PTR [rcx],xmm6,xmm4
vmaskmovpd [rcx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
# Three-source VEX forms with an immediate selector/control byte.
vblendpd xmm2,xmm6,xmm4,7
vblendpd xmm2,xmm6,XMMWORD PTR [rcx],7
vblendpd xmm2,xmm6,[rcx],7
vblendps xmm2,xmm6,xmm4,7
vblendps xmm2,xmm6,XMMWORD PTR [rcx],7
vblendps xmm2,xmm6,[rcx],7
vcmppd xmm2,xmm6,xmm4,7
vcmppd xmm2,xmm6,XMMWORD PTR [rcx],7
vcmppd xmm2,xmm6,[rcx],7
vcmpps xmm2,xmm6,xmm4,7
vcmpps xmm2,xmm6,XMMWORD PTR [rcx],7
vcmpps xmm2,xmm6,[rcx],7
vdppd xmm2,xmm6,xmm4,7
vdppd xmm2,xmm6,XMMWORD PTR [rcx],7
vdppd xmm2,xmm6,[rcx],7
vdpps xmm2,xmm6,xmm4,7
vdpps xmm2,xmm6,XMMWORD PTR [rcx],7
vdpps xmm2,xmm6,[rcx],7
vmpsadbw xmm2,xmm6,xmm4,7
vmpsadbw xmm2,xmm6,XMMWORD PTR [rcx],7
vmpsadbw xmm2,xmm6,[rcx],7
vpalignr xmm2,xmm6,xmm4,7
vpalignr xmm2,xmm6,XMMWORD PTR [rcx],7
vpalignr xmm2,xmm6,[rcx],7
vpblendw xmm2,xmm6,xmm4,7
vpblendw xmm2,xmm6,XMMWORD PTR [rcx],7
vpblendw xmm2,xmm6,[rcx],7
vpclmulqdq xmm2,xmm6,xmm4,7
vpclmulqdq xmm2,xmm6,XMMWORD PTR [rcx],7
vpclmulqdq xmm2,xmm6,[rcx],7
vshufpd xmm2,xmm6,xmm4,7
vshufpd xmm2,xmm6,XMMWORD PTR [rcx],7
vshufpd xmm2,xmm6,[rcx],7
vshufps xmm2,xmm6,xmm4,7
vshufps xmm2,xmm6,XMMWORD PTR [rcx],7
vshufps xmm2,xmm6,[rcx],7
# GFNI affine forms also exercise displacement boundaries around +/-2048
# (the compressed-disp8 limits) and an index register with scale.
vgf2p8affineqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineqb xmm6, xmm5, xmm4, 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rcx], 123
vgf2p8affineqb xmm6, xmm5, [rcx], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx+2032], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx+2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx-2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx-2064], 123
vgf2p8affineinvqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineinvqb xmm6, xmm5, xmm4, 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rcx], 123
vgf2p8affineinvqb xmm6, xmm5, [rcx], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx+2032], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx+2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx-2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx-2064], 123
# Tests for op xmm, xmm/mem128, xmm, xmm
# Four-operand forms where the fourth source register is encoded in imm8[7:4].
vblendvpd xmm7,xmm2,xmm6,xmm4
vblendvpd xmm7,xmm2,XMMWORD PTR [rcx],xmm4
vblendvpd xmm7,xmm2,[rcx],xmm4
vblendvps xmm7,xmm2,xmm6,xmm4
vblendvps xmm7,xmm2,XMMWORD PTR [rcx],xmm4
vblendvps xmm7,xmm2,[rcx],xmm4
vpblendvb xmm7,xmm2,xmm6,xmm4
vpblendvb xmm7,xmm2,XMMWORD PTR [rcx],xmm4
vpblendvb xmm7,xmm2,[rcx],xmm4
# Tests for op mem64, ymm
vbroadcastsd ymm4,QWORD PTR [rcx]
vbroadcastsd ymm4,[rcx]
# Tests for op xmm/mem64, xmm
# Instructions whose memory form reads only 64 bits even though the
# register form reads an xmm register.
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [rcx]
vcomisd xmm4,[rcx]
vcvtdq2pd xmm6,xmm4
vcvtdq2pd xmm4,QWORD PTR [rcx]
vcvtdq2pd xmm4,[rcx]
vcvtps2pd xmm6,xmm4
vcvtps2pd xmm4,QWORD PTR [rcx]
vcvtps2pd xmm4,[rcx]
vmovddup xmm6,xmm4
vmovddup xmm4,QWORD PTR [rcx]
vmovddup xmm4,[rcx]
vpmovsxbw xmm6,xmm4
vpmovsxbw xmm4,QWORD PTR [rcx]
vpmovsxbw xmm4,[rcx]
vpmovsxwd xmm6,xmm4
vpmovsxwd xmm4,QWORD PTR [rcx]
vpmovsxwd xmm4,[rcx]
vpmovsxdq xmm6,xmm4
vpmovsxdq xmm4,QWORD PTR [rcx]
vpmovsxdq xmm4,[rcx]
vpmovzxbw xmm6,xmm4
vpmovzxbw xmm4,QWORD PTR [rcx]
vpmovzxbw xmm4,[rcx]
vpmovzxwd xmm6,xmm4
vpmovzxwd xmm4,QWORD PTR [rcx]
vpmovzxwd xmm4,[rcx]
vpmovzxdq xmm6,xmm4
vpmovzxdq xmm4,QWORD PTR [rcx]
vpmovzxdq xmm4,[rcx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [rcx]
vucomisd xmm4,[rcx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [rcx]
vmovsd xmm4,[rcx]
# Tests for op xmm, mem64
# 64-bit store forms (low or high half of the xmm source).
vmovlpd QWORD PTR [rcx],xmm4
vmovlpd [rcx],xmm4
vmovlps QWORD PTR [rcx],xmm4
vmovlps [rcx],xmm4
vmovhpd QWORD PTR [rcx],xmm4
vmovhpd [rcx],xmm4
vmovhps QWORD PTR [rcx],xmm4
vmovhps [rcx],xmm4
vmovsd QWORD PTR [rcx],xmm4
vmovsd [rcx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
# GPR<->XMM moves; vmovd with a 64-bit register exercises the assembler's
# acceptance of the REX.W (vmovq) form under the vmovd mnemonic.
vmovd rcx,xmm4
vmovd xmm4,rcx
vmovd [rcx],xmm4
vmovd xmm4,[rcx]
vmovq rcx,xmm4
vmovq xmm4,rcx
vmovq QWORD PTR [rcx],xmm4
vmovq xmm4,QWORD PTR [rcx]
vmovq [rcx],xmm4
vmovq xmm4,[rcx]
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [rcx]
vcvtsd2si ecx,[rcx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [rcx]
vcvttsd2si ecx,[rcx]
# Tests for op xmm/mem64, regq
vcvtsd2si rcx,xmm4
vcvtsd2si rcx,QWORD PTR [rcx]
vcvtsd2si rcx,[rcx]
vcvttsd2si rcx,xmm4
vcvttsd2si rcx,QWORD PTR [rcx]
vcvttsd2si rcx,[rcx]
# Tests for op regq/mem64, xmm, xmm
# The 'q' suffix selects the 64-bit (REX.W) integer-source conversions.
vcvtsi2sdq xmm6,xmm4,rcx
vcvtsi2sdq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2sdq xmm6,xmm4,[rcx]
vcvtsi2ssq xmm6,xmm4,rcx
vcvtsi2ssq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2ssq xmm6,xmm4,[rcx]
# Tests for op imm8, regq/mem64, xmm, xmm
vpinsrq xmm6,xmm4,rcx,7
vpinsrq xmm6,xmm4,QWORD PTR [rcx],7
vpinsrq xmm6,xmm4,[rcx],7
# Tests for op imm8, xmm, regq/mem64
vpextrq rcx,xmm4,7
vpextrq QWORD PTR [rcx],xmm4,7
vpextrq [rcx],xmm4,7
# Tests for op mem64, xmm, xmm
vmovlpd xmm6,xmm4,QWORD PTR [rcx]
vmovlpd xmm6,xmm4,[rcx]
vmovlps xmm6,xmm4,QWORD PTR [rcx]
vmovlps xmm6,xmm4,[rcx]
vmovhpd xmm6,xmm4,QWORD PTR [rcx]
vmovhpd xmm6,xmm4,[rcx]
vmovhps xmm6,xmm4,QWORD PTR [rcx]
vmovhps xmm6,xmm4,[rcx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [rcx],7
vcmpsd xmm2,xmm6,[rcx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [rcx],7
vroundsd xmm2,xmm6,[rcx],7
# Tests for op xmm/mem64, xmm, xmm
# Scalar double-precision arithmetic, then every vcmpsd pseudo-op
# (each pseudo-op encodes one of the 32 immediate comparison predicates).
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [rcx]
vaddsd xmm2,xmm6,[rcx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [rcx]
vcvtsd2ss xmm2,xmm6,[rcx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [rcx]
vdivsd xmm2,xmm6,[rcx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [rcx]
vmaxsd xmm2,xmm6,[rcx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [rcx]
vminsd xmm2,xmm6,[rcx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [rcx]
vmulsd xmm2,xmm6,[rcx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [rcx]
vsqrtsd xmm2,xmm6,[rcx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [rcx]
vsubsd xmm2,xmm6,[rcx]
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeqsd xmm2,xmm6,[rcx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpltsd xmm2,xmm6,[rcx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [rcx]
vcmplesd xmm2,xmm6,[rcx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpunordsd xmm2,xmm6,[rcx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneqsd xmm2,xmm6,[rcx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnltsd xmm2,xmm6,[rcx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlesd xmm2,xmm6,[rcx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpordsd xmm2,xmm6,[rcx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_uqsd xmm2,xmm6,[rcx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [rcx]
vcmpngesd xmm2,xmm6,[rcx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngtsd xmm2,xmm6,[rcx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalsesd xmm2,xmm6,[rcx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_oqsd xmm2,xmm6,[rcx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [rcx]
vcmpgesd xmm2,xmm6,[rcx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgtsd xmm2,xmm6,[rcx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [rcx]
vcmptruesd xmm2,xmm6,[rcx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ossd xmm2,xmm6,[rcx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmplt_oqsd xmm2,xmm6,[rcx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmple_oqsd xmm2,xmm6,[rcx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpunord_ssd xmm2,xmm6,[rcx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ussd xmm2,xmm6,[rcx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlt_uqsd xmm2,xmm6,[rcx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnle_uqsd xmm2,xmm6,[rcx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpord_ssd xmm2,xmm6,[rcx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ussd xmm2,xmm6,[rcx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnge_uqsd xmm2,xmm6,[rcx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngt_uqsd xmm2,xmm6,[rcx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalse_ossd xmm2,xmm6,[rcx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ossd xmm2,xmm6,[rcx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpge_oqsd xmm2,xmm6,[rcx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgt_oqsd xmm2,xmm6,[rcx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmptrue_ussd xmm2,xmm6,[rcx]
# Tests for op mem32 (vldmxcsr/vstmxcsr operate on the 32-bit MXCSR)
vldmxcsr DWORD PTR [rcx]
vldmxcsr [rcx]
vstmxcsr DWORD PTR [rcx]
vstmxcsr [rcx]
# Tests for op xmm/mem32, xmm, xmm
# Scalar single-precision arithmetic, then every vcmpss pseudo-op
# (same 32 comparison predicates as the vcmpsd block above).
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [rcx]
vaddss xmm2,xmm6,[rcx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [rcx]
vcvtss2sd xmm2,xmm6,[rcx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [rcx]
vdivss xmm2,xmm6,[rcx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [rcx]
vmaxss xmm2,xmm6,[rcx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [rcx]
vminss xmm2,xmm6,[rcx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [rcx]
vmulss xmm2,xmm6,[rcx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [rcx]
vrcpss xmm2,xmm6,[rcx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [rcx]
vrsqrtss xmm2,xmm6,[rcx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [rcx]
vsqrtss xmm2,xmm6,[rcx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [rcx]
vsubss xmm2,xmm6,[rcx]
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeqss xmm2,xmm6,[rcx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [rcx]
vcmpltss xmm2,xmm6,[rcx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [rcx]
vcmpless xmm2,xmm6,[rcx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [rcx]
vcmpunordss xmm2,xmm6,[rcx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneqss xmm2,xmm6,[rcx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [rcx]
vcmpnltss xmm2,xmm6,[rcx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [rcx]
vcmpnless xmm2,xmm6,[rcx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [rcx]
vcmpordss xmm2,xmm6,[rcx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_uqss xmm2,xmm6,[rcx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [rcx]
vcmpngess xmm2,xmm6,[rcx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [rcx]
vcmpngtss xmm2,xmm6,[rcx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [rcx]
vcmpfalsess xmm2,xmm6,[rcx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_oqss xmm2,xmm6,[rcx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [rcx]
vcmpgess xmm2,xmm6,[rcx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [rcx]
vcmpgtss xmm2,xmm6,[rcx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [rcx]
vcmptruess xmm2,xmm6,[rcx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_osss xmm2,xmm6,[rcx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmplt_oqss xmm2,xmm6,[rcx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmple_oqss xmm2,xmm6,[rcx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpunord_sss xmm2,xmm6,[rcx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_usss xmm2,xmm6,[rcx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnlt_uqss xmm2,xmm6,[rcx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnle_uqss xmm2,xmm6,[rcx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpord_sss xmm2,xmm6,[rcx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_usss xmm2,xmm6,[rcx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnge_uqss xmm2,xmm6,[rcx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpngt_uqss xmm2,xmm6,[rcx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpfalse_osss xmm2,xmm6,[rcx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_osss xmm2,xmm6,[rcx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpge_oqss xmm2,xmm6,[rcx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpgt_oqss xmm2,xmm6,[rcx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [rcx]
vcmptrue_usss xmm2,xmm6,[rcx]
# Tests for op mem32, ymm
vbroadcastss ymm4,DWORD PTR [rcx]
vbroadcastss ymm4,[rcx]
# Tests for op xmm/mem32, xmm
# Memory form reads only 32 bits.
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [rcx]
vcomiss xmm4,[rcx]
vpmovsxbd xmm6,xmm4
vpmovsxbd xmm4,DWORD PTR [rcx]
vpmovsxbd xmm4,[rcx]
vpmovsxwq xmm6,xmm4
vpmovsxwq xmm4,DWORD PTR [rcx]
vpmovsxwq xmm4,[rcx]
vpmovzxbd xmm6,xmm4
vpmovzxbd xmm4,DWORD PTR [rcx]
vpmovzxbd xmm4,[rcx]
vpmovzxwq xmm6,xmm4
vpmovzxwq xmm4,DWORD PTR [rcx]
vpmovzxwq xmm4,[rcx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [rcx]
vucomiss xmm4,[rcx]
# Tests for op mem32, xmm
vbroadcastss xmm4,DWORD PTR [rcx]
vbroadcastss xmm4,[rcx]
vmovss xmm4,DWORD PTR [rcx]
vmovss xmm4,[rcx]
# Tests for op xmm, mem32
vmovss DWORD PTR [rcx],xmm4
vmovss [rcx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd ecx,xmm4
vmovd DWORD PTR [rcx],xmm4
vmovd xmm4,ecx
vmovd xmm4,DWORD PTR [rcx]
vmovd [rcx],xmm4
vmovd xmm4,[rcx]
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [rcx]
vcvtss2si ecx,[rcx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [rcx]
vcvttss2si ecx,[rcx]
# Tests for op xmm/mem32, regq
vcvtss2si rcx,xmm4
vcvtss2si rcx,DWORD PTR [rcx]
vcvtss2si rcx,[rcx]
vcvttss2si rcx,xmm4
vcvttss2si rcx,DWORD PTR [rcx]
vcvttss2si rcx,[rcx]
# Tests for op xmm, regq
# Mask extractions into a 64-bit GPR (result zero-extended).
vmovmskpd rcx,xmm4
vmovmskps rcx,xmm4
vpmovmskb rcx,xmm4
# Tests for op imm8, xmm, regq/mem32
vextractps rcx,xmm4,7
vextractps DWORD PTR [rcx],xmm4,7
vextractps [rcx],xmm4,7
# Tests for op imm8, xmm, regl/mem32
vpextrd ecx,xmm4,7
vpextrd DWORD PTR [rcx],xmm4,7
vpextrd [rcx],xmm4,7
vextractps ecx,xmm4,7
vextractps DWORD PTR [rcx],xmm4,7
vextractps [rcx],xmm4,7
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd xmm6,xmm4,ecx,7
vpinsrd xmm6,xmm4,DWORD PTR [rcx],7
vpinsrd xmm6,xmm4,[rcx],7
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [rcx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [rcx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [rcx],7
vcmpss xmm2,xmm6,[rcx],7
vinsertps xmm2,xmm6,xmm4,7
vinsertps xmm2,xmm6,DWORD PTR [rcx],7
vinsertps xmm2,xmm6,[rcx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [rcx],7
vroundss xmm2,xmm6,[rcx],7
# Tests for op xmm/m16, xmm
vpmovsxbq xmm6,xmm4
vpmovsxbq xmm4,WORD PTR [rcx]
vpmovsxbq xmm4,[rcx]
vpmovzxbq xmm6,xmm4
vpmovzxbq xmm4,WORD PTR [rcx]
vpmovzxbq xmm4,[rcx]
# Tests for op imm8, xmm, regl/mem16
vpextrw ecx,xmm4,7
vpextrw WORD PTR [rcx],xmm4,7
vpextrw [rcx],xmm4,7
# Tests for op imm8, xmm, regq/mem16
vpextrw rcx,xmm4,7
vpextrw WORD PTR [rcx],xmm4,7
vpextrw [rcx],xmm4,7
# Tests for op imm8, regl/mem16, xmm, xmm
# Both 32- and 64-bit GPR source forms are accepted for vpinsrw.
vpinsrw xmm6,xmm4,ecx,7
vpinsrw xmm6,xmm4,WORD PTR [rcx],7
vpinsrw xmm6,xmm4,[rcx],7
vpinsrw xmm6,xmm4,rcx,7
vpinsrw xmm6,xmm4,WORD PTR [rcx],7
vpinsrw xmm6,xmm4,[rcx],7
# Tests for op imm8, xmm, regl/mem8
vpextrb ecx,xmm4,7
vpextrb BYTE PTR [rcx],xmm4,7
vpextrb [rcx],xmm4,7
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb xmm6,xmm4,ecx,7
vpinsrb xmm6,xmm4,BYTE PTR [rcx],7
vpinsrb xmm6,xmm4,[rcx],7
# Tests for op imm8, xmm, regq
vpextrw rcx,xmm4,7
# Tests for op imm8, xmm, regq/mem8
vpextrb rcx,xmm4,7
vpextrb BYTE PTR [rcx],xmm4,7
vpextrb [rcx],xmm4,7
# Tests for op xmm, xmm
vmaskmovdqu xmm6,xmm4
vmovq xmm6,xmm4
# Tests for op xmm, regl
vmovmskpd ecx,xmm4
vmovmskps ecx,xmm4
vpmovmskb ecx,xmm4
# Tests for op xmm, xmm, xmm
vmovhlps xmm2,xmm6,xmm4
vmovlhps xmm2,xmm6,xmm4
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
# Tests for op imm8, xmm, xmm
# Immediate shift forms (source register shifted into the destination).
vpslld xmm6,xmm4,7
vpslldq xmm6,xmm4,7
vpsllq xmm6,xmm4,7
vpsllw xmm6,xmm4,7
vpsrad xmm6,xmm4,7
vpsraw xmm6,xmm4,7
vpsrld xmm6,xmm4,7
vpsrldq xmm6,xmm4,7
vpsrlq xmm6,xmm4,7
vpsrlw xmm6,xmm4,7
# Tests for op imm8, xmm, regl
vpextrw ecx,xmm4,7
# Tests for op ymm, regl
vmovmskpd ecx,ymm4
vmovmskps ecx,ymm4
# Tests for op ymm, regq
vmovmskpd rcx,ymm4
vmovmskps rcx,ymm4
# Default instructions without suffixes.
# Register operand size disambiguates the 128- vs 256-bit source forms.
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm6,ymm4
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm6,ymm4
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm6,ymm4
#Tests with different memory and register operands.
vldmxcsr DWORD PTR ds:0x12345678
vmovdqa xmm8,XMMWORD PTR ds:0x12345678
vmovdqa XMMWORD PTR ds:0x12345678,xmm8
vmovd DWORD PTR ds:0x12345678,xmm8
vcvtsd2si r8d,QWORD PTR ds:0x12345678
vcvtdq2pd ymm8,XMMWORD PTR ds:0x12345678
vcvtpd2ps xmm8,YMMWORD PTR ds:0x12345678
vpavgb xmm15,xmm8,XMMWORD PTR ds:0x12345678
vaeskeygenassist xmm8,XMMWORD PTR ds:0x12345678,7
vpextrb ds:0x12345678,xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR ds:0x12345678
vpclmulqdq xmm15,xmm8,XMMWORD PTR ds:0x12345678,7
vblendvps xmm14,xmm12,XMMWORD PTR ds:0x12345678,xmm8
vpinsrb xmm15,xmm8,ds:0x12345678,7
vmovdqa ymm8,YMMWORD PTR ds:0x12345678
vmovdqa YMMWORD PTR ds:0x12345678,ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR ds:0x12345678
vroundpd ymm8,YMMWORD PTR ds:0x12345678,7
vextractf128 XMMWORD PTR ds:0x12345678,ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR ds:0x12345678,7
vblendvpd ymm14,ymm12,YMMWORD PTR ds:0x12345678,ymm8
vldmxcsr DWORD PTR [rbp]
vmovdqa xmm8,XMMWORD PTR [rbp]
vmovdqa XMMWORD PTR [rbp],xmm8
vmovd DWORD PTR [rbp],xmm8
vcvtsd2si r8d,QWORD PTR [rbp]
vcvtdq2pd ymm8,XMMWORD PTR [rbp]
vcvtpd2ps xmm8,YMMWORD PTR [rbp]
vpavgb xmm15,xmm8,XMMWORD PTR [rbp]
vaeskeygenassist xmm8,XMMWORD PTR [rbp],7
vpextrb [rbp],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbp],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbp],xmm8
vpinsrb xmm15,xmm8,[rbp],7
vmovdqa ymm8,YMMWORD PTR [rbp]
vmovdqa YMMWORD PTR [rbp],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbp]
vroundpd ymm8,YMMWORD PTR [rbp],7
vextractf128 XMMWORD PTR [rbp],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbp],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbp],ymm8
vldmxcsr DWORD PTR [rbp+0x99]
vmovdqa xmm8,XMMWORD PTR [rbp+0x99]
vmovdqa XMMWORD PTR [rbp+0x99],xmm8
vmovd DWORD PTR [rbp+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbp+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbp+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbp+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbp+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbp+0x99],7
vpextrb [rbp+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbp+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbp+0x99],xmm8
vpinsrb xmm15,xmm8,[rbp+0x99],7
vmovdqa ymm8,YMMWORD PTR [rbp+0x99]
vmovdqa YMMWORD PTR [rbp+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbp+0x99]
vroundpd ymm8,YMMWORD PTR [rbp+0x99],7
vextractf128 XMMWORD PTR [rbp+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbp+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbp+0x99],ymm8
vldmxcsr DWORD PTR [r15+0x99]
vmovdqa xmm8,XMMWORD PTR [r15+0x99]
vmovdqa XMMWORD PTR [r15+0x99],xmm8
vmovd DWORD PTR [r15+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r15+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r15+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r15+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r15+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r15+0x99],7
vpextrb [r15+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r15+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r15+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r15+0x99],xmm8
vpinsrb xmm15,xmm8,[r15+0x99],7
vmovdqa ymm8,YMMWORD PTR [r15+0x99]
vmovdqa YMMWORD PTR [r15+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r15+0x99]
vroundpd ymm8,YMMWORD PTR [r15+0x99],7
vextractf128 XMMWORD PTR [r15+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r15+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r15+0x99],ymm8
vldmxcsr DWORD PTR [rip+0x99]
vmovdqa xmm8,XMMWORD PTR [rip+0x99]
vmovdqa XMMWORD PTR [rip+0x99],xmm8
vmovd DWORD PTR [rip+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rip+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rip+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rip+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rip+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rip+0x99],7
vpextrb [rip+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rip+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rip+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rip+0x99],xmm8
vpinsrb xmm15,xmm8,[rip+0x99],7
vmovdqa ymm8,YMMWORD PTR [rip+0x99]
vmovdqa YMMWORD PTR [rip+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rip+0x99]
vroundpd ymm8,YMMWORD PTR [rip+0x99],7
vextractf128 XMMWORD PTR [rip+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rip+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rip+0x99],ymm8
vldmxcsr DWORD PTR [rsp+0x99]
vmovdqa xmm8,XMMWORD PTR [rsp+0x99]
vmovdqa XMMWORD PTR [rsp+0x99],xmm8
vmovd DWORD PTR [rsp+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rsp+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rsp+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rsp+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rsp+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rsp+0x99],7
vpextrb [rsp+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rsp+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rsp+0x99],xmm8
vpinsrb xmm15,xmm8,[rsp+0x99],7
vmovdqa ymm8,YMMWORD PTR [rsp+0x99]
vmovdqa YMMWORD PTR [rsp+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rsp+0x99]
vroundpd ymm8,YMMWORD PTR [rsp+0x99],7
vextractf128 XMMWORD PTR [rsp+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rsp+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rsp+0x99],ymm8
vldmxcsr DWORD PTR [r12+0x99]
vmovdqa xmm8,XMMWORD PTR [r12+0x99]
vmovdqa XMMWORD PTR [r12+0x99],xmm8
vmovd DWORD PTR [r12+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r12+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r12+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r12+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r12+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r12+0x99],7
vpextrb [r12+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r12+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r12+0x99],xmm8
vpinsrb xmm15,xmm8,[r12+0x99],7
vmovdqa ymm8,YMMWORD PTR [r12+0x99]
vmovdqa YMMWORD PTR [r12+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r12+0x99]
vroundpd ymm8,YMMWORD PTR [r12+0x99],7
vextractf128 XMMWORD PTR [r12+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r12+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r12+0x99],ymm8
vldmxcsr DWORD PTR [riz*1-0x99]
vmovdqa xmm8,XMMWORD PTR [riz*1-0x99]
vmovdqa XMMWORD PTR [riz*1-0x99],xmm8
vmovd DWORD PTR [riz*1-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [riz*1-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [riz*1-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [riz*1-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [riz*1-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [riz*1-0x99],7
vpextrb [riz*1-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*1-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [riz*1-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [riz*1-0x99],xmm8
vpinsrb xmm15,xmm8,[riz*1-0x99],7
vmovdqa ymm8,YMMWORD PTR [riz*1-0x99]
vmovdqa YMMWORD PTR [riz*1-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [riz*1-0x99]
vroundpd ymm8,YMMWORD PTR [riz*1-0x99],7
vextractf128 XMMWORD PTR [riz*1-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [riz*1-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [riz*1-0x99],ymm8
vldmxcsr DWORD PTR [riz*2-0x99]
vmovdqa xmm8,XMMWORD PTR [riz*2-0x99]
vmovdqa XMMWORD PTR [riz*2-0x99],xmm8
vmovd DWORD PTR [riz*2-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [riz*2-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [riz*2-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [riz*2-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [riz*2-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [riz*2-0x99],7
vpextrb [riz*2-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*2-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [riz*2-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [riz*2-0x99],xmm8
vpinsrb xmm15,xmm8,[riz*2-0x99],7
vmovdqa ymm8,YMMWORD PTR [riz*2-0x99]
vmovdqa YMMWORD PTR [riz*2-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [riz*2-0x99]
vroundpd ymm8,YMMWORD PTR [riz*2-0x99],7
vextractf128 XMMWORD PTR [riz*2-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [riz*2-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [riz*2-0x99],ymm8
vldmxcsr DWORD PTR [rbx+riz*1-0x99]
vmovdqa xmm8,XMMWORD PTR [rbx+riz*1-0x99]
vmovdqa XMMWORD PTR [rbx+riz*1-0x99],xmm8
vmovd DWORD PTR [rbx+riz*1-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbx+riz*1-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbx+riz*1-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbx+riz*1-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbx+riz*1-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbx+riz*1-0x99],7
vpextrb [rbx+riz*1-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*1-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbx+riz*1-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbx+riz*1-0x99],xmm8
vpinsrb xmm15,xmm8,[rbx+riz*1-0x99],7
vmovdqa ymm8,YMMWORD PTR [rbx+riz*1-0x99]
vmovdqa YMMWORD PTR [rbx+riz*1-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbx+riz*1-0x99]
vroundpd ymm8,YMMWORD PTR [rbx+riz*1-0x99],7
vextractf128 XMMWORD PTR [rbx+riz*1-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbx+riz*1-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbx+riz*1-0x99],ymm8
vldmxcsr DWORD PTR [rbx+riz*2-0x99]
vmovdqa xmm8,XMMWORD PTR [rbx+riz*2-0x99]
vmovdqa XMMWORD PTR [rbx+riz*2-0x99],xmm8
vmovd DWORD PTR [rbx+riz*2-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbx+riz*2-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbx+riz*2-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbx+riz*2-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbx+riz*2-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbx+riz*2-0x99],7
vpextrb [rbx+riz*2-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*2-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbx+riz*2-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbx+riz*2-0x99],xmm8
vpinsrb xmm15,xmm8,[rbx+riz*2-0x99],7
vmovdqa ymm8,YMMWORD PTR [rbx+riz*2-0x99]
vmovdqa YMMWORD PTR [rbx+riz*2-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbx+riz*2-0x99]
vroundpd ymm8,YMMWORD PTR [rbx+riz*2-0x99],7
vextractf128 XMMWORD PTR [rbx+riz*2-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbx+riz*2-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbx+riz*2-0x99],ymm8
vldmxcsr DWORD PTR [r12+r15*4-0x99]
vmovdqa xmm8,XMMWORD PTR [r12+r15*4-0x99]
vmovdqa XMMWORD PTR [r12+r15*4-0x99],xmm8
vmovd DWORD PTR [r12+r15*4-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r12+r15*4-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r12+r15*4-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r12+r15*4-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r12+r15*4-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r12+r15*4-0x99],7
vpextrb [r12+r15*4-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+r15*4-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r12+r15*4-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r12+r15*4-0x99],xmm8
vpinsrb xmm15,xmm8,[r12+r15*4-0x99],7
vmovdqa ymm8,YMMWORD PTR [r12+r15*4-0x99]
vmovdqa YMMWORD PTR [r12+r15*4-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r12+r15*4-0x99]
vroundpd ymm8,YMMWORD PTR [r12+r15*4-0x99],7
vextractf128 XMMWORD PTR [r12+r15*4-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r12+r15*4-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r12+r15*4-0x99],ymm8
vldmxcsr DWORD PTR [r8+r15*8-0x99]
vmovdqa xmm8,XMMWORD PTR [r8+r15*8-0x99]
vmovdqa XMMWORD PTR [r8+r15*8-0x99],xmm8
vmovd DWORD PTR [r8+r15*8-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r8+r15*8-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r8+r15*8-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r8+r15*8-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r8+r15*8-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r8+r15*8-0x99],7
vpextrb [r8+r15*8-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r8+r15*8-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r8+r15*8-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r8+r15*8-0x99],xmm8
vpinsrb xmm15,xmm8,[r8+r15*8-0x99],7
vmovdqa ymm8,YMMWORD PTR [r8+r15*8-0x99]
vmovdqa YMMWORD PTR [r8+r15*8-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r8+r15*8-0x99]
vroundpd ymm8,YMMWORD PTR [r8+r15*8-0x99],7
vextractf128 XMMWORD PTR [r8+r15*8-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r8+r15*8-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r8+r15*8-0x99],ymm8
vldmxcsr DWORD PTR [rbp+r12*4-0x99]
vmovdqa xmm8,XMMWORD PTR [rbp+r12*4-0x99]
vmovdqa XMMWORD PTR [rbp+r12*4-0x99],xmm8
vmovd DWORD PTR [rbp+r12*4-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbp+r12*4-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbp+r12*4-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbp+r12*4-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbp+r12*4-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbp+r12*4-0x99],7
vpextrb [rbp+r12*4-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+r12*4-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbp+r12*4-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbp+r12*4-0x99],xmm8
vpinsrb xmm15,xmm8,[rbp+r12*4-0x99],7
vmovdqa ymm8,YMMWORD PTR [rbp+r12*4-0x99]
vmovdqa YMMWORD PTR [rbp+r12*4-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbp+r12*4-0x99]
vroundpd ymm8,YMMWORD PTR [rbp+r12*4-0x99],7
vextractf128 XMMWORD PTR [rbp+r12*4-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbp+r12*4-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbp+r12*4-0x99],ymm8
vldmxcsr DWORD PTR [rsp+r13*1-0x99]
vmovdqa xmm8,XMMWORD PTR [rsp+r13*1-0x99]
vmovdqa XMMWORD PTR [rsp+r13*1-0x99],xmm8
vmovd DWORD PTR [rsp+r13*1-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rsp+r13*1-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rsp+r13*1-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rsp+r13*1-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rsp+r13*1-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rsp+r13*1-0x99],7
vpextrb [rsp+r13*1-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+r13*1-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rsp+r13*1-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rsp+r13*1-0x99],xmm8
vpinsrb xmm15,xmm8,[rsp+r13*1-0x99],7
vmovdqa ymm8,YMMWORD PTR [rsp+r13*1-0x99]
vmovdqa YMMWORD PTR [rsp+r13*1-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rsp+r13*1-0x99]
vroundpd ymm8,YMMWORD PTR [rsp+r13*1-0x99],7
vextractf128 XMMWORD PTR [rsp+r13*1-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rsp+r13*1-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rsp+r13*1-0x99],ymm8
# Tests for all register operands.
vmovmskpd r8d,xmm8
vpslld xmm15,xmm8,7
vmovmskps r8d,ymm8
vmovdqa xmm15,xmm8
vmovd r8d,xmm8
vcvtsd2si r8d,xmm8
vcvtdq2pd ymm8,xmm8
vcvtpd2ps xmm8,ymm8
vaeskeygenassist xmm15,xmm8,7
vpextrb r8d,xmm8,7
vcvtsi2sd xmm15,xmm8,r8d
vpclmulqdq xmm12,xmm15,xmm8,7
vblendvps xmm14,xmm12,xmm8,xmm8
vpinsrb xmm15,xmm8,r8d,7
vmovdqa ymm15,ymm8
vpermilpd ymm12,ymm15,ymm8
vroundpd ymm15,ymm8,7
vextractf128 xmm8,ymm8,7
vperm2f128 ymm12,ymm15,ymm8,7
vblendvpd ymm14,ymm12,ymm15,ymm8
vinsertf128 ymm15,ymm8,xmm8,7
# Tests for different memory/register operand
vcvtsd2si r8,QWORD PTR [rcx]
vextractps r8,xmm8,10
vcvtss2si r8,DWORD PTR [rcx]
vpinsrw xmm8,xmm15,r8,7
|
tactcomplabs/xbgas-binutils-gdb
| 1,107
|
gas/testsuite/gas/i386/tlsnopic.s
|
.section ".tdata", "awT", @progbits
.globl baz
.hidden baz
.globl var
.hidden var2
bar: .long 27
baz: .long 29
var: .long 31
var2: .long 33
.text
.globl fn
.type fn,@function
fn:
/* Main binary, no PIC. */
1: movl 1b, %edx
addl $_GLOBAL_OFFSET_TABLE_+[.-1b], %edx
/* foo can be anywhere in startup TLS. */
movl %gs:0, %eax
subl foo@GOTTPOFF(%edx), %eax
/* %eax now contains &foo. */
/* bar only in the main program. */
movl %gs:0, %eax
subl $bar@TPOFF, %eax
/* %eax now contains &bar. */
/* baz only in the main program. */
movl %gs:0, %ecx
/* Arbitrary instructions in between. */
nop
subl $baz@TPOFF, %ecx
/* %ecx now contains &baz. */
/* var and var2 only in the main program. */
movl %gs:0, %ecx
/* Arbitrary instructions in between. */
nop
nop
leal var@NTPOFF(%ecx), %eax
/* Arbitrary instructions in between. */
nop
leal var2@NTPOFF(%ecx), %edx
/* foo can be anywhere in startup TLS. */
movl foo@INDNTPOFF, %eax
movl %gs:(%eax), %eax
/* %eax now contains foo. */
movl %gs:0, %eax
addl foo@INDNTPOFF, %eax
/* %eax now contains &foo. */
ret
|
tactcomplabs/xbgas-binutils-gdb
| 1,345
|
gas/testsuite/gas/i386/sse4_2.s
|
# Streaming SIMD extensions 4.2 Instructions
.text
foo:
crc32 %cl,%ebx
crc32 %cx,%ebx
crc32 %ecx,%ebx
crc32b (%ecx),%ebx
crc32w (%ecx),%ebx
crc32l (%ecx),%ebx
crc32b %cl,%ebx
crc32w %cx,%ebx
crc32l %ecx,%ebx
pcmpgtq (%ecx),%xmm0
pcmpgtq %xmm1,%xmm0
pcmpestri $0x0,(%ecx),%xmm0
pcmpestri $0x0,%xmm1,%xmm0
pcmpestrm $0x1,(%ecx),%xmm0
pcmpestrm $0x1,%xmm1,%xmm0
pcmpistri $0x2,(%ecx),%xmm0
pcmpistri $0x2,%xmm1,%xmm0
pcmpistrm $0x3,(%ecx),%xmm0
pcmpistrm $0x3,%xmm1,%xmm0
popcnt (%ecx),%bx
popcnt (%ecx),%ebx
popcntw (%ecx),%bx
popcntl (%ecx),%ebx
popcnt %cx,%bx
popcnt %ecx,%ebx
popcntw %cx,%bx
popcntl %ecx,%ebx
.intel_syntax noprefix
crc32 ebx,cl
crc32 ebx,cx
crc32 ebx,ecx
crc32 ebx,BYTE PTR [ecx]
crc32 ebx,WORD PTR [ecx]
crc32 ebx,DWORD PTR [ecx]
crc32 ebx,cl
crc32 ebx,cx
crc32 ebx,ecx
pcmpgtq xmm0,XMMWORD PTR [ecx]
pcmpgtq xmm0,xmm1
pcmpestri xmm0,XMMWORD PTR [ecx],0x0
pcmpestri xmm0,xmm1,0x0
pcmpestrm xmm0,XMMWORD PTR [ecx],0x1
pcmpestrm xmm0,xmm1,0x1
pcmpistri xmm0,XMMWORD PTR [ecx],0x2
pcmpistri xmm0,xmm1,0x2
pcmpistrm xmm0,XMMWORD PTR [ecx],0x3
pcmpistrm xmm0,xmm1,0x3
popcnt bx,WORD PTR [ecx]
popcnt ebx,DWORD PTR [ecx]
popcnt bx,WORD PTR [ecx]
popcnt ebx,DWORD PTR [ecx]
popcnt bx,cx
popcnt ebx,ecx
popcnt bx,cx
popcnt ebx,ecx
.p2align 4,0
|
tactcomplabs/xbgas-binutils-gdb
| 1,165
|
gas/testsuite/gas/i386/bmi2.s
|
# Check 32bit BMI2 instructions
.allow_index_reg
.text
_start:
# Test for op r32, r/m32, imm8
rorx $7,%eax,%ebx
rorx $7,(%ecx),%ebx
# Test for op r32, r32, r/m32
mulx %eax,%ebx,%esi
mulx (%ecx),%ebx,%esi
pdep %eax,%ebx,%esi
pdep (%ecx),%ebx,%esi
pext %eax,%ebx,%esi
pext (%ecx),%ebx,%esi
# Test for op r32, r/m32, r32
bzhi %eax,%ebx,%esi
bzhi %ebx,(%ecx),%esi
sarx %eax,%ebx,%esi
sarx %ebx,(%ecx),%esi
shlx %eax,%ebx,%esi
shlx %ebx,(%ecx),%esi
shrx %eax,%ebx,%esi
shrx %ebx,(%ecx),%esi
.intel_syntax noprefix
# Test for op r32, r/m32, imm8
rorx ebx,eax,7
rorx ebx,DWORD PTR [ecx],7
rorx ebx,[ecx],7
# Test for op r32, r32, r/m32
mulx esi,ebx,eax
mulx esi,ebx,DWORD PTR [ecx]
mulx esi,ebx,[ecx]
pdep esi,ebx,eax
pdep esi,ebx,DWORD PTR [ecx]
pdep esi,ebx,[ecx]
pext esi,ebx,eax
pext esi,ebx,DWORD PTR [ecx]
pext esi,ebx,[ecx]
# Test for op r32, r/m32, r32
bzhi esi,ebx,eax
bzhi esi,DWORD PTR [ecx],ebx
bzhi esi,[ecx],ebx
sarx esi,ebx,eax
sarx esi,DWORD PTR [ecx],ebx
sarx esi,[ecx],ebx
shlx esi,ebx,eax
shlx esi,DWORD PTR [ecx],ebx
shlx esi,[ecx],ebx
shrx esi,ebx,eax
shrx esi,DWORD PTR [ecx],ebx
shrx esi,[ecx],ebx
|
tactcomplabs/xbgas-binutils-gdb
| 5,796
|
gas/testsuite/gas/i386/x86_64.s
|
.text
.intel_syntax noprefix
# REX prefix and addressing modes.
add edx,ecx
add edx,r9d
add r10d,ecx
add rdx,rcx
add r10,r9
add r8d,eax
add r8w,ax
add r8,rax
add eax,0x44332211
add rax,0xfffffffff4332211
add ax,0x4433
add rax,0x44332211
add dl,cl
add bh,dh
add dil,sil
add r15b,sil
add dil,r14b
add r15b,r14b
PUSH RAX
PUSH R8
POP R9
ADD AL,0x11
ADD AH,0x11
ADD SPL,0x11
ADD R8B,0x11
ADD R12B,0x11
MOV RAX,CR0
MOV R8,CR0
MOV RAX,CR8
MOV CR8,RAX
REP MOVSQ #[RSI],[RDI]
REP MOVSW #[RSI,[RDI]
REP MOVSQ #[RSI],[RDI]
MOV AL, 0x11
MOV AH, 0x11
MOV SPL, 0x11
MOV R12B, 0x11
MOV EAX,0x11223344
MOV R8D,0x11223344
MOV RAX,0x1122334455667788
MOV R8,0x1122334455667788
add eax,[rax]
ADD EAX,[R8]
ADD R8D,[R8]
ADD RAX,[R8]
ADD EAX,[0x22222222+RIP]
ADD EAX,[RBP+0x00]
ADD EAX,FLAT:[0x22222222]
ADD EAX,[R13+0]
ADD EAX,[RAX+RAX*4]
ADD EAX,[R8+RAX*4]
ADD R8D,[R8+RAX*4]
ADD EAX,[R8+R8*4]
ADD [RCX+R8*4],R8D
ADD EDX,[RAX+RAX*8]
ADD EDX,[RAX+RCX*8]
ADD EDX,[RAX+RDX*8]
ADD EDX,[RAX+RBX*8]
ADD EDX,[RAX]
ADD EDX,[RAX+RBP*8]
ADD EDX,[RAX+RSI*8]
ADD EDX,[RAX+RDI*8]
ADD EDX,[RAX+R8*8]
ADD EDX,[RAX+R9*8]
ADD EDX,[RAX+R10*8]
ADD EDX,[RAX+R11*8]
ADD EDX,[RAX+R12*8]
ADD EDX,[RAX+R13*8]
ADD EDX,[RAX+R14*8]
ADD EDX,[RAX+R15*8]
ADD ECX,0x11
ADD DWORD PTR [RAX],0x11
ADD QWORD PTR [RAX],0x11
ADD DWORD PTR [R8],0x11
ADD DWORD PTR [RCX+RAX*4],0x11
ADD DWORD PTR [R9+RAX*4],0x11
ADD DWORD PTR [RCX+R8*4],0x11
ADD DWORD PTR [0x22222222+RIP],0x33
ADD QWORD PTR [RIP+0x22222222],0x33
ADD DWORD PTR [RIP+0x22222222],0x33333333
ADD QWORD PTR [RIP+0x22222222],0x33333333
ADD DWORD PTR [RAX*8+0x22222222],0x33
ADD DWORD PTR [RAX+0x22222222],0x33
ADD DWORD PTR [RAX+0x22222222],0x33
ADD DWORD PTR [R8+RBP*8],0x33
ADD DWORD PTR FLAT:[0x22222222],0x33
#new instructions
MOVABS AL,FLAT:[0x8877665544332211]
MOVABS EAX,FLAT:[0x8877665544332211]
MOVABS FLAT:[0x8877665544332211],AL
MOVABS FLAT:[0x8877665544332211],EAX
MOVABS RAX,FLAT:[0x8877665544332211]
MOVABS FLAT:[0x8877665544332211],RAX
cqo
cdqe
movsx rax, eax
movsx rax, ax
movsx rax, al
retf
retf 16
retfw
retfw 2
retfd
retfd 4
retfq
retfq 8
bar:
.att_syntax
#testcase for symbol references.
#immediates - various sizes:
mov $symbol, %al
mov $symbol, %ax
mov $symbol, %eax
mov $symbol, %rax
#addressing modes:
#absolute 64bit addressing
movabs symbol, %eax
#absolute 32bit addressing
mov symbol, %eax
#arithmetic
mov symbol(%rax), %eax
#RIP relative
mov symbol(%rip), %eax
.intel_syntax noprefix
#immediates - various sizes:
mov al, offset flat:symbol
mov ax, offset flat:symbol
mov eax, offset flat:symbol
mov rax, offset flat:symbol
#parts aren't supported by the parser, yet (and not at all for symbol refs)
#mov eax, high part symbol
#mov eax, low part symbol
#addressing modes
#absolute 64bit addressing
movabs eax, [symbol]
#absolute 32bit addressing
mov eax, [symbol]
#arithmetic
mov eax, [rax+symbol]
#RIP relative
mov eax, [rip+symbol]
foo:
.att_syntax
#absolute 64bit addressing
mov 0x8877665544332211,%al
mov 0x8877665544332211,%ax
mov 0x8877665544332211,%eax
mov 0x8877665544332211,%rax
mov %al,0x8877665544332211
mov %ax,0x8877665544332211
mov %eax,0x8877665544332211
mov %rax,0x8877665544332211
movb 0x8877665544332211,%al
movw 0x8877665544332211,%ax
movl 0x8877665544332211,%eax
movq 0x8877665544332211,%rax
movb %al,0x8877665544332211
movw %ax,0x8877665544332211
movl %eax,0x8877665544332211
movq %rax,0x8877665544332211
#absolute signed 32bit addressing
mov 0xffffffffff332211,%al
mov 0xffffffffff332211,%ax
mov 0xffffffffff332211,%eax
mov 0xffffffffff332211,%rax
mov %al,0xffffffffff332211
mov %ax,0xffffffffff332211
mov %eax,0xffffffffff332211
mov %rax,0xffffffffff332211
movb 0xffffffffff332211,%al
movw 0xffffffffff332211,%ax
movl 0xffffffffff332211,%eax
movq 0xffffffffff332211,%rax
movb %al,0xffffffffff332211
movw %ax,0xffffffffff332211
movl %eax,0xffffffffff332211
movq %rax,0xffffffffff332211
cmpxchg16b (%rax)
.intel_syntax noprefix
cmpxchg16b oword ptr [rax]
.att_syntax
movsx %al, %si
movsx %al, %esi
movsx %al, %rsi
movsx %ax, %esi
movsx %ax, %rsi
movsx %eax, %rsi
movsx (%rax), %dx
movsbl (%rax), %edx
movsbq (%rax), %rdx
movsbw (%rax), %dx
movswl (%rax), %edx
movswq (%rax), %rdx
movzx %al, %si
movzx %al, %esi
movzx %al, %rsi
movzx %ax, %esi
movzx %ax, %rsi
movzx (%rax), %dx
movzb (%rax), %edx
movzb (%rax), %rdx
movzb (%rax), %dx
movzbl (%rax), %edx
movzbq (%rax), %rdx
movzbw (%rax), %dx
movzwl (%rax), %edx
movzwq (%rax), %rdx
.intel_syntax noprefix
movsx si,al
movsx esi,al
movsx rsi,al
movsx esi,ax
movsx rsi,ax
movsx rsi,eax
movsx edx,BYTE PTR [rax]
movsx rdx,BYTE PTR [rax]
movsx dx,BYTE PTR [rax]
movsx edx,WORD PTR [rax]
movsx rdx,WORD PTR [rax]
movzx si,al
movzx esi,al
movzx rsi,al
movzx esi,ax
movzx rsi,ax
movzx edx,BYTE PTR [rax]
movzx rdx,BYTE PTR [rax]
movzx dx,BYTE PTR [rax]
movzx edx,WORD PTR [rax]
movzx rdx,WORD PTR [rax]
movq xmm1,QWORD PTR [rsp]
movq xmm1,[rsp]
movq QWORD PTR [rsp],xmm1
movq [rsp],xmm1
.att_syntax
fnstsw
fnstsw %ax
fstsw
fstsw %ax
.intel_syntax noprefix
fnstsw
fnstsw ax
fstsw
fstsw ax
.att_syntax
movsx (%rax),%ax
movsxb (%rax), %dx
movsxb (%rax), %edx
movsxb (%rax), %rdx
movsxw (%rax), %edx
movsxw (%rax), %rdx
movsxl (%rax), %rdx
movsxd (%rax),%rax
movzx (%rax),%ax
movzxb (%rax), %dx
movzxb (%rax), %edx
movzxb (%rax), %rdx
movzxw (%rax), %edx
movzxw (%rax), %rdx
movnti %eax, (%rax)
movntil %eax, (%rax)
movnti %rax, (%rax)
movntiq %rax, (%rax)
.intel_syntax noprefix
movsx ax, BYTE PTR [rax]
movsx eax, BYTE PTR [rax]
movsx eax, WORD PTR [rax]
movsx rax, WORD PTR [rax]
movsx rax, DWORD PTR [rax]
movsxd rax, [rax]
movzx ax, BYTE PTR [rax]
movzx eax, BYTE PTR [rax]
movzx eax, WORD PTR [rax]
movzx rax, WORD PTR [rax]
movnti dword ptr [rax], eax
movnti qword ptr [rax], rax
mov eax, tr1
mov tr0, rcx
|
tactcomplabs/xbgas-binutils-gdb
| 13,859
|
gas/testsuite/gas/i386/avx512f_vl-wig.s
|
# Check 32bit AVX512{F,VL} WIG instructions
.allow_index_reg
.text
_start:
vpmovsxbd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq 254(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq 256(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq -256(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq -258(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq 254(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq 256(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq -256(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq -258(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
.intel_syntax noprefix
vpmovsxbd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovsxbd xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovsxbd xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxbd ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxbd ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [ecx] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [edx+254] # AVX512{F,VL} Disp8
vpmovsxbq xmm6{k7}, WORD PTR [edx+256] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [edx-256] # AVX512{F,VL} Disp8
vpmovsxbq xmm6{k7}, WORD PTR [edx-258] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovsxbq ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovsxbq ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxwd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxwd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovsxwq xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovsxwq xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxwq ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxwq ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovzxbd xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovzxbd xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxbd ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxbd ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [ecx] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [edx+254] # AVX512{F,VL} Disp8
vpmovzxbq xmm6{k7}, WORD PTR [edx+256] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [edx-256] # AVX512{F,VL} Disp8
vpmovzxbq xmm6{k7}, WORD PTR [edx-258] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovzxbq ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovzxbq ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxwd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxwd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovzxwq xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovzxwq xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxwq ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxwq ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 1,101
|
gas/testsuite/gas/i386/intel-regs.s
|
.text
.intel_syntax noprefix
mov eax, tmm1
.arch i286
.code16
mov ax, eax ; add [bx+si], al
mov ax, rax ; add [bx+si], al
mov ax, axl ; add [bx+si], al
mov ax, r8b ; add [bx+si], al
mov ax, r8w ; add [bx+si], al
mov ax, r8d ; add [bx+si], al
mov ax, r8 ; add [bx+si], al
mov ax, fs ; add [bx+si], al
mov ax, st ; add [bx+si], al
mov ax, cr0 ; add [bx+si], al
mov ax, dr0 ; add [bx+si], al
mov ax, tr0 ; add [bx+si], al
mov ax, mm0 ; add [bx+si], al
mov ax, xmm0 ; add [bx+si], al
mov ax, ymm0 ; add [bx+si], al
mov ax, xmm16 ; add [bx+si], al
mov ax, zmm0 ; add [bx+si], al
.arch generic32
.code32
mov eax, rax
mov eax, axl
mov eax, r8b
mov eax, r8w
mov eax, r8d
mov eax, r8
mov eax, st
mov eax, cr0
mov eax, dr0
mov eax, tr0
mov eax, mm0
mov eax, xmm0
mov eax, ymm0
mov eax, xmm16
mov eax, zmm0
.arch .387
ffree st
.arch .mmx
pxor mm0, mm0
.arch .sse
xorps xmm0, xmm0
.arch .avx
vxorps ymm0, ymm0, ymm0
.arch generic64
.code64
mov axl, r8b
mov ax, r8w
mov eax, r8d
mov rax, r8
ymm8:
jmp ymm8
tmm0:
jmp tmm0
|
tactcomplabs/xbgas-binutils-gdb
| 1,246
|
gas/testsuite/gas/i386/x86-64-lfence-load.s
|
# x86-64-lfence-load.s: gas testsuite input covering a broad mix of
# memory-loading and non-loading instructions (explicit loads, stack ops,
# x87, string ops, RMW and register-only forms).
# NOTE(review): presumably assembled with one of the -mlfence-* mitigation
# options so the expected output checks where LFENCE is (not) inserted --
# confirm against the matching .d/run file.
.text
_start:
vldmxcsr (%rbp)
lgdt (%rbp)
vmptrld (%rbp)
vmclear (%rbp)
invpcid (%rbp), %rdx
invlpg (%eax)
clflush (%rbp)
clflushopt (%rbp)
clwb (%rbp)
cldemote (%rbp)
bndmk (%rbp), %bnd1
bndcl (%rbp), %bnd1
bndcu (%rbp), %bnd1
bndcn (%rbp), %bnd1
bndstx %bnd1, (%rbp)
bndldx (%rbp), %bnd1
prefetcht0 (%rbp)
prefetcht1 (%rbp)
prefetcht2 (%rbp)
prefetchw (%rbp)
pop %fs
popf
xlatb (%rbx)
fsts (%rbp)
flds (%rbp)
fistl (%rbp)
fists (%rbp)
fildl (%rbp)
filds (%rbp)
fsave (%rbp)
frstor (%rbp)
filds (%rbp)
fisttps (%rbp)
fldenv (%rbp)
fstenv (%rbp)
fadds (%rbp)
fadds (%rsp)
fadd %st(3),%st
fadds (%rcx)
filds (%rcx)
fists (%rcx)
xrstor (%rcx)
prefetchnta (%rcx)
cmpxchg8b (%rcx)
cmpxchg16b (%rcx)
incl %ecx
lgdt (%rax)
pfcmpeq 2(%rsi),%mm4
popq (%rax)
popq %rax
rclw (%rcx)
testl $1,(%rcx)
incl (%rcx)
notl (%rcx)
divl (%rcx)
mull (%rcx)
idivl (%rcx)
imull (%rcx)
leaq (%rax,%rax,2), %rax
leave
outsb
lodsb
rep movsl
rep scasl
rep cmpsl
rep lodsl
addl $1, (%r11)
btl $1, (%r11)
xadd %rax,(%rbx)
xadd %rax,%rbx
xchg %rax,(%rbx)
xchg %rax,%rbx
cmp %rax,0x40(%rbp)
cmp 0x40(%rbp),%rax
add %rax,0x40(%rbp)
add (%rax),%rax
test %rax,0x40(%rbp)
test 0x40(%rbp),%rax
|
tactcomplabs/xbgas-binutils-gdb
| 1,828
|
gas/testsuite/gas/i386/x86-64-notrack.s
|
# Check 64bit NOTRACK prefix
# Exercises the NOTRACK prefix on indirect call/jmp through registers and
# memory (64- and 32-bit address forms), alone and combined with the BND
# prefix in both orders, in AT&T and Intel syntax.  The trailing .byte
# sequences hand-encode specific prefix byte orderings for the
# disassembler side of the test.
.allow_index_reg
.text
_start:
	notrack call *%rax
	notrack call *%r8
	notrack jmp *%rax
	notrack jmp *%r8
	notrack call *(%rax)
	notrack call *(%r8)
	notrack jmp *(%rax)
	notrack jmp *(%r8)
	notrack call *(%eax)
	notrack call *(%r8d)
	notrack jmp *(%eax)
	notrack jmp *(%r8d)
	notrack bnd call *%rax
	notrack bnd call *%r8
	notrack bnd jmp *%rax
	notrack bnd jmp *%r8
	notrack bnd call *(%rax)
	notrack bnd call *(%r8)
	notrack bnd jmp *(%rax)
	notrack bnd jmp *(%r8)
	notrack bnd call *(%eax)
	notrack bnd call *(%r8d)
	notrack bnd jmp *(%eax)
	notrack bnd jmp *(%r8d)
	bnd notrack call *%rax
	bnd notrack call *%r8
	bnd notrack call *(%rax)
	bnd notrack call *(%r8)
	bnd notrack call *(%eax)
	bnd notrack call *(%r8d)
.intel_syntax noprefix
	notrack call rax
	notrack call r8
	notrack jmp rax
	notrack jmp r8
	notrack call QWORD PTR [rax]
	notrack call QWORD PTR [r8]
	notrack jmp QWORD PTR [rax]
	notrack jmp QWORD PTR [r8]
	notrack call QWORD PTR [eax]
	notrack call QWORD PTR [r8d]
	notrack jmp QWORD PTR [eax]
	notrack jmp QWORD PTR [r8d]
	notrack bnd call rax
	notrack bnd call r8
	notrack bnd jmp rax
	notrack bnd jmp r8
	notrack bnd call QWORD PTR [rax]
	notrack bnd call QWORD PTR [r8]
	notrack bnd jmp QWORD PTR [rax]
	notrack bnd jmp QWORD PTR [r8]
	notrack bnd call QWORD PTR [eax]
	notrack bnd call QWORD PTR [r8d]
	notrack bnd jmp QWORD PTR [eax]
	notrack bnd jmp QWORD PTR [r8d]
	bnd notrack call rax
	bnd notrack call r8
	bnd notrack call QWORD PTR [rax]
	bnd notrack call QWORD PTR [r8]
	bnd notrack call QWORD PTR [eax]
	bnd notrack call QWORD PTR [r8d]
# bnd notrack callq *%rax
	.byte 0xf2
	.byte 0x3e
	.byte 0xff
	.byte 0xd0
# ds callw *%ax
	.byte 0x3e
	.byte 0x66
	.byte 0xff
	.byte 0xd0
# ds callw *%ax
	.byte 0x66
	.byte 0x3e
	.byte 0xff
	.byte 0xd0
|
tactcomplabs/xbgas-binutils-gdb
| 3,723
|
gas/testsuite/gas/i386/x86-64-bmi2.s
|
# Check 64bit BMI2 instructions
# Covers rorx/mulx/pdep/pext/bzhi/sarx/shlx/shrx with register and memory
# operands at 32- and 64-bit operand size, including REX-extended
# registers, in both AT&T and Intel syntax (the bare "[rcx]" Intel forms
# test size inference without a PTR override).
.allow_index_reg
.text
_start:
# Test for op r32, r/m32, imm8
	rorx $7,%eax,%ebx
	rorx $7,(%rcx),%ebx
	rorx $7,%r9d,%r15d
	rorx $7,(%rcx),%r15d
# Test for op r32, r32, r/m32
	mulx %eax,%ebx,%esi
	mulx (%rcx),%ebx,%esi
	mulx %r9d,%r15d,%r10d
	mulx (%rcx),%r15d,%r10d
	pdep %eax,%ebx,%esi
	pdep (%rcx),%ebx,%esi
	pdep %r9d,%r15d,%r10d
	pdep (%rcx),%r15d,%r10d
	pext %eax,%ebx,%esi
	pext (%rcx),%ebx,%esi
	pext %r9d,%r15d,%r10d
	pext (%rcx),%r15d,%r10d
# Test for op r32, r/m32, r32
	bzhi %eax,%ebx,%esi
	bzhi %ebx,(%rcx),%esi
	bzhi %r9d,%r15d,%r10d
	bzhi %r9d,(%rcx),%r10d
	sarx %eax,%ebx,%esi
	sarx %ebx,(%rcx),%esi
	sarx %r9d,%r15d,%r10d
	sarx %r9d,(%rcx),%r10d
	shlx %eax,%ebx,%esi
	shlx %ebx,(%rcx),%esi
	shlx %r9d,%r15d,%r10d
	shlx %r9d,(%rcx),%r10d
	shrx %eax,%ebx,%esi
	shrx %ebx,(%rcx),%esi
	shrx %r9d,%r15d,%r10d
	shrx %r9d,(%rcx),%r10d
# Test for op r64, r/m64, imm8
	rorx $7,%rax,%rbx
	rorx $7,(%rcx),%rbx
	rorx $7,%r9,%r15
	rorx $7,(%rcx),%r15
# Test for op r64, r64, r/m64
	mulx %rax,%rbx,%rsi
	mulx (%rcx),%rbx,%rsi
	mulx %r9,%r15,%r10
	mulx (%rcx),%r15,%r10
	pdep %rax,%rbx,%rsi
	pdep (%rcx),%rbx,%rsi
	pdep %r9,%r15,%r10
	pdep (%rcx),%r15,%r10
	pext %rax,%rbx,%rsi
	pext (%rcx),%rbx,%rsi
	pext %r9,%r15,%r10
	pext (%rcx),%r15,%r10
# Test for op r64, r/m64, r64
	bzhi %rax,%rbx,%rsi
	bzhi %rax,(%rcx),%rsi
	bzhi %r9,%r15,%r10
	bzhi %r9,(%rcx),%r10
	sarx %rax,%rbx,%rsi
	sarx %rax,(%rcx),%rsi
	sarx %r9,%r15,%r10
	sarx %r9,(%rcx),%r10
	shlx %rax,%rbx,%rsi
	shlx %rax,(%rcx),%rsi
	shlx %r9,%r15,%r10
	shlx %r9,(%rcx),%r10
	shrx %rax,%rbx,%rsi
	shrx %rax,(%rcx),%rsi
	shrx %r9,%r15,%r10
	shrx %r9,(%rcx),%r10
.intel_syntax noprefix
# Test for op r32, r/m32, imm8
	rorx ebx,eax,7
	rorx ebx,DWORD PTR [rcx],7
	rorx r10d,r9d,7
	rorx r10d,DWORD PTR [rcx],7
	rorx ebx,[rcx],7
# Test for op r32, r32, r/m32
	mulx esi,ebx,eax
	mulx esi,ebx,DWORD PTR [rcx]
	mulx r15d,r10d,r9d
	mulx r15d,r10d,DWORD PTR [rcx]
	mulx esi,ebx,[rcx]
	pdep esi,ebx,eax
	pdep esi,ebx,DWORD PTR [rcx]
	pdep r15d,r10d,r9d
	pdep r15d,r10d,DWORD PTR [rcx]
	pdep esi,ebx,[rcx]
	pext esi,ebx,eax
	pext esi,ebx,DWORD PTR [rcx]
	pext r15d,r10d,r9d
	pext r15d,r10d,DWORD PTR [rcx]
	pext esi,ebx,[rcx]
# Test for op r32, r/m32, r32
	bzhi esi,ebx,eax
	bzhi esi,DWORD PTR [rcx],ebx
	bzhi r15d,r10d,r9d
	bzhi r15d,DWORD PTR [rcx],r9d
	bzhi esi,[rcx],ebx
	sarx esi,ebx,eax
	sarx esi,DWORD PTR [rcx],ebx
	sarx r15d,r10d,r9d
	sarx r15d,DWORD PTR [rcx],r9d
	sarx esi,[rcx],ebx
	shlx esi,ebx,eax
	shlx esi,DWORD PTR [rcx],ebx
	shlx r15d,r10d,r9d
	shlx r15d,DWORD PTR [rcx],r9d
	shlx esi,[rcx],ebx
	shrx esi,ebx,eax
	shrx esi,DWORD PTR [rcx],ebx
	shrx r15d,r10d,r9d
	shrx r15d,DWORD PTR [rcx],r9d
	shrx esi,[rcx],ebx
# Test for op r64, r/m64, imm8
	rorx rbx,rax,7
	rorx rbx,QWORD PTR [rcx],7
	rorx r15,r9,7
	rorx r15,QWORD PTR [rcx],7
	rorx rbx,[rcx],7
# Test for op r64, r64, r/m64
	mulx rsi,rbx,rax
	mulx rsi,rbx,QWORD PTR [rcx]
	mulx r10,r15,r9
	mulx r10,r15,QWORD PTR [rcx]
	mulx rsi,rbx,[rcx]
	pdep rsi,rbx,rax
	pdep rsi,rbx,QWORD PTR [rcx]
	pdep r10,r15,r9
	pdep r10,r15,QWORD PTR [rcx]
	pdep rsi,rbx,[rcx]
	pext rsi,rbx,rax
	pext rsi,rbx,QWORD PTR [rcx]
	pext r10,r15,r9
	pext r10,r15,QWORD PTR [rcx]
	pext rsi,rbx,[rcx]
# Test for op r64, r/m64, r64
	bzhi rsi,rbx,rax
	bzhi rsi,QWORD PTR [rcx],rax
	bzhi r10,r15,r9
	bzhi r10,QWORD PTR [rcx],r9
	bzhi rsi,[rcx],rax
	sarx rsi,rbx,rax
	sarx rsi,QWORD PTR [rcx],rax
	sarx r10,r15,r9
	sarx r10,QWORD PTR [rcx],r9
	sarx rsi,[rcx],rax
	shlx rsi,rbx,rax
	shlx rsi,QWORD PTR [rcx],rax
	shlx r10,r15,r9
	shlx r10,QWORD PTR [rcx],r9
	shlx rsi,[rcx],rax
	shrx rsi,rbx,rax
	shrx rsi,QWORD PTR [rcx],rax
	shrx r10,r15,r9
	shrx r10,QWORD PTR [rcx],r9
	shrx rsi,[rcx],rax
|
tactcomplabs/xbgas-binutils-gdb
| 10,578
|
gas/testsuite/gas/i386/x86-64-avx512vbmi2.s
|
# Check 64bit AVX512VBMI2 instructions
# Covers compress/expand (byte/word) and the concat-shift family
# (vpshld*/vpshrd*, variable and immediate forms) with masking {k7},
# zeroing {z}, embedded broadcast {1toN}, and displacement choices; the
# "Disp8" comments mark displacements expected to use the compressed
# 8-bit encoding -- keep instruction text byte-identical to the paired
# expected-output file.
.allow_index_reg
.text
_start:
	vpcompressb	%zmm30, (%rcx){%k7}	 # AVX512VBMI2
	vpcompressb	%zmm30, 0x123(%rax,%r14,8)	 # AVX512VBMI2
	vpcompressb	%zmm30, 126(%rdx)	 # AVX512VBMI2 Disp8
	vpcompressb	%zmm29, %zmm30	 # AVX512VBMI2
	vpcompressb	%zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpcompressb	%zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpcompressw	%zmm30, (%rcx){%k7}	 # AVX512VBMI2
	vpcompressw	%zmm30, 0x123(%rax,%r14,8)	 # AVX512VBMI2
	vpcompressw	%zmm30, 254(%rdx)	 # AVX512VBMI2 Disp8
	vpcompressw	%zmm29, %zmm30	 # AVX512VBMI2
	vpcompressw	%zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpcompressw	%zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpexpandb	(%rcx), %zmm30{%k7}	 # AVX512VBMI2
	vpexpandb	(%rcx), %zmm30{%k7}{z}	 # AVX512VBMI2
	vpexpandb	0x123(%rax,%r14,8), %zmm30	 # AVX512VBMI2
	vpexpandb	126(%rdx), %zmm30	 # AVX512VBMI2 Disp8
	vpexpandb	%zmm29, %zmm30	 # AVX512VBMI2
	vpexpandb	%zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpexpandb	%zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpexpandw	(%rcx), %zmm30{%k7}	 # AVX512VBMI2
	vpexpandw	(%rcx), %zmm30{%k7}{z}	 # AVX512VBMI2
	vpexpandw	0x123(%rax,%r14,8), %zmm30	 # AVX512VBMI2
	vpexpandw	254(%rdx), %zmm30	 # AVX512VBMI2 Disp8
	vpexpandw	%zmm29, %zmm30	 # AVX512VBMI2
	vpexpandw	%zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpexpandw	%zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldvw	%zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshldvw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldvw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvw	8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvd	%zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshldvd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldvd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvd	8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvd	508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512VBMI2 Disp8
	vpshldvq	%zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvq	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshldvq	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldvq	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvq	8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldvq	1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512VBMI2 Disp8
	vpshrdvw	%zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshrdvw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshrdvw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvw	8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvd	%zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshrdvd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshrdvd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvd	8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvq	%zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvq	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshrdvq	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshrdvq	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdvq	8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldw	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshldw	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshldw	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldw	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldw	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldd	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshldd	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshldd	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldd	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldd	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldq	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshldq	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshldq	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshldq	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldq	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshldq	$123, 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdw	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdw	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshrdw	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshrdw	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdw	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdd	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdd	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshrdd	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshrdd	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdd	$123, (%rcx){1to16}, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdd	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdq	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdq	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512VBMI2
	vpshrdq	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512VBMI2
	vpshrdq	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdq	$123, (%rcx){1to8}, %zmm29, %zmm30	 # AVX512VBMI2
	vpshrdq	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512VBMI2
.intel_syntax noprefix
	vpcompressb	ZMMWORD PTR [rcx]{k7}, zmm30	 # AVX512VBMI2
	vpcompressb	ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512VBMI2
	vpcompressb	ZMMWORD PTR [rdx+126], zmm30	 # AVX512VBMI2 Disp8
	vpcompressb	zmm30, zmm29	 # AVX512VBMI2
	vpcompressb	zmm30{k7}, zmm29	 # AVX512VBMI2
	vpcompressb	zmm30{k7}{z}, zmm29	 # AVX512VBMI2
	vpcompressw	ZMMWORD PTR [rcx]{k7}, zmm30	 # AVX512VBMI2
	vpcompressw	ZMMWORD PTR [rax+r14*8+0x1234], zmm30	 # AVX512VBMI2
	vpcompressw	ZMMWORD PTR [rdx+254], zmm30	 # AVX512VBMI2 Disp8
	vpcompressw	zmm30, zmm29	 # AVX512VBMI2
	vpcompressw	zmm30{k7}, zmm29	 # AVX512VBMI2
	vpcompressw	zmm30{k7}{z}, zmm29	 # AVX512VBMI2
	vpexpandb	zmm30{k7}, ZMMWORD PTR [rcx]	 # AVX512VBMI2
	vpexpandb	zmm30{k7}{z}, ZMMWORD PTR [rcx]	 # AVX512VBMI2
	vpexpandb	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpexpandb	zmm30, ZMMWORD PTR [rdx+126]	 # AVX512VBMI2 Disp8
	vpexpandb	zmm30, zmm29	 # AVX512VBMI2
	vpexpandb	zmm30{k7}, zmm29	 # AVX512VBMI2
	vpexpandb	zmm30{k7}{z}, zmm29	 # AVX512VBMI2
	vpexpandw	zmm30{k7}, ZMMWORD PTR [rcx]	 # AVX512VBMI2
	vpexpandw	zmm30{k7}{z}, ZMMWORD PTR [rcx]	 # AVX512VBMI2
	vpexpandw	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpexpandw	zmm30, ZMMWORD PTR [rdx+254]	 # AVX512VBMI2 Disp8
	vpexpandw	zmm30, zmm29	 # AVX512VBMI2
	vpexpandw	zmm30{k7}, zmm29	 # AVX512VBMI2
	vpexpandw	zmm30{k7}{z}, zmm29	 # AVX512VBMI2
	vpshldvw	zmm30, zmm29, zmm28	 # AVX512VBMI2
	vpshldvw	zmm30{k7}, zmm29, zmm28	 # AVX512VBMI2
	vpshldvw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512VBMI2
	vpshldvw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpshldvw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512VBMI2 Disp8
	vpshldvd	zmm30, zmm29, zmm28	 # AVX512VBMI2
	vpshldvd	zmm30{k7}, zmm29, zmm28	 # AVX512VBMI2
	vpshldvd	zmm30{k7}{z}, zmm29, zmm28	 # AVX512VBMI2
	vpshldvd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpshldvd	zmm30, zmm29, [rcx]{1to16}	 # AVX512VBMI2
	vpshldvd	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512VBMI2 Disp8
	vpshldvd	zmm30, zmm29, [rdx+508]{1to16}	 # AVX512VBMI2 Disp8
	vpshldvq	zmm30, zmm29, zmm28	 # AVX512VBMI2
	vpshldvq	zmm30{k7}, zmm29, zmm28	 # AVX512VBMI2
	vpshldvq	zmm30{k7}{z}, zmm29, zmm28	 # AVX512VBMI2
	vpshldvq	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpshldvq	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512VBMI2 Disp8
	vpshldvq	zmm30, zmm29, [rdx+1016]{1to8}	 # AVX512VBMI2 Disp8
	vpshrdvw	zmm30, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvw	zmm30{k7}, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpshrdvw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512VBMI2 Disp8
	vpshrdvd	zmm30, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvd	zmm30{k7}, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvd	zmm30{k7}{z}, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpshrdvd	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512VBMI2 Disp8
	vpshrdvd	zmm30, zmm29, [rdx+508]{1to16}	 # AVX512VBMI2 Disp8
	vpshrdvq	zmm30, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvq	zmm30{k7}, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvq	zmm30{k7}{z}, zmm29, zmm28	 # AVX512VBMI2
	vpshrdvq	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512VBMI2
	vpshrdvq	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512VBMI2 Disp8
	vpshrdvq	zmm30, zmm29, [rdx+1016]{1to8}	 # AVX512VBMI2 Disp8
	vpshldw	zmm30, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldw	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldw	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VBMI2
	vpshldw	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512VBMI2 Disp8
	vpshldd	zmm30, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldd	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldd	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VBMI2
	vpshldd	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512VBMI2 Disp8
	vpshldd	zmm30, zmm29, [rdx+508]{1to16}, 123	 # AVX512VBMI2 Disp8
	vpshldq	zmm30, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldq	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldq	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshldq	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VBMI2
	vpshldq	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512VBMI2 Disp8
	vpshldq	zmm30, zmm29, [rdx+1016]{1to8}, 123	 # AVX512VBMI2 Disp8
	vpshrdw	zmm30, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdw	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdw	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VBMI2
	vpshrdw	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512VBMI2 Disp8
	vpshrdd	zmm30, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdd	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdd	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdd	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VBMI2
	vpshrdd	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512VBMI2 Disp8
	vpshrdd	zmm30, zmm29, [rdx+508]{1to16}, 123	 # AVX512VBMI2 Disp8
	vpshrdq	zmm30, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdq	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdq	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512VBMI2
	vpshrdq	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512VBMI2
	vpshrdq	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512VBMI2 Disp8
	vpshrdq	zmm30, zmm29, [rdx+1016]{1to8}, 123	 # AVX512VBMI2 Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 15,990
|
gas/testsuite/gas/i386/x86-64-avx256int.s
|
# Check x86-64 256it integer AVX instructions
# Covers the AVX2 256-bit integer instruction set grouped by operand
# pattern (register, memory, immediate, and xmm-shift-count forms), first
# in AT&T syntax and then repeated in Intel syntax; the bare "[rcx]"
# Intel forms test operand-size inference without a PTR override.
.allow_index_reg
.text
_start:
# Tests for op ymm, regl
	vpmovmskb %ymm4,%ecx
# Tests for op ymm, regq
	vpmovmskb %ymm4,%rcx
# Tests for op imm8, ymm, ymm
	vpslld $7,%ymm6,%ymm2
	vpslldq $7,%ymm6,%ymm2
	vpsllq $7,%ymm6,%ymm2
	vpsllw $7,%ymm6,%ymm2
	vpsrad $7,%ymm6,%ymm2
	vpsraw $7,%ymm6,%ymm2
	vpsrld $7,%ymm6,%ymm2
	vpsrldq $7,%ymm6,%ymm2
	vpsrlq $7,%ymm6,%ymm2
	vpsrlw $7,%ymm6,%ymm2
# Tests for op imm8, ymm/mem256, ymm
	vpshufd $7,%ymm6,%ymm2
	vpshufd $7,(%rcx),%ymm6
	vpshufhw $7,%ymm6,%ymm2
	vpshufhw $7,(%rcx),%ymm6
	vpshuflw $7,%ymm6,%ymm2
	vpshuflw $7,(%rcx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
	vpackssdw %ymm4,%ymm6,%ymm2
	vpackssdw (%rcx),%ymm6,%ymm2
	vpacksswb %ymm4,%ymm6,%ymm2
	vpacksswb (%rcx),%ymm6,%ymm2
	vpackusdw %ymm4,%ymm6,%ymm2
	vpackusdw (%rcx),%ymm6,%ymm2
	vpackuswb %ymm4,%ymm6,%ymm2
	vpackuswb (%rcx),%ymm6,%ymm2
	vpaddb %ymm4,%ymm6,%ymm2
	vpaddb (%rcx),%ymm6,%ymm2
	vpaddw %ymm4,%ymm6,%ymm2
	vpaddw (%rcx),%ymm6,%ymm2
	vpaddd %ymm4,%ymm6,%ymm2
	vpaddd (%rcx),%ymm6,%ymm2
	vpaddq %ymm4,%ymm6,%ymm2
	vpaddq (%rcx),%ymm6,%ymm2
	vpaddsb %ymm4,%ymm6,%ymm2
	vpaddsb (%rcx),%ymm6,%ymm2
	vpaddsw %ymm4,%ymm6,%ymm2
	vpaddsw (%rcx),%ymm6,%ymm2
	vpaddusb %ymm4,%ymm6,%ymm2
	vpaddusb (%rcx),%ymm6,%ymm2
	vpaddusw %ymm4,%ymm6,%ymm2
	vpaddusw (%rcx),%ymm6,%ymm2
	vpand %ymm4,%ymm6,%ymm2
	vpand (%rcx),%ymm6,%ymm2
	vpandn %ymm4,%ymm6,%ymm2
	vpandn (%rcx),%ymm6,%ymm2
	vpavgb %ymm4,%ymm6,%ymm2
	vpavgb (%rcx),%ymm6,%ymm2
	vpavgw %ymm4,%ymm6,%ymm2
	vpavgw (%rcx),%ymm6,%ymm2
	vpcmpeqb %ymm4,%ymm6,%ymm2
	vpcmpeqb (%rcx),%ymm6,%ymm2
	vpcmpeqw %ymm4,%ymm6,%ymm2
	vpcmpeqw (%rcx),%ymm6,%ymm2
	vpcmpeqd %ymm4,%ymm6,%ymm2
	vpcmpeqd (%rcx),%ymm6,%ymm2
	vpcmpeqq %ymm4,%ymm6,%ymm2
	vpcmpeqq (%rcx),%ymm6,%ymm2
	vpcmpgtb %ymm4,%ymm6,%ymm2
	vpcmpgtb (%rcx),%ymm6,%ymm2
	vpcmpgtw %ymm4,%ymm6,%ymm2
	vpcmpgtw (%rcx),%ymm6,%ymm2
	vpcmpgtd %ymm4,%ymm6,%ymm2
	vpcmpgtd (%rcx),%ymm6,%ymm2
	vpcmpgtq %ymm4,%ymm6,%ymm2
	vpcmpgtq (%rcx),%ymm6,%ymm2
	vphaddw %ymm4,%ymm6,%ymm2
	vphaddw (%rcx),%ymm6,%ymm2
	vphaddd %ymm4,%ymm6,%ymm2
	vphaddd (%rcx),%ymm6,%ymm2
	vphaddsw %ymm4,%ymm6,%ymm2
	vphaddsw (%rcx),%ymm6,%ymm2
	vphsubw %ymm4,%ymm6,%ymm2
	vphsubw (%rcx),%ymm6,%ymm2
	vphsubd %ymm4,%ymm6,%ymm2
	vphsubd (%rcx),%ymm6,%ymm2
	vphsubsw %ymm4,%ymm6,%ymm2
	vphsubsw (%rcx),%ymm6,%ymm2
	vpmaddwd %ymm4,%ymm6,%ymm2
	vpmaddwd (%rcx),%ymm6,%ymm2
	vpmaddubsw %ymm4,%ymm6,%ymm2
	vpmaddubsw (%rcx),%ymm6,%ymm2
	vpmaxsb %ymm4,%ymm6,%ymm2
	vpmaxsb (%rcx),%ymm6,%ymm2
	vpmaxsw %ymm4,%ymm6,%ymm2
	vpmaxsw (%rcx),%ymm6,%ymm2
	vpmaxsd %ymm4,%ymm6,%ymm2
	vpmaxsd (%rcx),%ymm6,%ymm2
	vpmaxub %ymm4,%ymm6,%ymm2
	vpmaxub (%rcx),%ymm6,%ymm2
	vpmaxuw %ymm4,%ymm6,%ymm2
	vpmaxuw (%rcx),%ymm6,%ymm2
	vpmaxud %ymm4,%ymm6,%ymm2
	vpmaxud (%rcx),%ymm6,%ymm2
	vpminsb %ymm4,%ymm6,%ymm2
	vpminsb (%rcx),%ymm6,%ymm2
	vpminsw %ymm4,%ymm6,%ymm2
	vpminsw (%rcx),%ymm6,%ymm2
	vpminsd %ymm4,%ymm6,%ymm2
	vpminsd (%rcx),%ymm6,%ymm2
	vpminub %ymm4,%ymm6,%ymm2
	vpminub (%rcx),%ymm6,%ymm2
	vpminuw %ymm4,%ymm6,%ymm2
	vpminuw (%rcx),%ymm6,%ymm2
	vpminud %ymm4,%ymm6,%ymm2
	vpminud (%rcx),%ymm6,%ymm2
	vpmulhuw %ymm4,%ymm6,%ymm2
	vpmulhuw (%rcx),%ymm6,%ymm2
	vpmulhrsw %ymm4,%ymm6,%ymm2
	vpmulhrsw (%rcx),%ymm6,%ymm2
	vpmulhw %ymm4,%ymm6,%ymm2
	vpmulhw (%rcx),%ymm6,%ymm2
	vpmullw %ymm4,%ymm6,%ymm2
	vpmullw (%rcx),%ymm6,%ymm2
	vpmulld %ymm4,%ymm6,%ymm2
	vpmulld (%rcx),%ymm6,%ymm2
	vpmuludq %ymm4,%ymm6,%ymm2
	vpmuludq (%rcx),%ymm6,%ymm2
	vpmuldq %ymm4,%ymm6,%ymm2
	vpmuldq (%rcx),%ymm6,%ymm2
	vpor %ymm4,%ymm6,%ymm2
	vpor (%rcx),%ymm6,%ymm2
	vpsadbw %ymm4,%ymm6,%ymm2
	vpsadbw (%rcx),%ymm6,%ymm2
	vpshufb %ymm4,%ymm6,%ymm2
	vpshufb (%rcx),%ymm6,%ymm2
	vpsignb %ymm4,%ymm6,%ymm2
	vpsignb (%rcx),%ymm6,%ymm2
	vpsignw %ymm4,%ymm6,%ymm2
	vpsignw (%rcx),%ymm6,%ymm2
	vpsignd %ymm4,%ymm6,%ymm2
	vpsignd (%rcx),%ymm6,%ymm2
	vpsubb %ymm4,%ymm6,%ymm2
	vpsubb (%rcx),%ymm6,%ymm2
	vpsubw %ymm4,%ymm6,%ymm2
	vpsubw (%rcx),%ymm6,%ymm2
	vpsubd %ymm4,%ymm6,%ymm2
	vpsubd (%rcx),%ymm6,%ymm2
	vpsubq %ymm4,%ymm6,%ymm2
	vpsubq (%rcx),%ymm6,%ymm2
	vpsubsb %ymm4,%ymm6,%ymm2
	vpsubsb (%rcx),%ymm6,%ymm2
	vpsubsw %ymm4,%ymm6,%ymm2
	vpsubsw (%rcx),%ymm6,%ymm2
	vpsubusb %ymm4,%ymm6,%ymm2
	vpsubusb (%rcx),%ymm6,%ymm2
	vpsubusw %ymm4,%ymm6,%ymm2
	vpsubusw (%rcx),%ymm6,%ymm2
	vpunpckhbw %ymm4,%ymm6,%ymm2
	vpunpckhbw (%rcx),%ymm6,%ymm2
	vpunpckhwd %ymm4,%ymm6,%ymm2
	vpunpckhwd (%rcx),%ymm6,%ymm2
	vpunpckhdq %ymm4,%ymm6,%ymm2
	vpunpckhdq (%rcx),%ymm6,%ymm2
	vpunpckhqdq %ymm4,%ymm6,%ymm2
	vpunpckhqdq (%rcx),%ymm6,%ymm2
	vpunpcklbw %ymm4,%ymm6,%ymm2
	vpunpcklbw (%rcx),%ymm6,%ymm2
	vpunpcklwd %ymm4,%ymm6,%ymm2
	vpunpcklwd (%rcx),%ymm6,%ymm2
	vpunpckldq %ymm4,%ymm6,%ymm2
	vpunpckldq (%rcx),%ymm6,%ymm2
	vpunpcklqdq %ymm4,%ymm6,%ymm2
	vpunpcklqdq (%rcx),%ymm6,%ymm2
	vpxor %ymm4,%ymm6,%ymm2
	vpxor (%rcx),%ymm6,%ymm2
# Tests for op ymm/mem256, ymm
	vpabsb %ymm4,%ymm6
	vpabsb (%rcx),%ymm4
	vpabsw %ymm4,%ymm6
	vpabsw (%rcx),%ymm4
	vpabsd %ymm4,%ymm6
	vpabsd (%rcx),%ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
	vmpsadbw $7,%ymm4,%ymm6,%ymm2
	vmpsadbw $7,(%rcx),%ymm6,%ymm2
	vpalignr $7,%ymm4,%ymm6,%ymm2
	vpalignr $7,(%rcx),%ymm6,%ymm2
	vpblendw $7,%ymm4,%ymm6,%ymm2
	vpblendw $7,(%rcx),%ymm6,%ymm2
# Tests for op ymm, ymm/mem256, ymm, ymm
	vpblendvb %ymm4,%ymm6,%ymm2,%ymm7
	vpblendvb %ymm4,(%rcx),%ymm2,%ymm7
# Tests for op xmm/mem128, ymm, ymm
	vpsllw %xmm4,%ymm6,%ymm2
	vpsllw (%rcx),%ymm6,%ymm2
	vpslld %xmm4,%ymm6,%ymm2
	vpslld (%rcx),%ymm6,%ymm2
	vpsllq %xmm4,%ymm6,%ymm2
	vpsllq (%rcx),%ymm6,%ymm2
	vpsraw %xmm4,%ymm6,%ymm2
	vpsraw (%rcx),%ymm6,%ymm2
	vpsrad %xmm4,%ymm6,%ymm2
	vpsrad (%rcx),%ymm6,%ymm2
	vpsrlw %xmm4,%ymm6,%ymm2
	vpsrlw (%rcx),%ymm6,%ymm2
	vpsrld %xmm4,%ymm6,%ymm2
	vpsrld (%rcx),%ymm6,%ymm2
	vpsrlq %xmm4,%ymm6,%ymm2
	vpsrlq (%rcx),%ymm6,%ymm2
# Tests for op xmm/mem128, ymm
	vpmovsxbw %xmm4,%ymm4
	vpmovsxbw (%rcx),%ymm4
	vpmovsxwd %xmm4,%ymm4
	vpmovsxwd (%rcx),%ymm4
	vpmovsxdq %xmm4,%ymm4
	vpmovsxdq (%rcx),%ymm4
	vpmovzxbw %xmm4,%ymm4
	vpmovzxbw (%rcx),%ymm4
	vpmovzxwd %xmm4,%ymm4
	vpmovzxwd (%rcx),%ymm4
	vpmovzxdq %xmm4,%ymm4
	vpmovzxdq (%rcx),%ymm4
# Tests for op xmm/mem64, ymm
	vpmovsxbd %xmm4,%ymm6
	vpmovsxbd (%rcx),%ymm4
	vpmovsxwq %xmm4,%ymm6
	vpmovsxwq (%rcx),%ymm4
	vpmovzxbd %xmm4,%ymm6
	vpmovzxbd (%rcx),%ymm4
	vpmovzxwq %xmm4,%ymm6
	vpmovzxwq (%rcx),%ymm4
# Tests for op xmm/mem32, ymm
	vpmovsxbq %xmm4,%ymm4
	vpmovsxbq (%rcx),%ymm4
	vpmovzxbq %xmm4,%ymm4
	vpmovzxbq (%rcx),%ymm4
.intel_syntax noprefix
# Tests for op ymm, regl
	vpmovmskb ecx,ymm4
# Tests for op ymm, regq
	vpmovmskb rcx,ymm4
# Tests for op imm8, ymm, ymm
	vpslld ymm2,ymm6,7
	vpslldq ymm2,ymm6,7
	vpsllq ymm2,ymm6,7
	vpsllw ymm2,ymm6,7
	vpsrad ymm2,ymm6,7
	vpsraw ymm2,ymm6,7
	vpsrld ymm2,ymm6,7
	vpsrldq ymm2,ymm6,7
	vpsrlq ymm2,ymm6,7
	vpsrlw ymm2,ymm6,7
# Tests for op imm8, ymm/mem256, ymm
	vpshufd ymm2,ymm6,7
	vpshufd ymm6,YMMWORD PTR [rcx],7
	vpshufd ymm6,[rcx],7
	vpshufhw ymm2,ymm6,7
	vpshufhw ymm6,YMMWORD PTR [rcx],7
	vpshufhw ymm6,[rcx],7
	vpshuflw ymm2,ymm6,7
	vpshuflw ymm6,YMMWORD PTR [rcx],7
	vpshuflw ymm6,[rcx],7
# Tests for op ymm/mem256, ymm, ymm
	vpackssdw ymm2,ymm6,ymm4
	vpackssdw ymm2,ymm6,YMMWORD PTR [rcx]
	vpackssdw ymm2,ymm6,[rcx]
	vpacksswb ymm2,ymm6,ymm4
	vpacksswb ymm2,ymm6,YMMWORD PTR [rcx]
	vpacksswb ymm2,ymm6,[rcx]
	vpackusdw ymm2,ymm6,ymm4
	vpackusdw ymm2,ymm6,YMMWORD PTR [rcx]
	vpackusdw ymm2,ymm6,[rcx]
	vpackuswb ymm2,ymm6,ymm4
	vpackuswb ymm2,ymm6,YMMWORD PTR [rcx]
	vpackuswb ymm2,ymm6,[rcx]
	vpaddb ymm2,ymm6,ymm4
	vpaddb ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddb ymm2,ymm6,[rcx]
	vpaddw ymm2,ymm6,ymm4
	vpaddw ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddw ymm2,ymm6,[rcx]
	vpaddd ymm2,ymm6,ymm4
	vpaddd ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddd ymm2,ymm6,[rcx]
	vpaddq ymm2,ymm6,ymm4
	vpaddq ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddq ymm2,ymm6,[rcx]
	vpaddsb ymm2,ymm6,ymm4
	vpaddsb ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddsb ymm2,ymm6,[rcx]
	vpaddsw ymm2,ymm6,ymm4
	vpaddsw ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddsw ymm2,ymm6,[rcx]
	vpaddusb ymm2,ymm6,ymm4
	vpaddusb ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddusb ymm2,ymm6,[rcx]
	vpaddusw ymm2,ymm6,ymm4
	vpaddusw ymm2,ymm6,YMMWORD PTR [rcx]
	vpaddusw ymm2,ymm6,[rcx]
	vpand ymm2,ymm6,ymm4
	vpand ymm2,ymm6,YMMWORD PTR [rcx]
	vpand ymm2,ymm6,[rcx]
	vpandn ymm2,ymm6,ymm4
	vpandn ymm2,ymm6,YMMWORD PTR [rcx]
	vpandn ymm2,ymm6,[rcx]
	vpavgb ymm2,ymm6,ymm4
	vpavgb ymm2,ymm6,YMMWORD PTR [rcx]
	vpavgb ymm2,ymm6,[rcx]
	vpavgw ymm2,ymm6,ymm4
	vpavgw ymm2,ymm6,YMMWORD PTR [rcx]
	vpavgw ymm2,ymm6,[rcx]
	vpcmpeqb ymm2,ymm6,ymm4
	vpcmpeqb ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpeqb ymm2,ymm6,[rcx]
	vpcmpeqw ymm2,ymm6,ymm4
	vpcmpeqw ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpeqw ymm2,ymm6,[rcx]
	vpcmpeqd ymm2,ymm6,ymm4
	vpcmpeqd ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpeqd ymm2,ymm6,[rcx]
	vpcmpeqq ymm2,ymm6,ymm4
	vpcmpeqq ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpeqq ymm2,ymm6,[rcx]
	vpcmpgtb ymm2,ymm6,ymm4
	vpcmpgtb ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpgtb ymm2,ymm6,[rcx]
	vpcmpgtw ymm2,ymm6,ymm4
	vpcmpgtw ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpgtw ymm2,ymm6,[rcx]
	vpcmpgtd ymm2,ymm6,ymm4
	vpcmpgtd ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpgtd ymm2,ymm6,[rcx]
	vpcmpgtq ymm2,ymm6,ymm4
	vpcmpgtq ymm2,ymm6,YMMWORD PTR [rcx]
	vpcmpgtq ymm2,ymm6,[rcx]
	vphaddw ymm2,ymm6,ymm4
	vphaddw ymm2,ymm6,YMMWORD PTR [rcx]
	vphaddw ymm2,ymm6,[rcx]
	vphaddd ymm2,ymm6,ymm4
	vphaddd ymm2,ymm6,YMMWORD PTR [rcx]
	vphaddd ymm2,ymm6,[rcx]
	vphaddsw ymm2,ymm6,ymm4
	vphaddsw ymm2,ymm6,YMMWORD PTR [rcx]
	vphaddsw ymm2,ymm6,[rcx]
	vphsubw ymm2,ymm6,ymm4
	vphsubw ymm2,ymm6,YMMWORD PTR [rcx]
	vphsubw ymm2,ymm6,[rcx]
	vphsubd ymm2,ymm6,ymm4
	vphsubd ymm2,ymm6,YMMWORD PTR [rcx]
	vphsubd ymm2,ymm6,[rcx]
	vphsubsw ymm2,ymm6,ymm4
	vphsubsw ymm2,ymm6,YMMWORD PTR [rcx]
	vphsubsw ymm2,ymm6,[rcx]
	vpmaddwd ymm2,ymm6,ymm4
	vpmaddwd ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaddwd ymm2,ymm6,[rcx]
	vpmaddubsw ymm2,ymm6,ymm4
	vpmaddubsw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaddubsw ymm2,ymm6,[rcx]
	vpmaxsb ymm2,ymm6,ymm4
	vpmaxsb ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaxsb ymm2,ymm6,[rcx]
	vpmaxsw ymm2,ymm6,ymm4
	vpmaxsw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaxsw ymm2,ymm6,[rcx]
	vpmaxsd ymm2,ymm6,ymm4
	vpmaxsd ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaxsd ymm2,ymm6,[rcx]
	vpmaxub ymm2,ymm6,ymm4
	vpmaxub ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaxub ymm2,ymm6,[rcx]
	vpmaxuw ymm2,ymm6,ymm4
	vpmaxuw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaxuw ymm2,ymm6,[rcx]
	vpmaxud ymm2,ymm6,ymm4
	vpmaxud ymm2,ymm6,YMMWORD PTR [rcx]
	vpmaxud ymm2,ymm6,[rcx]
	vpminsb ymm2,ymm6,ymm4
	vpminsb ymm2,ymm6,YMMWORD PTR [rcx]
	vpminsb ymm2,ymm6,[rcx]
	vpminsw ymm2,ymm6,ymm4
	vpminsw ymm2,ymm6,YMMWORD PTR [rcx]
	vpminsw ymm2,ymm6,[rcx]
	vpminsd ymm2,ymm6,ymm4
	vpminsd ymm2,ymm6,YMMWORD PTR [rcx]
	vpminsd ymm2,ymm6,[rcx]
	vpminub ymm2,ymm6,ymm4
	vpminub ymm2,ymm6,YMMWORD PTR [rcx]
	vpminub ymm2,ymm6,[rcx]
	vpminuw ymm2,ymm6,ymm4
	vpminuw ymm2,ymm6,YMMWORD PTR [rcx]
	vpminuw ymm2,ymm6,[rcx]
	vpminud ymm2,ymm6,ymm4
	vpminud ymm2,ymm6,YMMWORD PTR [rcx]
	vpminud ymm2,ymm6,[rcx]
	vpmulhuw ymm2,ymm6,ymm4
	vpmulhuw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmulhuw ymm2,ymm6,[rcx]
	vpmulhrsw ymm2,ymm6,ymm4
	vpmulhrsw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmulhrsw ymm2,ymm6,[rcx]
	vpmulhw ymm2,ymm6,ymm4
	vpmulhw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmulhw ymm2,ymm6,[rcx]
	vpmullw ymm2,ymm6,ymm4
	vpmullw ymm2,ymm6,YMMWORD PTR [rcx]
	vpmullw ymm2,ymm6,[rcx]
	vpmulld ymm2,ymm6,ymm4
	vpmulld ymm2,ymm6,YMMWORD PTR [rcx]
	vpmulld ymm2,ymm6,[rcx]
	vpmuludq ymm2,ymm6,ymm4
	vpmuludq ymm2,ymm6,YMMWORD PTR [rcx]
	vpmuludq ymm2,ymm6,[rcx]
	vpmuldq ymm2,ymm6,ymm4
	vpmuldq ymm2,ymm6,YMMWORD PTR [rcx]
	vpmuldq ymm2,ymm6,[rcx]
	vpor ymm2,ymm6,ymm4
	vpor ymm2,ymm6,YMMWORD PTR [rcx]
	vpor ymm2,ymm6,[rcx]
	vpsadbw ymm2,ymm6,ymm4
	vpsadbw ymm2,ymm6,YMMWORD PTR [rcx]
	vpsadbw ymm2,ymm6,[rcx]
	vpshufb ymm2,ymm6,ymm4
	vpshufb ymm2,ymm6,YMMWORD PTR [rcx]
	vpshufb ymm2,ymm6,[rcx]
	vpsignb ymm2,ymm6,ymm4
	vpsignb ymm2,ymm6,YMMWORD PTR [rcx]
	vpsignb ymm2,ymm6,[rcx]
	vpsignw ymm2,ymm6,ymm4
	vpsignw ymm2,ymm6,YMMWORD PTR [rcx]
	vpsignw ymm2,ymm6,[rcx]
	vpsignd ymm2,ymm6,ymm4
	vpsignd ymm2,ymm6,YMMWORD PTR [rcx]
	vpsignd ymm2,ymm6,[rcx]
	vpsubb ymm2,ymm6,ymm4
	vpsubb ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubb ymm2,ymm6,[rcx]
	vpsubw ymm2,ymm6,ymm4
	vpsubw ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubw ymm2,ymm6,[rcx]
	vpsubd ymm2,ymm6,ymm4
	vpsubd ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubd ymm2,ymm6,[rcx]
	vpsubq ymm2,ymm6,ymm4
	vpsubq ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubq ymm2,ymm6,[rcx]
	vpsubsb ymm2,ymm6,ymm4
	vpsubsb ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubsb ymm2,ymm6,[rcx]
	vpsubsw ymm2,ymm6,ymm4
	vpsubsw ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubsw ymm2,ymm6,[rcx]
	vpsubusb ymm2,ymm6,ymm4
	vpsubusb ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubusb ymm2,ymm6,[rcx]
	vpsubusw ymm2,ymm6,ymm4
	vpsubusw ymm2,ymm6,YMMWORD PTR [rcx]
	vpsubusw ymm2,ymm6,[rcx]
	vpunpckhbw ymm2,ymm6,ymm4
	vpunpckhbw ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpckhbw ymm2,ymm6,[rcx]
	vpunpckhwd ymm2,ymm6,ymm4
	vpunpckhwd ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpckhwd ymm2,ymm6,[rcx]
	vpunpckhdq ymm2,ymm6,ymm4
	vpunpckhdq ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpckhdq ymm2,ymm6,[rcx]
	vpunpckhqdq ymm2,ymm6,ymm4
	vpunpckhqdq ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpckhqdq ymm2,ymm6,[rcx]
	vpunpcklbw ymm2,ymm6,ymm4
	vpunpcklbw ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpcklbw ymm2,ymm6,[rcx]
	vpunpcklwd ymm2,ymm6,ymm4
	vpunpcklwd ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpcklwd ymm2,ymm6,[rcx]
	vpunpckldq ymm2,ymm6,ymm4
	vpunpckldq ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpckldq ymm2,ymm6,[rcx]
	vpunpcklqdq ymm2,ymm6,ymm4
	vpunpcklqdq ymm2,ymm6,YMMWORD PTR [rcx]
	vpunpcklqdq ymm2,ymm6,[rcx]
	vpxor ymm2,ymm6,ymm4
	vpxor ymm2,ymm6,YMMWORD PTR [rcx]
	vpxor ymm2,ymm6,[rcx]
# Tests for op ymm/mem256, ymm
	vpabsb ymm6,ymm4
	vpabsb ymm4,YMMWORD PTR [rcx]
	vpabsb ymm4,[rcx]
	vpabsw ymm6,ymm4
	vpabsw ymm4,YMMWORD PTR [rcx]
	vpabsw ymm4,[rcx]
	vpabsd ymm6,ymm4
	vpabsd ymm4,YMMWORD PTR [rcx]
	vpabsd ymm4,[rcx]
# Tests for op imm8, ymm/mem256, ymm, ymm
	vmpsadbw ymm2,ymm6,ymm4,7
	vmpsadbw ymm2,ymm6,YMMWORD PTR [rcx],7
	vmpsadbw ymm2,ymm6,[rcx],7
	vpalignr ymm2,ymm6,ymm4,7
	vpalignr ymm2,ymm6,YMMWORD PTR [rcx],7
	vpalignr ymm2,ymm6,[rcx],7
	vpblendw ymm2,ymm6,ymm4,7
	vpblendw ymm2,ymm6,YMMWORD PTR [rcx],7
	vpblendw ymm2,ymm6,[rcx],7
# Tests for op ymm, ymm/mem256, ymm, ymm
	vpblendvb ymm7,ymm2,ymm6,ymm4
	vpblendvb ymm7,ymm2,YMMWORD PTR [rcx],ymm4
	vpblendvb ymm7,ymm2,[rcx],ymm4
# Tests for op xmm/mem128, ymm, ymm
	vpsllw ymm2,ymm6,xmm4
	vpsllw ymm2,ymm6,XMMWORD PTR [rcx]
	vpsllw ymm2,ymm6,[rcx]
	vpslld ymm2,ymm6,xmm4
	vpslld ymm2,ymm6,XMMWORD PTR [rcx]
	vpslld ymm2,ymm6,[rcx]
	vpsllq ymm2,ymm6,xmm4
	vpsllq ymm2,ymm6,XMMWORD PTR [rcx]
	vpsllq ymm2,ymm6,[rcx]
	vpsraw ymm2,ymm6,xmm4
	vpsraw ymm2,ymm6,XMMWORD PTR [rcx]
	vpsraw ymm2,ymm6,[rcx]
	vpsrad ymm2,ymm6,xmm4
	vpsrad ymm2,ymm6,XMMWORD PTR [rcx]
	vpsrad ymm2,ymm6,[rcx]
	vpsrlw ymm2,ymm6,xmm4
	vpsrlw ymm2,ymm6,XMMWORD PTR [rcx]
	vpsrlw ymm2,ymm6,[rcx]
	vpsrld ymm2,ymm6,xmm4
	vpsrld ymm2,ymm6,XMMWORD PTR [rcx]
	vpsrld ymm2,ymm6,[rcx]
	vpsrlq ymm2,ymm6,xmm4
	vpsrlq ymm2,ymm6,XMMWORD PTR [rcx]
	vpsrlq ymm2,ymm6,[rcx]
# Tests for op xmm/mem128, ymm
	vpmovsxbw ymm4,xmm4
	vpmovsxbw ymm4,XMMWORD PTR [rcx]
	vpmovsxbw ymm4,[rcx]
	vpmovsxwd ymm4,xmm4
	vpmovsxwd ymm4,XMMWORD PTR [rcx]
	vpmovsxwd ymm4,[rcx]
	vpmovsxdq ymm4,xmm4
	vpmovsxdq ymm4,XMMWORD PTR [rcx]
	vpmovsxdq ymm4,[rcx]
	vpmovzxbw ymm4,xmm4
	vpmovzxbw ymm4,XMMWORD PTR [rcx]
	vpmovzxbw ymm4,[rcx]
	vpmovzxwd ymm4,xmm4
	vpmovzxwd ymm4,XMMWORD PTR [rcx]
	vpmovzxwd ymm4,[rcx]
	vpmovzxdq ymm4,xmm4
	vpmovzxdq ymm4,XMMWORD PTR [rcx]
	vpmovzxdq ymm4,[rcx]
# Tests for op xmm/mem64, ymm
	vpmovsxbd ymm6,xmm4
	vpmovsxbd ymm4,QWORD PTR [rcx]
	vpmovsxbd ymm4,[rcx]
	vpmovsxwq ymm6,xmm4
	vpmovsxwq ymm4,QWORD PTR [rcx]
	vpmovsxwq ymm4,[rcx]
	vpmovzxbd ymm6,xmm4
	vpmovzxbd ymm4,QWORD PTR [rcx]
	vpmovzxbd ymm4,[rcx]
	vpmovzxwq ymm6,xmm4
	vpmovzxwq ymm4,QWORD PTR [rcx]
	vpmovzxwq ymm4,[rcx]
# Tests for op xmm/mem32, ymm
	vpmovsxbq ymm4,xmm4
	vpmovsxbq ymm4,DWORD PTR [rcx]
	vpmovsxbq ymm4,[rcx]
	vpmovzxbq ymm4,xmm4
	vpmovzxbq ymm4,DWORD PTR [rcx]
	vpmovzxbq ymm4,[rcx]
# ----------------------------------------------------------------------
# Source: gas/testsuite/gas/i386/avx512bw_vl.s
# Repository: tactcomplabs/xbgas-binutils-gdb (file size: 159,280 bytes)
# ----------------------------------------------------------------------
# Check 32bit AVX512{BW,VL} instructions
# Assembler test fixture: every instruction line below is a test input
# whose generated encoding is compared against an expected objdump
# listing, so the operand text is intentionally exhaustive — register,
# memory, broadcast {1toN}, Disp8-compressible and non-compressible
# displacements, masked {%k7} and zero-masked {%k7}{z} forms.
.allow_index_reg
.text
_start:
vpabsb %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsb %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsb 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsb 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsw 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsw 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpackssdw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackssdw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackssdw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackusdw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackusdw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpblendmb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpblendmb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastb (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb 127(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb 128(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb -128(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb -129(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastb (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb 127(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb 128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb -128(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb -129(%edx), %ymm6{%k7} # AVX512{BW,VL}
	# vpbroadcastb with a GPR source: broadcasts the low byte of the
	# 32-bit register operand (AVX512BW EVEX-encoded GPR form); %ebp
	# exercises an encoding that needs no SIB/displacement special case.
	vpbroadcastb	%eax, %xmm6{%k7}	 # AVX512{BW,VL}
	vpbroadcastb	%eax, %xmm6{%k7}{z}	 # AVX512{BW,VL}
	vpbroadcastb	%ebp, %xmm6{%k7}	 # AVX512{BW,VL}
	vpbroadcastb	%eax, %ymm6{%k7}	 # AVX512{BW,VL}
	vpbroadcastb	%eax, %ymm6{%k7}{z}	 # AVX512{BW,VL}
	vpbroadcastb	%ebp, %ymm6{%k7}	 # AVX512{BW,VL}
vpbroadcastw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw 254(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw 256(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw -256(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw -258(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw 254(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw 256(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw -256(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw -258(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw %ebp, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw %ebp, %ymm6{%k7} # AVX512{BW,VL}
vpcmpeqb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpblendmw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpblendmw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpblendmw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxub (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxub (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminub (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminub 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminub -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminub (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminub 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
# --- Assembler testsuite fixture: AVX-512 BW + VL instruction forms ---
# Each line below is paired with an expected EVEX encoding in the
# corresponding .d dump file, so the instruction text must stay exactly
# as written (do not reorder, reformat, or rename operands).
# Conventions used throughout:
#   {%k7}     merging-masking with mask register k7
#   {%k7}{z}  zeroing-masking with mask register k7
# Displacements are chosen to straddle EVEX Disp8*N compression limits:
# values tagged "Disp8" (e.g. 2032/-2048 for 128-bit, 4064/-4096 for
# 256-bit memory operands) fit a scaled 8-bit displacement, while the
# next step outward (2048/-2064, 4096/-4128) forces a 32-bit one.
vpminub -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovsxbw (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw 1016(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmovsxbw (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw 2032(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovzxbw (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw 1016(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmovzxbw (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw 2032(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhrsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhrsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmullw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmullw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshufb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshufb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshufhw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshufhw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshuflw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshuflw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlvw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlvw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsravw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsravw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhbw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhbw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklbw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklbw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovwb %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovwb %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovwb %ymm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovwb %ymm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovswb %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovswb %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovswb %ymm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovswb %ymm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovuswb %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovuswb %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovuswb %ymm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovuswb %ymm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vdbpsadbw $123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vdbpsadbw $123, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpermw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpermw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpermt2w (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpermt2w (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllvw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllvw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu8 (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu8 (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu16 (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu16 (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovwb %xmm6, (%ecx){%k7}	 # AVX512{BW,VL}
vpmovwb %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vpmovwb %xmm6, 1016(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovwb %xmm6, 1024(%edx){%k7}	 # AVX512{BW,VL}
vpmovwb %xmm6, -1024(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovwb %xmm6, -1032(%edx){%k7}	 # AVX512{BW,VL}
vpmovwb %ymm6, (%ecx){%k7}	 # AVX512{BW,VL}
vpmovwb %ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vpmovwb %ymm6, 2032(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovwb %ymm6, 2048(%edx){%k7}	 # AVX512{BW,VL}
vpmovwb %ymm6, -2048(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovwb %ymm6, -2064(%edx){%k7}	 # AVX512{BW,VL}
vpmovswb %xmm6, (%ecx){%k7}	 # AVX512{BW,VL}
vpmovswb %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vpmovswb %xmm6, 1016(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovswb %xmm6, 1024(%edx){%k7}	 # AVX512{BW,VL}
vpmovswb %xmm6, -1024(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovswb %xmm6, -1032(%edx){%k7}	 # AVX512{BW,VL}
vpmovswb %ymm6, (%ecx){%k7}	 # AVX512{BW,VL}
vpmovswb %ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vpmovswb %ymm6, 2032(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovswb %ymm6, 2048(%edx){%k7}	 # AVX512{BW,VL}
vpmovswb %ymm6, -2048(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovswb %ymm6, -2064(%edx){%k7}	 # AVX512{BW,VL}
vpmovuswb %xmm6, (%ecx){%k7}	 # AVX512{BW,VL}
vpmovuswb %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vpmovuswb %xmm6, 1016(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovuswb %xmm6, 1024(%edx){%k7}	 # AVX512{BW,VL}
vpmovuswb %xmm6, -1024(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovuswb %xmm6, -1032(%edx){%k7}	 # AVX512{BW,VL}
vpmovuswb %ymm6, (%ecx){%k7}	 # AVX512{BW,VL}
vpmovuswb %ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vpmovuswb %ymm6, 2032(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovuswb %ymm6, 2048(%edx){%k7}	 # AVX512{BW,VL}
vpmovuswb %ymm6, -2048(%edx){%k7}	 # AVX512{BW,VL} Disp8
vpmovuswb %ymm6, -2064(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm6, (%ecx){%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm6, 2032(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 %xmm6, 2048(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm6, -2048(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 %xmm6, -2064(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm6, (%ecx){%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm6, 4064(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 %ymm6, 4096(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm6, -4096(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 %ymm6, -4128(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm6, (%ecx){%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm6, 2032(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 %xmm6, 2048(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm6, -2048(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 %xmm6, -2064(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm6, (%ecx){%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm6, -123456(%esp,%esi,8){%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm6, 4064(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 %ymm6, 4096(%edx){%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm6, -4096(%edx){%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 %ymm6, -4128(%edx){%k7}	 # AVX512{BW,VL}
vpermi2w %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermi2w %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpermi2w (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermi2w -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermi2w 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermi2w 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermi2w -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermi2w -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermi2w %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermi2w %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpermi2w (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermi2w -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermi2w 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermi2w 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermi2w -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermi2w -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vptestmb %xmm5, %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb (%ecx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vptestmb 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vptestmb -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb %ymm5, %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb (%ecx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vptestmb 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmb -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vptestmb -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vptestmw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
# Mask/vector conversion forms: vpmov{b,w}2m extracts one mask bit per
# byte/word element into a k-register; vpmovm2{b,w} expands each mask bit
# back to an all-ones/all-zeros element.  Register-only — no memory forms.
	vpmovb2m	%xmm6, %k5	 # AVX512{BW,VL}
	vpmovb2m	%ymm6, %k5	 # AVX512{BW,VL}
	vpmovw2m	%xmm6, %k5	 # AVX512{BW,VL}
	vpmovw2m	%ymm6, %k5	 # AVX512{BW,VL}
	vpmovm2b	%k5, %xmm6	 # AVX512{BW,VL}
	vpmovm2b	%k5, %ymm6	 # AVX512{BW,VL}
	vpmovm2w	%k5, %xmm6	 # AVX512{BW,VL}
	vpmovm2w	%k5, %ymm6	 # AVX512{BW,VL}
vptestnmb %xmm4, %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb (%ecx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb 2032(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb 2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb -2064(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb %ymm4, %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb (%ecx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb 4064(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb 4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb -4128(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw %xmm4, %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw (%ecx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw 2032(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw 2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw -2064(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw %ymm4, %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw (%ecx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw 4064(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw 4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw -4128(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vpcmpb $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
# vpcmpb with an explicit $0 (eq) immediate, then the assembler's
# pseudo-op aliases (lt/le/neq/nle/nlt) which encode the predicate in
# the immediate byte.  The 0x7f0/0xfe0 displacements are the largest
# Disp8*N-compressible offsets for 16/32-byte operands; 0x800/0x1000
# just exceed them and force a full Disp32.
	vpcmpb	$0, %xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpb	$0, %ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpleb	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpleb	0x7f0(%eax), %xmm6, %k5	 # AVX512{BW,VL} Disp8
	vpcmpleb	0x800(%eax), %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpleb	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpleb	0xfe0(%eax), %ymm6, %k5	 # AVX512{BW,VL} Disp8
	vpcmpleb	0x1000(%eax), %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpltb	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpltb	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpneqb	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpneqb	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnleb	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnleb	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnltb	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnltb	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
vpcmpw $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
# vpcmpw (signed word compare) with explicit $0 immediate plus the
# lt/le/neq/nle/nlt pseudo-op aliases; Disp8*N boundary cases as above
# (0x7f0 = 127*16 for xmm, 0xfe0 = 127*32 for ymm).
	vpcmpw	$0, %xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpw	$0, %ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmplew	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmplew	0x7f0(%eax), %xmm6, %k5	 # AVX512{BW,VL} Disp8
	vpcmplew	0x800(%eax), %xmm6, %k5	 # AVX512{BW,VL}
	vpcmplew	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmplew	0xfe0(%eax), %ymm6, %k5	 # AVX512{BW,VL} Disp8
	vpcmplew	0x1000(%eax), %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpltw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpltw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpneqw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpneqw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnlew	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnlew	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnltw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnltw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
vpcmpub $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
# vpcmpub (unsigned byte compare) with explicit $0 immediate and the
# unsigned pseudo-op aliases (leu/ltu/nequ/nleu/nltu); register forms only.
	vpcmpub	$0, %xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpub	$0, %ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpleub	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpleub	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpltub	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpltub	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnequb	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnequb	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnleub	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnleub	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnltub	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnltub	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
vpcmpuw $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
# vpcmpuw (unsigned word compare) with explicit $0 immediate and the
# unsigned pseudo-op aliases (leu/ltu/nequ/nleu/nltu); register forms only.
	vpcmpuw	$0, %xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpuw	$0, %ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpleuw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpleuw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpltuw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpltuw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnequw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnequw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnleuw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnleuw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
	vpcmpnltuw	%xmm5, %xmm6, %k5	 # AVX512{BW,VL}
	vpcmpnltuw	%ymm5, %ymm6, %k5	 # AVX512{BW,VL}
# From here on the same instruction set is exercised again in Intel
# syntax (dst-first operand order, no % register prefix, {k7}/{z} and
# {1toN} decorators attached to the operands).
	.intel_syntax noprefix
vpabsb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpabsb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpabsb xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpabsb xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpabsb ymm6{k7}, ymm5 # AVX512{BW,VL}
vpabsb ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpabsb ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpabsb ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpabsw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpabsw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpabsw xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpabsw xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpabsw ymm6{k7}, ymm5 # AVX512{BW,VL}
vpabsw ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpabsw ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpabsw ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackssdw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, [eax]{1to4} # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackssdw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, [eax]{1to8} # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpacksswb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpacksswb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackusdw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, [eax]{1to4} # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackusdw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, [eax]{1to8} # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackuswb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackuswb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vpalignr xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, xmm4, 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vpalignr ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, ymm4, 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpavgb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpavgb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpavgw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpavgw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [ecx] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [edx+127] # AVX512{BW,VL} Disp8
vpbroadcastb xmm6{k7}, BYTE PTR [edx+128] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [edx-128] # AVX512{BW,VL} Disp8
vpbroadcastb xmm6{k7}, BYTE PTR [edx-129] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastb ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [ecx] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [edx+127] # AVX512{BW,VL} Disp8
vpbroadcastb ymm6{k7}, BYTE PTR [edx+128] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [edx-128] # AVX512{BW,VL} Disp8
vpbroadcastb ymm6{k7}, BYTE PTR [edx-129] # AVX512{BW,VL}
# vpbroadcastb from a general-purpose register: the low byte of the
# 32-bit GPR is replicated into every byte element (eax exercises a
# no-REX-style encoding, ebp forces a different ModRM reg field).
	vpbroadcastb	xmm6{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastb	xmm6{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastb	xmm6{k7}, ebp	 # AVX512{BW,VL}
	vpbroadcastb	ymm6{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastb	ymm6{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastb	ymm6{k7}, ebp	 # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [ecx] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [edx+254] # AVX512{BW,VL} Disp8
vpbroadcastw xmm6{k7}, WORD PTR [edx+256] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [edx-256] # AVX512{BW,VL} Disp8
vpbroadcastw xmm6{k7}, WORD PTR [edx-258] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [ecx] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [edx+254] # AVX512{BW,VL} Disp8
vpbroadcastw ymm6{k7}, WORD PTR [edx+256] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [edx-256] # AVX512{BW,VL} Disp8
vpbroadcastw ymm6{k7}, WORD PTR [edx-258] # AVX512{BW,VL}
# vpbroadcastw from a general-purpose register: the low word of the
# 32-bit GPR is replicated into every word element; same eax/ebp
# encoding variations as the byte form above.
	vpbroadcastw	xmm6{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastw	xmm6{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastw	xmm6{k7}, ebp	 # AVX512{BW,VL}
	vpbroadcastw	ymm6{k7}, eax	 # AVX512{BW,VL}
	vpbroadcastw	ymm6{k7}{z}, eax	 # AVX512{BW,VL}
	vpbroadcastw	ymm6{k7}, ebp	 # AVX512{BW,VL}
# ---------------------------------------------------------------------------
# Intel-syntax encoding probes for AVX512{BW,VL} instructions on XMM/YMM
# operands.  Each mnemonic group exercises the same fixed pattern:
#   - register/register form with {k7} write-masking (and {z} zero-masking
#     where the destination is a vector register; mask-register destinations
#     such as k5 cannot take {z}),
#   - a plain base memory operand [ecx],
#   - a SIB form with a large negative displacement [esp+esi*8-123456],
#   - displacements chosen to sit exactly on either side of the EVEX
#     "Disp8*N" compressed-displacement range: for 16-byte (XMM) accesses
#     +/-2032 and -2048 are the last compressible values ("Disp8" in the
#     trailing comment) while +2048/-2064 force a full 32-bit displacement;
#     for 32-byte (YMM) accesses the boundary pair is +/-4064/4096 vs
#     +4096/-4128; for 8-byte (QWORD) accesses it is +/-1016/1024 vs
#     +1024/-1032.
# The expected-output file checks the exact encodings, so every instruction
# line below must stay byte-for-byte unchanged; only comments may be edited.
# ---------------------------------------------------------------------------

# vpcmpeqb: packed byte equality compare, result written to mask k5.
vpcmpeqb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpcmpeqw: packed word equality compare, result written to mask k5.
vpcmpeqw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpcmpgtb: packed signed byte greater-than compare into mask k5.
vpcmpgtb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpcmpgtw: packed signed word greater-than compare into mask k5.
vpcmpgtw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpblendmw: word blend under mask; vector destination, so {z} form included.
vpblendmw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpmaddubsw: multiply unsigned by signed bytes, add adjacent word pairs.
vpmaddubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpmaddwd: multiply signed words, add adjacent dword pairs.
vpmaddwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpmaxsb / vpmaxsw / vpmaxub / vpmaxuw: packed signed/unsigned byte/word maximum.
vpmaxsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxub xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxub ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpminsb / vpminsw / vpminub / vpminuw: packed signed/unsigned byte/word minimum.
vpminsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminub xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminub ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpmovsxbw / vpmovzxbw: sign/zero-extend bytes to words.  Memory element
# size is half the destination width, so the xmm form reads a QWORD (and the
# Disp8*N boundary moves to +/-1016/1024), the ymm form an XMMWORD.
vpmovsxbw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovsxbw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [ecx] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [edx+1016] # AVX512{BW,VL} Disp8
vpmovsxbw xmm6{k7}, QWORD PTR [edx+1024] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [edx-1024] # AVX512{BW,VL} Disp8
vpmovsxbw xmm6{k7}, QWORD PTR [edx-1032] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpmovsxbw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovzxbw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [ecx] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [edx+1016] # AVX512{BW,VL} Disp8
vpmovzxbw xmm6{k7}, QWORD PTR [edx+1024] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [edx-1024] # AVX512{BW,VL} Disp8
vpmovzxbw xmm6{k7}, QWORD PTR [edx-1032] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpmovzxbw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
# vpmulhrsw / vpmulhuw / vpmulhw / vpmullw: packed word multiply variants
# (rounded-high, unsigned-high, signed-high, low).
vpmulhrsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhrsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhrsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmullw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmullw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpshufb: byte shuffle via control bytes in the third operand.
vpshufb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpshufhw / vpshuflw: word shuffles with an 8-bit immediate control
# (tested with both 0xab and decimal 123 forms).
vpshufhw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# vpsllw / vpsraw / vpsrlw with an XMM/memory shift count: note the ymm
# destination forms still take an XMMWORD count operand.
vpsllw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
# vpsrlw / vpsraw immediate-count forms; the second operand may be memory
# (source shifted by imm8 into the destination).
vpsrlw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# vpsrlvw / vpsravw: per-element variable word shifts (AVX512BW-only forms).
vpsrlvw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsrlvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsravw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsravw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpsubb / vpsubsb / vpsubsw / vpsubusb / vpsubusw / vpsubw:
# packed subtract — plain, signed-saturating, and unsigned-saturating.
vpsubb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmovwb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovwb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovwb xmm6{k7}, ymm5 # AVX512{BW,VL}
vpmovwb xmm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}, ymm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}, ymm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}{z}, ymm5 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vdbpsadbw xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, xmm4, 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vdbpsadbw ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, ymm4, 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpermw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpermw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpermt2w xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpermt2w ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsllvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsllvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmovwb QWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovwb QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovwb QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovwb XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovwb XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovswb QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovswb QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovswb XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovswb XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovuswb QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovuswb QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovuswb XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovuswb XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu8 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu8 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu8 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu8 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu16 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu16 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu16 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu16 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpermi2w xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpermi2w ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmovb2m k5, xmm6 # AVX512{BW,VL}
vpmovb2m k5, ymm6 # AVX512{BW,VL}
vpmovw2m k5, xmm6 # AVX512{BW,VL}
vpmovw2m k5, ymm6 # AVX512{BW,VL}
vpmovm2b xmm6, k5 # AVX512{BW,VL}
vpmovm2b ymm6, k5 # AVX512{BW,VL}
vpmovm2w xmm6, k5 # AVX512{BW,VL}
vpmovm2w ymm6, k5 # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, xmm4 # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, ymm4 # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, xmm4 # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, ymm4 # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
#----------------------------------------------------------------------
# NOTE(review): file boundary — the lines below are a second,
# concatenated test source: gas/testsuite/gas/i386/avx256int.s
# (from repo tactcomplabs/xbgas-binutils-gdb, 15,895 bytes).
# The raw metadata lines that stood here were not valid assembly
# and would have caused assembler errors; they are preserved as
# comments instead.
#----------------------------------------------------------------------
# Check i386 256bit integer AVX instructions
.allow_index_reg
.text
_start:
# Tests for op ymm, regl
vpmovmskb %ymm4,%ecx
# Tests for op imm8, ymm, ymm
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
# Tests for op imm8, ymm/mem256, ymm
vpshufd $7,%ymm6,%ymm2
vpshufd $7,(%ecx),%ymm6
vpshufhw $7,%ymm6,%ymm2
vpshufhw $7,(%ecx),%ymm6
vpshuflw $7,%ymm6,%ymm2
vpshuflw $7,(%ecx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vpackssdw %ymm4,%ymm6,%ymm2
vpackssdw (%ecx),%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpacksswb (%ecx),%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackusdw (%ecx),%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
vpackuswb (%ecx),%ymm6,%ymm2
vpaddb %ymm4,%ymm6,%ymm2
vpaddb (%ecx),%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
vpaddw (%ecx),%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddd (%ecx),%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddq (%ecx),%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsb (%ecx),%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddsw (%ecx),%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusb (%ecx),%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddusw (%ecx),%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpand (%ecx),%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpandn (%ecx),%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgb (%ecx),%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpavgw (%ecx),%ymm6,%ymm2
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqb (%ecx),%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpeqw (%ecx),%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqd (%ecx),%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqq (%ecx),%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtb (%ecx),%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
vpcmpgtw (%ecx),%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtd (%ecx),%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtq (%ecx),%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphaddw (%ecx),%ymm6,%ymm2
vphaddd %ymm4,%ymm6,%ymm2
vphaddd (%ecx),%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddsw (%ecx),%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
vphsubw (%ecx),%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubd (%ecx),%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubsw (%ecx),%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
vpmaddwd (%ecx),%ymm6,%ymm2
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddubsw (%ecx),%ymm6,%ymm2
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsb (%ecx),%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxsw (%ecx),%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsd (%ecx),%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxub (%ecx),%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpmaxuw (%ecx),%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxud (%ecx),%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsb (%ecx),%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminsw (%ecx),%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsd (%ecx),%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminub (%ecx),%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
vpminuw (%ecx),%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminud (%ecx),%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhuw (%ecx),%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhrsw (%ecx),%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulhw (%ecx),%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmullw (%ecx),%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmulld (%ecx),%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
vpmuludq (%ecx),%ymm6,%ymm2
vpmuldq %ymm4,%ymm6,%ymm2
vpmuldq (%ecx),%ymm6,%ymm2
vpor %ymm4,%ymm6,%ymm2
vpor (%ecx),%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpsadbw (%ecx),%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufb (%ecx),%ymm6,%ymm2
vpsignb %ymm4,%ymm6,%ymm2
vpsignb (%ecx),%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
vpsignw (%ecx),%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignd (%ecx),%ymm6,%ymm2
vpsubb %ymm4,%ymm6,%ymm2
vpsubb (%ecx),%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
vpsubw (%ecx),%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubd (%ecx),%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubq (%ecx),%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsb (%ecx),%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubsw (%ecx),%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusb (%ecx),%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubusw (%ecx),%ymm6,%ymm2
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhbw (%ecx),%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpckhwd (%ecx),%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhdq (%ecx),%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhqdq (%ecx),%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpcklbw (%ecx),%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpunpcklwd (%ecx),%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpckldq (%ecx),%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklqdq (%ecx),%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
vpxor (%ecx),%ymm6,%ymm2
# Tests for op ymm/mem256, ymm
vpabsb %ymm4,%ymm6
vpabsb (%ecx),%ymm4
vpabsw %ymm4,%ymm6
vpabsw (%ecx),%ymm4
vpabsd %ymm4,%ymm6
vpabsd (%ecx),%ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vmpsadbw $7,%ymm4,%ymm6,%ymm2
vmpsadbw $7,(%ecx),%ymm6,%ymm2
vpalignr $7,%ymm4,%ymm6,%ymm2
vpalignr $7,(%ecx),%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
vpblendw $7,(%ecx),%ymm6,%ymm2
# Tests for op ymm, ymm/mem256, ymm, ymm
vpblendvb %ymm4,%ymm6,%ymm2,%ymm7
vpblendvb %ymm4,(%ecx),%ymm2,%ymm7
# Tests for op xmm/mem128, ymm, ymm
vpsllw %xmm4,%ymm6,%ymm2
vpsllw (%ecx),%ymm6,%ymm2
vpslld %xmm4,%ymm6,%ymm2
vpslld (%ecx),%ymm6,%ymm2
vpsllq %xmm4,%ymm6,%ymm2
vpsllq (%ecx),%ymm6,%ymm2
vpsraw %xmm4,%ymm6,%ymm2
vpsraw (%ecx),%ymm6,%ymm2
vpsrad %xmm4,%ymm6,%ymm2
vpsrad (%ecx),%ymm6,%ymm2
vpsrlw %xmm4,%ymm6,%ymm2
vpsrlw (%ecx),%ymm6,%ymm2
vpsrld %xmm4,%ymm6,%ymm2
vpsrld (%ecx),%ymm6,%ymm2
vpsrlq %xmm4,%ymm6,%ymm2
vpsrlq (%ecx),%ymm6,%ymm2
# Tests for op xmm/mem128, ymm
vpmovsxbw %xmm4,%ymm4
vpmovsxbw (%ecx),%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwd (%ecx),%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxdq (%ecx),%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxbw (%ecx),%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwd (%ecx),%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxdq (%ecx),%ymm4
# Tests for op xmm/mem64, ymm
vpmovsxbd %xmm4,%ymm6
vpmovsxbd (%ecx),%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovsxwq (%ecx),%ymm4
vpmovzxbd %xmm4,%ymm6
vpmovzxbd (%ecx),%ymm4
vpmovzxwq %xmm4,%ymm6
vpmovzxwq (%ecx),%ymm4
# Tests for op xmm/mem32, ymm
vpmovsxbq %xmm4,%ymm4
vpmovsxbq (%ecx),%ymm4
vpmovzxbq %xmm4,%ymm4
vpmovzxbq (%ecx),%ymm4
.intel_syntax noprefix
# Tests for op ymm, regl
vpmovmskb ecx,ymm4
# Tests for op imm8, ymm, ymm
vpslld ymm2,ymm6,7
vpslldq ymm2,ymm6,7
vpsllq ymm2,ymm6,7
vpsllw ymm2,ymm6,7
vpsrad ymm2,ymm6,7
vpsraw ymm2,ymm6,7
vpsrld ymm2,ymm6,7
vpsrldq ymm2,ymm6,7
vpsrlq ymm2,ymm6,7
vpsrlw ymm2,ymm6,7
# Tests for op imm8, ymm/mem256, ymm
vpshufd ymm2,ymm6,7
vpshufd ymm6,YMMWORD PTR [ecx],7
vpshufd ymm6,[ecx],7
vpshufhw ymm2,ymm6,7
vpshufhw ymm6,YMMWORD PTR [ecx],7
vpshufhw ymm6,[ecx],7
vpshuflw ymm2,ymm6,7
vpshuflw ymm6,YMMWORD PTR [ecx],7
vpshuflw ymm6,[ecx],7
# Tests for op ymm/mem256, ymm, ymm
vpackssdw ymm2,ymm6,ymm4
vpackssdw ymm2,ymm6,YMMWORD PTR [ecx]
vpackssdw ymm2,ymm6,[ecx]
vpacksswb ymm2,ymm6,ymm4
vpacksswb ymm2,ymm6,YMMWORD PTR [ecx]
vpacksswb ymm2,ymm6,[ecx]
vpackusdw ymm2,ymm6,ymm4
vpackusdw ymm2,ymm6,YMMWORD PTR [ecx]
vpackusdw ymm2,ymm6,[ecx]
vpackuswb ymm2,ymm6,ymm4
vpackuswb ymm2,ymm6,YMMWORD PTR [ecx]
vpackuswb ymm2,ymm6,[ecx]
vpaddb ymm2,ymm6,ymm4
vpaddb ymm2,ymm6,YMMWORD PTR [ecx]
vpaddb ymm2,ymm6,[ecx]
vpaddw ymm2,ymm6,ymm4
vpaddw ymm2,ymm6,YMMWORD PTR [ecx]
vpaddw ymm2,ymm6,[ecx]
vpaddd ymm2,ymm6,ymm4
vpaddd ymm2,ymm6,YMMWORD PTR [ecx]
vpaddd ymm2,ymm6,[ecx]
vpaddq ymm2,ymm6,ymm4
vpaddq ymm2,ymm6,YMMWORD PTR [ecx]
vpaddq ymm2,ymm6,[ecx]
vpaddsb ymm2,ymm6,ymm4
vpaddsb ymm2,ymm6,YMMWORD PTR [ecx]
vpaddsb ymm2,ymm6,[ecx]
vpaddsw ymm2,ymm6,ymm4
vpaddsw ymm2,ymm6,YMMWORD PTR [ecx]
vpaddsw ymm2,ymm6,[ecx]
vpaddusb ymm2,ymm6,ymm4
vpaddusb ymm2,ymm6,YMMWORD PTR [ecx]
vpaddusb ymm2,ymm6,[ecx]
vpaddusw ymm2,ymm6,ymm4
vpaddusw ymm2,ymm6,YMMWORD PTR [ecx]
vpaddusw ymm2,ymm6,[ecx]
vpand ymm2,ymm6,ymm4
vpand ymm2,ymm6,YMMWORD PTR [ecx]
vpand ymm2,ymm6,[ecx]
vpandn ymm2,ymm6,ymm4
vpandn ymm2,ymm6,YMMWORD PTR [ecx]
vpandn ymm2,ymm6,[ecx]
vpavgb ymm2,ymm6,ymm4
vpavgb ymm2,ymm6,YMMWORD PTR [ecx]
vpavgb ymm2,ymm6,[ecx]
vpavgw ymm2,ymm6,ymm4
vpavgw ymm2,ymm6,YMMWORD PTR [ecx]
vpavgw ymm2,ymm6,[ecx]
vpcmpeqb ymm2,ymm6,ymm4
vpcmpeqb ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqb ymm2,ymm6,[ecx]
vpcmpeqw ymm2,ymm6,ymm4
vpcmpeqw ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqw ymm2,ymm6,[ecx]
vpcmpeqd ymm2,ymm6,ymm4
vpcmpeqd ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqd ymm2,ymm6,[ecx]
vpcmpeqq ymm2,ymm6,ymm4
vpcmpeqq ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpeqq ymm2,ymm6,[ecx]
vpcmpgtb ymm2,ymm6,ymm4
vpcmpgtb ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtb ymm2,ymm6,[ecx]
vpcmpgtw ymm2,ymm6,ymm4
vpcmpgtw ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtw ymm2,ymm6,[ecx]
vpcmpgtd ymm2,ymm6,ymm4
vpcmpgtd ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtd ymm2,ymm6,[ecx]
vpcmpgtq ymm2,ymm6,ymm4
vpcmpgtq ymm2,ymm6,YMMWORD PTR [ecx]
vpcmpgtq ymm2,ymm6,[ecx]
vphaddw ymm2,ymm6,ymm4
vphaddw ymm2,ymm6,YMMWORD PTR [ecx]
vphaddw ymm2,ymm6,[ecx]
vphaddd ymm2,ymm6,ymm4
vphaddd ymm2,ymm6,YMMWORD PTR [ecx]
vphaddd ymm2,ymm6,[ecx]
vphaddsw ymm2,ymm6,ymm4
vphaddsw ymm2,ymm6,YMMWORD PTR [ecx]
vphaddsw ymm2,ymm6,[ecx]
vphsubw ymm2,ymm6,ymm4
vphsubw ymm2,ymm6,YMMWORD PTR [ecx]
vphsubw ymm2,ymm6,[ecx]
vphsubd ymm2,ymm6,ymm4
vphsubd ymm2,ymm6,YMMWORD PTR [ecx]
vphsubd ymm2,ymm6,[ecx]
vphsubsw ymm2,ymm6,ymm4
vphsubsw ymm2,ymm6,YMMWORD PTR [ecx]
vphsubsw ymm2,ymm6,[ecx]
vpmaddwd ymm2,ymm6,ymm4
vpmaddwd ymm2,ymm6,YMMWORD PTR [ecx]
vpmaddwd ymm2,ymm6,[ecx]
vpmaddubsw ymm2,ymm6,ymm4
vpmaddubsw ymm2,ymm6,YMMWORD PTR [ecx]
vpmaddubsw ymm2,ymm6,[ecx]
vpmaxsb ymm2,ymm6,ymm4
vpmaxsb ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxsb ymm2,ymm6,[ecx]
vpmaxsw ymm2,ymm6,ymm4
vpmaxsw ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxsw ymm2,ymm6,[ecx]
vpmaxsd ymm2,ymm6,ymm4
vpmaxsd ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxsd ymm2,ymm6,[ecx]
vpmaxub ymm2,ymm6,ymm4
vpmaxub ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxub ymm2,ymm6,[ecx]
vpmaxuw ymm2,ymm6,ymm4
vpmaxuw ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxuw ymm2,ymm6,[ecx]
vpmaxud ymm2,ymm6,ymm4
vpmaxud ymm2,ymm6,YMMWORD PTR [ecx]
vpmaxud ymm2,ymm6,[ecx]
vpminsb ymm2,ymm6,ymm4
vpminsb ymm2,ymm6,YMMWORD PTR [ecx]
vpminsb ymm2,ymm6,[ecx]
vpminsw ymm2,ymm6,ymm4
vpminsw ymm2,ymm6,YMMWORD PTR [ecx]
vpminsw ymm2,ymm6,[ecx]
vpminsd ymm2,ymm6,ymm4
vpminsd ymm2,ymm6,YMMWORD PTR [ecx]
vpminsd ymm2,ymm6,[ecx]
vpminub ymm2,ymm6,ymm4
vpminub ymm2,ymm6,YMMWORD PTR [ecx]
vpminub ymm2,ymm6,[ecx]
vpminuw ymm2,ymm6,ymm4
vpminuw ymm2,ymm6,YMMWORD PTR [ecx]
vpminuw ymm2,ymm6,[ecx]
vpminud ymm2,ymm6,ymm4
vpminud ymm2,ymm6,YMMWORD PTR [ecx]
vpminud ymm2,ymm6,[ecx]
vpmulhuw ymm2,ymm6,ymm4
vpmulhuw ymm2,ymm6,YMMWORD PTR [ecx]
vpmulhuw ymm2,ymm6,[ecx]
vpmulhrsw ymm2,ymm6,ymm4
vpmulhrsw ymm2,ymm6,YMMWORD PTR [ecx]
vpmulhrsw ymm2,ymm6,[ecx]
vpmulhw ymm2,ymm6,ymm4
vpmulhw ymm2,ymm6,YMMWORD PTR [ecx]
vpmulhw ymm2,ymm6,[ecx]
vpmullw ymm2,ymm6,ymm4
vpmullw ymm2,ymm6,YMMWORD PTR [ecx]
vpmullw ymm2,ymm6,[ecx]
vpmulld ymm2,ymm6,ymm4
vpmulld ymm2,ymm6,YMMWORD PTR [ecx]
vpmulld ymm2,ymm6,[ecx]
vpmuludq ymm2,ymm6,ymm4
vpmuludq ymm2,ymm6,YMMWORD PTR [ecx]
vpmuludq ymm2,ymm6,[ecx]
vpmuldq ymm2,ymm6,ymm4
vpmuldq ymm2,ymm6,YMMWORD PTR [ecx]
vpmuldq ymm2,ymm6,[ecx]
vpor ymm2,ymm6,ymm4
vpor ymm2,ymm6,YMMWORD PTR [ecx]
vpor ymm2,ymm6,[ecx]
vpsadbw ymm2,ymm6,ymm4
vpsadbw ymm2,ymm6,YMMWORD PTR [ecx]
vpsadbw ymm2,ymm6,[ecx]
vpshufb ymm2,ymm6,ymm4
vpshufb ymm2,ymm6,YMMWORD PTR [ecx]
vpshufb ymm2,ymm6,[ecx]
vpsignb ymm2,ymm6,ymm4
vpsignb ymm2,ymm6,YMMWORD PTR [ecx]
vpsignb ymm2,ymm6,[ecx]
vpsignw ymm2,ymm6,ymm4
vpsignw ymm2,ymm6,YMMWORD PTR [ecx]
vpsignw ymm2,ymm6,[ecx]
vpsignd ymm2,ymm6,ymm4
vpsignd ymm2,ymm6,YMMWORD PTR [ecx]
vpsignd ymm2,ymm6,[ecx]
vpsubb ymm2,ymm6,ymm4
vpsubb ymm2,ymm6,YMMWORD PTR [ecx]
vpsubb ymm2,ymm6,[ecx]
vpsubw ymm2,ymm6,ymm4
vpsubw ymm2,ymm6,YMMWORD PTR [ecx]
vpsubw ymm2,ymm6,[ecx]
vpsubd ymm2,ymm6,ymm4
vpsubd ymm2,ymm6,YMMWORD PTR [ecx]
vpsubd ymm2,ymm6,[ecx]
vpsubq ymm2,ymm6,ymm4
vpsubq ymm2,ymm6,YMMWORD PTR [ecx]
vpsubq ymm2,ymm6,[ecx]
vpsubsb ymm2,ymm6,ymm4
vpsubsb ymm2,ymm6,YMMWORD PTR [ecx]
vpsubsb ymm2,ymm6,[ecx]
vpsubsw ymm2,ymm6,ymm4
vpsubsw ymm2,ymm6,YMMWORD PTR [ecx]
vpsubsw ymm2,ymm6,[ecx]
vpsubusb ymm2,ymm6,ymm4
vpsubusb ymm2,ymm6,YMMWORD PTR [ecx]
vpsubusb ymm2,ymm6,[ecx]
vpsubusw ymm2,ymm6,ymm4
vpsubusw ymm2,ymm6,YMMWORD PTR [ecx]
vpsubusw ymm2,ymm6,[ecx]
vpunpckhbw ymm2,ymm6,ymm4
vpunpckhbw ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhbw ymm2,ymm6,[ecx]
vpunpckhwd ymm2,ymm6,ymm4
vpunpckhwd ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhwd ymm2,ymm6,[ecx]
vpunpckhdq ymm2,ymm6,ymm4
vpunpckhdq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhdq ymm2,ymm6,[ecx]
vpunpckhqdq ymm2,ymm6,ymm4
vpunpckhqdq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckhqdq ymm2,ymm6,[ecx]
vpunpcklbw ymm2,ymm6,ymm4
vpunpcklbw ymm2,ymm6,YMMWORD PTR [ecx]
vpunpcklbw ymm2,ymm6,[ecx]
vpunpcklwd ymm2,ymm6,ymm4
vpunpcklwd ymm2,ymm6,YMMWORD PTR [ecx]
vpunpcklwd ymm2,ymm6,[ecx]
vpunpckldq ymm2,ymm6,ymm4
vpunpckldq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpckldq ymm2,ymm6,[ecx]
vpunpcklqdq ymm2,ymm6,ymm4
vpunpcklqdq ymm2,ymm6,YMMWORD PTR [ecx]
vpunpcklqdq ymm2,ymm6,[ecx]
vpxor ymm2,ymm6,ymm4
vpxor ymm2,ymm6,YMMWORD PTR [ecx]
vpxor ymm2,ymm6,[ecx]
# Tests for op ymm/mem256, ymm
vpabsb ymm6,ymm4
vpabsb ymm4,YMMWORD PTR [ecx]
vpabsb ymm4,[ecx]
vpabsw ymm6,ymm4
vpabsw ymm4,YMMWORD PTR [ecx]
vpabsw ymm4,[ecx]
vpabsd ymm6,ymm4
vpabsd ymm4,YMMWORD PTR [ecx]
vpabsd ymm4,[ecx]
# Tests for op imm8, ymm/mem256, ymm, ymm
vmpsadbw ymm2,ymm6,ymm4,7
vmpsadbw ymm2,ymm6,YMMWORD PTR [ecx],7
vmpsadbw ymm2,ymm6,[ecx],7
vpalignr ymm2,ymm6,ymm4,7
vpalignr ymm2,ymm6,YMMWORD PTR [ecx],7
vpalignr ymm2,ymm6,[ecx],7
vpblendw ymm2,ymm6,ymm4,7
vpblendw ymm2,ymm6,YMMWORD PTR [ecx],7
vpblendw ymm2,ymm6,[ecx],7
# Tests for op ymm, ymm/mem256, ymm, ymm
vpblendvb ymm7,ymm2,ymm6,ymm4
vpblendvb ymm7,ymm2,YMMWORD PTR [ecx],ymm4
vpblendvb ymm7,ymm2,[ecx],ymm4
# Tests for op xmm/mem128, ymm, ymm
vpsllw ymm2,ymm6,xmm4
vpsllw ymm2,ymm6,XMMWORD PTR [ecx]
vpsllw ymm2,ymm6,[ecx]
vpslld ymm2,ymm6,xmm4
vpslld ymm2,ymm6,XMMWORD PTR [ecx]
vpslld ymm2,ymm6,[ecx]
vpsllq ymm2,ymm6,xmm4
vpsllq ymm2,ymm6,XMMWORD PTR [ecx]
vpsllq ymm2,ymm6,[ecx]
vpsraw ymm2,ymm6,xmm4
vpsraw ymm2,ymm6,XMMWORD PTR [ecx]
vpsraw ymm2,ymm6,[ecx]
vpsrad ymm2,ymm6,xmm4
vpsrad ymm2,ymm6,XMMWORD PTR [ecx]
vpsrad ymm2,ymm6,[ecx]
vpsrlw ymm2,ymm6,xmm4
vpsrlw ymm2,ymm6,XMMWORD PTR [ecx]
vpsrlw ymm2,ymm6,[ecx]
vpsrld ymm2,ymm6,xmm4
vpsrld ymm2,ymm6,XMMWORD PTR [ecx]
vpsrld ymm2,ymm6,[ecx]
vpsrlq ymm2,ymm6,xmm4
vpsrlq ymm2,ymm6,XMMWORD PTR [ecx]
vpsrlq ymm2,ymm6,[ecx]
# Tests for op xmm/mem128, ymm
vpmovsxbw ymm4,xmm4
vpmovsxbw ymm4,XMMWORD PTR [ecx]
vpmovsxbw ymm4,[ecx]
vpmovsxwd ymm4,xmm4
vpmovsxwd ymm4,XMMWORD PTR [ecx]
vpmovsxwd ymm4,[ecx]
vpmovsxdq ymm4,xmm4
vpmovsxdq ymm4,XMMWORD PTR [ecx]
vpmovsxdq ymm4,[ecx]
vpmovzxbw ymm4,xmm4
vpmovzxbw ymm4,XMMWORD PTR [ecx]
vpmovzxbw ymm4,[ecx]
vpmovzxwd ymm4,xmm4
vpmovzxwd ymm4,XMMWORD PTR [ecx]
vpmovzxwd ymm4,[ecx]
vpmovzxdq ymm4,xmm4
vpmovzxdq ymm4,XMMWORD PTR [ecx]
vpmovzxdq ymm4,[ecx]
# Tests for op xmm/mem64, ymm
vpmovsxbd ymm6,xmm4
vpmovsxbd ymm4,QWORD PTR [ecx]
vpmovsxbd ymm4,[ecx]
vpmovsxwq ymm6,xmm4
vpmovsxwq ymm4,QWORD PTR [ecx]
vpmovsxwq ymm4,[ecx]
vpmovzxbd ymm6,xmm4
vpmovzxbd ymm4,QWORD PTR [ecx]
vpmovzxbd ymm4,[ecx]
vpmovzxwq ymm6,xmm4
vpmovzxwq ymm4,QWORD PTR [ecx]
vpmovzxwq ymm4,[ecx]
# Tests for op xmm/mem32, ymm
vpmovsxbq ymm4,xmm4
vpmovsxbq ymm4,DWORD PTR [ecx]
vpmovsxbq ymm4,[ecx]
vpmovzxbq ymm4,xmm4
vpmovzxbq ymm4,DWORD PTR [ecx]
vpmovzxbq ymm4,[ecx]
|
tactcomplabs/xbgas-binutils-gdb
| 1,374
|
gas/testsuite/gas/i386/x86-64-specific-reg.s
|
# 64bit insns with special register requirements
# gas testsuite input: exercises instructions whose operands are
# architecturally fixed registers (string ops, MONITOR/MWAIT, SVM insns),
# spelling the implicit operands explicitly with every 64-bit register.
.text
special:
# First pass: legacy-named 64-bit registers rax..rdi.
.irp reg1, ax, cx, dx, bx, sp, bp, si, di
lodsb %ds:(%r\reg1)
stosb %es:(%r\reg1)
scasb %es:(%r\reg1)
insb %dx, %es:(%r\reg1)
outsb %ds:(%r\reg1), %dx
xlatb %ds:(%r\reg1)
movsb %ds:(%r\reg1), %es:(%rdi)
movsb %ds:(%rsi), %es:(%r\reg1)
cmpsb %es:(%r\reg1), %ds:(%rsi)
cmpsb %es:(%rdi), %ds:(%r\reg1)
mwait %r\reg1, %rcx
mwait %rax, %r\reg1
monitor %r\reg1, %rcx, %rdx
monitor %rax, %r\reg1, %rdx
monitor %rax, %rcx, %r\reg1
vmload %r\reg1
vmrun %r\reg1
vmsave %r\reg1
invlpga %r\reg1, %ecx
invlpga %rax, %e\reg1
skinit %e\reg1
.endr
# Second pass: REX-numbered registers r8..r15 (32-bit views are spelled
# r<N>d, hence the \reg1\(d) concatenation below).
.irp reg1, 8, 9, 10, 11, 12, 13, 14, 15
lodsb %ds:(%r\reg1)
stosb %es:(%r\reg1)
scasb %es:(%r\reg1)
insb %dx, %es:(%r\reg1)
outsb %ds:(%r\reg1), %dx
xlatb %ds:(%r\reg1)
movsb %ds:(%r\reg1), %es:(%rdi)
movsb %ds:(%rsi), %es:(%r\reg1)
cmpsb %es:(%r\reg1), %ds:(%rsi)
cmpsb %es:(%rdi), %ds:(%r\reg1)
mwait %r\reg1, %rcx
mwait %rax, %r\reg1
monitor %r\reg1, %rcx, %rdx
monitor %rax, %r\reg1, %rdx
monitor %rax, %rcx, %r\reg1
vmload %r\reg1
vmrun %r\reg1
vmsave %r\reg1
invlpga %r\reg1, %ecx
invlpga %rax, %r\reg1\(d)
skinit %r\reg1\(d)
.endr
# SSE4.1 blend insns whose selector is implicitly xmm0, written here in the
# explicit three-operand form for every xmm register.
.irp n, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
blendvpd %xmm\n, %xmm\n, %xmm\n
blendvps %xmm\n, %xmm\n, %xmm\n
pblendvb %xmm\n, %xmm\n, %xmm\n
.endr
|
tactcomplabs/xbgas-binutils-gdb
| 1,256
|
gas/testsuite/gas/i386/sse-noavx.s
|
# Check SSE instructions without AVX equivalent
# gas testsuite input: every instruction below belongs to an SSE-family
# extension but has no VEX-encoded (AVX) counterpart — MMX-register forms,
# fences, prefetches, non-temporal stores, MONITOR/MWAIT, CRC32, POPCNT —
# so each must always assemble in its legacy encoding.
.text
_start:
crc32 %cl,%ebx
cvtpd2pi %xmm3,%mm2
cvtpi2pd %mm3,%xmm2
cvtpi2ps %mm3,%xmm2
cvtps2pi %xmm7,%mm6
cvttpd2pi %xmm4,%mm3
cvttps2pi %xmm4,%mm3
fisttps (%eax)
fisttpl (%eax)
fisttpll (%eax)
lfence
maskmovq %mm7,%mm0
mfence
monitor
movdq2q %xmm0, %mm1
movnti %eax, (%eax)
movntq %mm2,(%eax)
movq2dq %mm0, %xmm1
mwait
# MMX-register forms of SSSE3/SSE2 integer ops (AVX only covers xmm/ymm).
pabsb %mm1,%mm0
pabsd %mm1,%mm0
pabsw %mm1,%mm0
paddq %mm1,%mm0
palignr $0x2,%mm1,%mm0
pavgb %mm1,%mm0
pavgw %mm3,%mm2
pextrw $0x0,%mm1,%eax
phaddd %mm1,%mm0
phaddsw %mm1,%mm0
phaddw %mm1,%mm0
phsubd %mm1,%mm0
phsubsw %mm1,%mm0
phsubw %mm1,%mm0
pinsrw $0x2,%edx,%mm2
pmaddubsw %mm1,%mm0
pmaxsw %mm1,%mm0
pmaxub %mm2,%mm2
pminsw %mm5,%mm4
pminub %mm7,%mm6
pmovmskb %mm5,%eax
pmulhrsw %mm1,%mm0
pmulhuw %mm5,%mm4
pmuludq %mm0, %mm1
popcnt %ebx,%ecx
prefetchnta (%eax)
prefetcht0 (%eax)
prefetcht1 (%eax)
prefetcht2 (%eax)
psadbw %mm7,%mm6
pshufb %mm1,%mm0
pshufw $0x1,%mm2,%mm3
psignb %mm1,%mm0
psignd %mm1,%mm0
psignw %mm1,%mm0
psubq %mm1,%mm0
sfence
|
tactcomplabs/xbgas-binutils-gdb
| 5,370
|
gas/testsuite/gas/i386/dw2-compress-1.s
|
/* This testcase is copied from a similar test in GDB.
Copyright (C) 2010-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* This tests that gdb can read compressed sections. The contents
are a basic assembly file, but the .debug_abbrev section has been
compressed using zlib. */
/* Dummy function to provide debug information for. */
/* The body is a single zero word; only the labels bracketing it and the
   symbol size matter — they anchor the hand-written DWARF below. */
.text
.Lbegin_text1:
.globl func_cu1
.type func_cu1, %function
func_cu1:
.Lbegin_func_cu1:
.int 0
.Lend_func_cu1:
.size func_cu1, .-func_cu1
.Lend_text1:
/* Debug information */
/* Hand-written DWARF2 .debug_info: a single compilation unit describing
   func_cu1 and an `int` base type.  Attribute order must match the
   abbreviation declarations in .debug_abbrev below. */
.section .debug_info
.Lcu1_begin:
/* CU header */
.4byte .Lcu1_end - .Lcu1_start /* Length of Compilation Unit */
.Lcu1_start:
.2byte 2 /* DWARF Version */
.4byte .Labbrev1_begin /* Offset into abbrev section */
.byte 4 /* Pointer size */
/* CU die */
.uleb128 1 /* Abbrev: DW_TAG_compile_unit */
.4byte .Lline1_begin /* DW_AT_stmt_list */
.4byte .Lend_text1 /* DW_AT_high_pc */
.4byte .Lbegin_text1 /* DW_AT_low_pc */
.ascii "file1.txt\0" /* DW_AT_name */
.ascii "GNU C 3.3.3\0" /* DW_AT_producer */
.byte 1 /* DW_AT_language (C) */
/* func_cu1 */
.uleb128 2 /* Abbrev: DW_TAG_subprogram */
.byte 1 /* DW_AT_external */
.byte 1 /* DW_AT_decl_file */
.byte 2 /* DW_AT_decl_line */
.ascii "func_cu1\0" /* DW_AT_name */
.4byte .Ltype_int-.Lcu1_begin /* DW_AT_type */
.4byte .Lbegin_func_cu1 /* DW_AT_low_pc */
.4byte .Lend_func_cu1 /* DW_AT_high_pc */
.byte 1 /* DW_AT_frame_base: length */
.byte 0x55 /* DW_AT_frame_base: DW_OP_reg5 */
.Ltype_int:
.uleb128 3 /* Abbrev: DW_TAG_base_type */
.ascii "int\0" /* DW_AT_name */
.byte 4 /* DW_AT_byte_size */
.byte 5 /* DW_AT_encoding */
.byte 0 /* End of children of CU */
.Lcu1_end:
/* Abbrev table */
/* Three abbreviations (compile_unit, subprogram, base_type); each entry is
   tag, has_children flag, then attribute/form pairs ending in 0,0.  Per the
   test's purpose, this is the section that gets zlib-compressed. */
.section .debug_abbrev
.Labbrev1_begin:
.uleb128 1 /* Abbrev code */
.uleb128 0x11 /* DW_TAG_compile_unit */
.byte 1 /* has_children */
.uleb128 0x10 /* DW_AT_stmt_list */
.uleb128 0x6 /* DW_FORM_data4 */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x25 /* DW_AT_producer */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x13 /* DW_AT_language */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 2 /* Abbrev code */
.uleb128 0x2e /* DW_TAG_subprogram */
.byte 0 /* has_children */
.uleb128 0x3f /* DW_AT_external */
.uleb128 0xc /* DW_FORM_flag */
.uleb128 0x3a /* DW_AT_decl_file */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3b /* DW_AT_decl_line */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x49 /* DW_AT_type */
.uleb128 0x13 /* DW_FORM_ref4 */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x40 /* DW_AT_frame_base */
.uleb128 0xa /* DW_FORM_block1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 3 /* Abbrev code */
.uleb128 0x24 /* DW_TAG_base_type */
.byte 0 /* has_children */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0xb /* DW_AT_byte_size */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3e /* DW_AT_encoding */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
/* Line table */
/* Minimal DWARF2 line-number program: header (opcode lengths, no include
   dirs, one file "file1.txt"), then a program that sets the address to the
   function start, advances the line, and ends the sequence at its end. */
.section .debug_line
.Lline1_begin:
.4byte .Lline1_end - .Lline1_start /* Initial length */
.Lline1_start:
.2byte 2 /* Version */
.4byte .Lline1_lines - .Lline1_hdr /* header_length */
.Lline1_hdr:
.byte 1 /* Minimum insn length */
.byte 1 /* default_is_stmt */
.byte 1 /* line_base */
.byte 1 /* line_range */
.byte 0x10 /* opcode_base */
/* Standard lengths */
.byte 0
.byte 1
.byte 1
.byte 1
.byte 1
.byte 0
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 0
/* Include directories */
.byte 0
/* File names */
.ascii "file1.txt\0"
.uleb128 0
.uleb128 0
.uleb128 0
.byte 0
.Lline1_lines:
/* Extended opcode: length 5, sub-opcode 2 = set_address. */
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lbegin_func_cu1
.byte 3 /* DW_LNS_advance_line */
.sleb128 3 /* ... to 4 */
.byte 1 /* DW_LNS_copy */
.byte 1 /* DW_LNS_copy (second time as an end-of-prologue marker) */
/* Extended opcode: set_address to the function end, then end the sequence. */
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lend_func_cu1
.byte 0 /* DW_LNE_end_of_sequence */
.uleb128 1
.byte 1
.Lline1_end:
|
tactcomplabs/xbgas-binutils-gdb
| 5,983
|
gas/testsuite/gas/i386/x86-64-optimize-2.s
|
# Check 64bit instructions with optimized encoding
# gas testsuite input for assembler encoding optimization: many EVEX forms
# below have shorter equivalents (e.g. VEX encodings) when no masking,
# extended registers (xmm/ymm/zmm16+), or zmm width forces EVEX.
# NOTE(review): the assembler flags and expected encodings live in the
# matching .d driver file, which is not visible here.
.allow_index_reg
.text
_start:
# Same-source AND-NOT / XOR forms (zeroing-idiom candidates), with and
# without masking and with EVEX-only registers mixed in.
vandnpd %zmm1, %zmm1, %zmm15{%k7}
vandnpd %ymm1, %ymm1, %ymm15 {%k7} {z}
vandnpd %zmm1, %zmm1, %zmm15
vandnpd %ymm1, %ymm1, %ymm15
vandnpd %zmm1, %zmm1, %zmm16
vandnpd %ymm1, %ymm1, %ymm16
vandnpd %zmm17, %zmm17, %zmm1
vandnpd %ymm17, %ymm17, %ymm1
vandnps %zmm1, %zmm1, %zmm15{%k7}
vandnps %ymm1, %ymm1, %ymm15{z}{%k7}
vandnps %zmm1, %zmm1, %zmm15
vandnps %ymm1, %ymm1, %ymm15
vandnps %zmm1, %zmm1, %zmm16
vandnps %ymm1, %ymm1, %ymm16
vandnps %zmm17, %zmm17, %zmm1
vandnps %ymm17, %ymm17, %ymm1
vpandn %ymm1, %ymm1, %ymm15
vpandnd %zmm1, %zmm1, %zmm15{%k7}
vpandnd %ymm1, %ymm1, %ymm15{z}{%k7}
vpandnd %zmm1, %zmm1, %zmm15
vpandnd %ymm1, %ymm1, %ymm15
vpandnd %zmm1, %zmm1, %zmm16
vpandnd %ymm1, %ymm1, %ymm16
vpandnd %zmm17, %zmm17, %zmm1
vpandnd %ymm17, %ymm17, %ymm1
vpandnq %zmm1, %zmm1, %zmm15{%k7}
vpandnq %ymm1, %ymm1, %ymm15{z}{%k7}
vpandnq %zmm1, %zmm1, %zmm15
vpandnq %ymm1, %ymm1, %ymm15
vpandnq %zmm1, %zmm1, %zmm16
vpandnq %ymm1, %ymm1, %ymm16
vpandnq %zmm17, %zmm17, %zmm1
vpandnq %ymm17, %ymm17, %ymm1
vxorpd %zmm1, %zmm1, %zmm15{%k7}
vxorpd %ymm1, %ymm1, %ymm15{z}{%k7}
vxorpd %zmm1, %zmm1, %zmm15
vxorpd %ymm1, %ymm1, %ymm15
vxorpd %zmm1, %zmm1, %zmm16
vxorpd %ymm1, %ymm1, %ymm16
vxorpd %zmm17, %zmm17, %zmm1
vxorpd %ymm17, %ymm17, %ymm1
vxorps %zmm1, %zmm1, %zmm15{%k7}
vxorps %ymm1, %ymm1, %ymm15{z}{%k7}
vxorps %zmm1, %zmm1, %zmm15
vxorps %ymm1, %ymm1, %ymm15
vxorps %zmm1, %zmm1, %zmm16
vxorps %ymm1, %ymm1, %ymm16
vxorps %zmm17, %zmm17, %zmm1
vxorps %ymm17, %ymm17, %ymm1
vpxor %ymm1, %ymm1, %ymm15
vpxord %zmm1, %zmm1, %zmm15{%k7}
vpxord %ymm1, %ymm1, %ymm15{z}{%k7}
vpxord %zmm1, %zmm1, %zmm15
vpxord %ymm1, %ymm1, %ymm15
vpxord %zmm1, %zmm1, %zmm16
vpxord %ymm1, %ymm1, %ymm16
vpxord %zmm17, %zmm17, %zmm1
vpxord %ymm17, %ymm17, %ymm1
vpxorq %zmm1, %zmm1, %zmm15{%k7}
vpxorq %ymm1, %ymm1, %ymm15{z}{%k7}
vpxorq %zmm1, %zmm1, %zmm15
vpxorq %ymm1, %ymm1, %ymm15
vpxorq %zmm1, %zmm1, %zmm16
vpxorq %ymm1, %ymm1, %ymm16
vpxorq %zmm17, %zmm17, %zmm1
vpxorq %ymm17, %ymm17, %ymm1
# Same-source subtracts (another zeroing idiom family).
vpsubb %zmm1, %zmm1, %zmm15{%k7}
vpsubb %ymm1, %ymm1, %ymm15{z}{%k7}
vpsubb %zmm1, %zmm1, %zmm15
vpsubb %ymm1, %ymm1, %ymm15
vpsubb %zmm1, %zmm1, %zmm16
vpsubb %ymm1, %ymm1, %ymm16
vpsubb %zmm17, %zmm17, %zmm1
vpsubb %ymm17, %ymm17, %ymm1
vpsubw %zmm1, %zmm1, %zmm15{%k7}
vpsubw %ymm1, %ymm1, %ymm15{z}{%k7}
vpsubw %zmm1, %zmm1, %zmm15
vpsubw %ymm1, %ymm1, %ymm15
vpsubw %zmm1, %zmm1, %zmm16
vpsubw %ymm1, %ymm1, %ymm16
vpsubw %zmm17, %zmm17, %zmm1
vpsubw %ymm17, %ymm17, %ymm1
vpsubd %zmm1, %zmm1, %zmm15{%k7}
vpsubd %ymm1, %ymm1, %ymm15{z}{%k7}
vpsubd %zmm1, %zmm1, %zmm15
vpsubd %ymm1, %ymm1, %ymm15
vpsubd %zmm1, %zmm1, %zmm16
vpsubd %ymm1, %ymm1, %ymm16
vpsubd %zmm17, %zmm17, %zmm1
vpsubd %ymm17, %ymm17, %ymm1
vpsubq %zmm1, %zmm1, %zmm15{%k7}
vpsubq %ymm1, %ymm1, %ymm15{z}{%k7}
vpsubq %zmm1, %zmm1, %zmm15
vpsubq %ymm1, %ymm1, %ymm15
vpsubq %zmm1, %zmm1, %zmm16
vpsubq %ymm1, %ymm1, %ymm16
vpsubq %zmm17, %zmm17, %zmm1
vpsubq %ymm17, %ymm17, %ymm1
# EVEX move variants: register-register, and memory forms whose
# displacements (127 vs 128) sit on either side of the disp8*N range.
vmovdqa32 %xmm1, %xmm2
vmovdqa64 %xmm1, %xmm2
vmovdqu8 %xmm1, %xmm2
vmovdqu16 %xmm1, %xmm2
vmovdqu32 %xmm1, %xmm2
vmovdqu64 %xmm1, %xmm2
vmovdqa32 %xmm11, %xmm12
vmovdqa64 %xmm11, %xmm12
vmovdqu8 %xmm11, %xmm12
vmovdqu16 %xmm11, %xmm12
vmovdqu32 %xmm11, %xmm12
vmovdqu64 %xmm11, %xmm12
vmovdqa32 127(%rax), %xmm2
vmovdqa64 127(%rax), %xmm2
vmovdqu8 127(%rax), %xmm2
vmovdqu16 127(%rax), %xmm2
vmovdqu32 127(%rax), %xmm2
vmovdqu64 127(%rax), %xmm2
vmovdqa32 %xmm1, 128(%rax)
vmovdqa64 %xmm1, 128(%rax)
vmovdqu8 %xmm1, 128(%rax)
vmovdqu16 %xmm1, 128(%rax)
vmovdqu32 %xmm1, 128(%rax)
vmovdqu64 %xmm1, 128(%rax)
vmovdqa32 %ymm1, %ymm2
vmovdqa64 %ymm1, %ymm2
vmovdqu8 %ymm1, %ymm2
vmovdqu16 %ymm1, %ymm2
vmovdqu32 %ymm1, %ymm2
vmovdqu64 %ymm1, %ymm2
vmovdqa32 %ymm11, %ymm12
vmovdqa64 %ymm11, %ymm12
vmovdqu8 %ymm11, %ymm12
vmovdqu16 %ymm11, %ymm12
vmovdqu32 %ymm11, %ymm12
vmovdqu64 %ymm11, %ymm12
vmovdqa32 127(%rax), %ymm2
vmovdqa64 127(%rax), %ymm2
vmovdqu8 127(%rax), %ymm2
vmovdqu16 127(%rax), %ymm2
vmovdqu32 127(%rax), %ymm2
vmovdqu64 127(%rax), %ymm2
vmovdqa32 %ymm1, 128(%rax)
vmovdqa64 %ymm1, 128(%rax)
vmovdqu8 %ymm1, 128(%rax)
vmovdqu16 %ymm1, 128(%rax)
vmovdqu32 %ymm1, 128(%rax)
vmovdqu64 %ymm1, 128(%rax)
vmovdqa32 (%rax), %zmm2
# Register-only logic ops at xmm/ymm width (candidates for VEX encoding).
vpandd %xmm2, %xmm3, %xmm4
vpandq %xmm12, %xmm3, %xmm4
vpandnd %xmm2, %xmm13, %xmm4
vpandnq %xmm2, %xmm3, %xmm14
vpord %xmm2, %xmm3, %xmm4
vporq %xmm12, %xmm3, %xmm4
vpxord %xmm2, %xmm13, %xmm4
vpxorq %xmm2, %xmm3, %xmm14
vpandd %ymm2, %ymm3, %ymm4
vpandq %ymm12, %ymm3, %ymm4
vpandnd %ymm2, %ymm13, %ymm4
vpandnq %ymm2, %ymm3, %ymm14
vpord %ymm2, %ymm3, %ymm4
vporq %ymm12, %ymm3, %ymm4
vpxord %ymm2, %ymm13, %ymm4
vpxorq %ymm2, %ymm3, %ymm14
# Memory-operand logic ops; displacements straddle the disp8*N limits
# (112/96 reachable as compressed disp8, 128 not).
vpandd 112(%rax), %xmm2, %xmm3
vpandq 112(%rax), %xmm2, %xmm3
vpandnd 112(%rax), %xmm2, %xmm3
vpandnq 112(%rax), %xmm2, %xmm3
vpord 112(%rax), %xmm2, %xmm3
vporq 112(%rax), %xmm2, %xmm3
vpxord 112(%rax), %xmm2, %xmm3
vpxorq 112(%rax), %xmm2, %xmm3
vpandd 128(%rax), %xmm2, %xmm3
vpandq 128(%rax), %xmm2, %xmm3
vpandnd 128(%rax), %xmm2, %xmm3
vpandnq 128(%rax), %xmm2, %xmm3
vpord 128(%rax), %xmm2, %xmm3
vporq 128(%rax), %xmm2, %xmm3
vpxord 128(%rax), %xmm2, %xmm3
vpxorq 128(%rax), %xmm2, %xmm3
vpandd 96(%rax), %ymm2, %ymm3
vpandq 96(%rax), %ymm2, %ymm3
vpandnd 96(%rax), %ymm2, %ymm3
vpandnq 96(%rax), %ymm2, %ymm3
vpord 96(%rax), %ymm2, %ymm3
vporq 96(%rax), %ymm2, %ymm3
vpxord 96(%rax), %ymm2, %ymm3
vpxorq 96(%rax), %ymm2, %ymm3
vpandd 128(%rax), %ymm2, %ymm3
vpandq 128(%rax), %ymm2, %ymm3
vpandnd 128(%rax), %ymm2, %ymm3
vpandnq 128(%rax), %ymm2, %ymm3
vpord 128(%rax), %ymm2, %ymm3
vporq 128(%rax), %ymm2, %ymm3
vpxord 128(%rax), %ymm2, %ymm3
vpxorq 128(%rax), %ymm2, %ymm3
|
tactcomplabs/xbgas-binutils-gdb
| 6,513
|
gas/testsuite/gas/i386/avx512f-rcig.s
|
# Check 32bit AVX512F-RCIG instructions
# gas testsuite input: every instruction uses the {sae} qualifier
# (suppress-all-exceptions static rounding); such forms are register-only,
# which is why no memory operands appear in this file.
.allow_index_reg
.text
_start:
vcmppd $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmppd $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcomisd {sae}, %xmm5, %xmm6 # AVX512F
vcomiss {sae}, %xmm5, %xmm6 # AVX512F
vcvtph2ps {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtps2pd {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtps2ph $0xab, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $123, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvttpd2dq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttps2dq {sae}, %zmm5, %zmm6 # AVX512F
vcvttsd2si {sae}, %xmm6, %eax # AVX512F
vcvttsd2si {sae}, %xmm6, %ebp # AVX512F
vcvttss2si {sae}, %xmm6, %eax # AVX512F
vcvttss2si {sae}, %xmm6, %ebp # AVX512F
vgetexppd {sae}, %zmm5, %zmm6 # AVX512F
vgetexpps {sae}, %zmm5, %zmm6 # AVX512F
vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantpd $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantpd $123, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantps $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantps $123, {sae}, %zmm5, %zmm6 # AVX512F
vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetmantss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxpd {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmaxps {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vmaxsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmaxss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminpd {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vminps {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vminsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vminss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vucomisd {sae}, %xmm5, %xmm6 # AVX512F
vucomiss {sae}, %xmm5, %xmm6 # AVX512F
vfixupimmpd $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmpd $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmps $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfixupimmsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfixupimmss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalepd $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vrndscalepd $123, {sae}, %zmm5, %zmm6 # AVX512F
vrndscaleps $0xab, {sae}, %zmm5, %zmm6 # AVX512F
vrndscaleps $123, {sae}, %zmm5, %zmm6 # AVX512F
vrndscalesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscalesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrndscaless $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvttpd2udq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttps2udq {sae}, %zmm5, %zmm6 # AVX512F
vcvttsd2usi {sae}, %xmm6, %eax # AVX512F
vcvttsd2usi {sae}, %xmm6, %ebp # AVX512F
vcvttss2usi {sae}, %xmm6, %eax # AVX512F
vcvttss2usi {sae}, %xmm6, %ebp # AVX512F
# The same instructions in Intel syntax: operand order is reversed and
# {sae} trails the register operands.
.intel_syntax noprefix
vcmppd k5, zmm6, zmm5, {sae}, 0xab # AVX512F
vcmppd k5, zmm6, zmm5, {sae}, 123 # AVX512F
vcmpps k5, zmm6, zmm5, {sae}, 0xab # AVX512F
vcmpps k5, zmm6, zmm5, {sae}, 123 # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vcmpss k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vcmpss k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vcomisd xmm6, xmm5, {sae} # AVX512F
vcomiss xmm6, xmm5, {sae} # AVX512F
vcvtph2ps zmm6{k7}, ymm5, {sae} # AVX512F
vcvtps2pd zmm6{k7}, ymm5, {sae} # AVX512F
vcvtps2ph ymm6{k7}, zmm5, {sae}, 0xab # AVX512F
vcvtps2ph ymm6{k7}, zmm5, {sae}, 123 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vcvttpd2dq ymm6{k7}, zmm5, {sae} # AVX512F
vcvttps2dq zmm6, zmm5, {sae} # AVX512F
vcvttsd2si eax, xmm6, {sae} # AVX512F
vcvttsd2si ebp, xmm6, {sae} # AVX512F
vcvttss2si eax, xmm6, {sae} # AVX512F
vcvttss2si ebp, xmm6, {sae} # AVX512F
vgetexppd zmm6, zmm5, {sae} # AVX512F
vgetexpps zmm6, zmm5, {sae} # AVX512F
vgetexpsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vgetexpss xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vgetmantpd zmm6, zmm5, {sae}, 0xab # AVX512F
vgetmantpd zmm6, zmm5, {sae}, 123 # AVX512F
vgetmantps zmm6, zmm5, {sae}, 0xab # AVX512F
vgetmantps zmm6, zmm5, {sae}, 123 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vmaxpd zmm6, zmm5, zmm4, {sae} # AVX512F
vmaxps zmm6, zmm5, zmm4, {sae} # AVX512F
vmaxsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vmaxss xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vminpd zmm6, zmm5, zmm4, {sae} # AVX512F
vminps zmm6, zmm5, zmm4, {sae} # AVX512F
vminsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vminss xmm6{k7}, xmm5, xmm4, {sae} # AVX512F
vucomisd xmm6, xmm5, {sae} # AVX512F
vucomiss xmm6, xmm5, {sae} # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, {sae}, 0xab # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, {sae}, 123 # AVX512F
vfixupimmps zmm6, zmm5, zmm4, {sae}, 0xab # AVX512F
vfixupimmps zmm6, zmm5, zmm4, {sae}, 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vrndscalepd zmm6, zmm5, {sae}, 0xab # AVX512F
vrndscalepd zmm6, zmm5, {sae}, 123 # AVX512F
vrndscaleps zmm6, zmm5, {sae}, 0xab # AVX512F
vrndscaleps zmm6, zmm5, {sae}, 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512F
vcvttpd2udq ymm6{k7}, zmm5, {sae} # AVX512F
vcvttps2udq zmm6, zmm5, {sae} # AVX512F
vcvttsd2usi eax, xmm6, {sae} # AVX512F
vcvttsd2usi ebp, xmm6, {sae} # AVX512F
vcvttss2usi eax, xmm6, {sae} # AVX512F
vcvttss2usi ebp, xmm6, {sae} # AVX512F
|
tactcomplabs/xbgas-binutils-gdb
| 2,181
|
gas/testsuite/gas/i386/sha.s
|
# Check SHA instructions
# gas testsuite input: each SHA-NI instruction is driven through register,
# plain-base, base+displacement and base+index*scale memory operands,
# first in AT&T syntax and then again in Intel syntax.
.allow_index_reg
.text
_start:
sha1rnds4 $9, %xmm2, %xmm1
sha1rnds4 $7, (%eax), %xmm2
sha1rnds4 $5, 0x12(%eax), %xmm3
sha1rnds4 $1, (%eax,%ebx,2), %xmm4
sha1nexte %xmm2, %xmm1
sha1nexte (%eax), %xmm1
sha1nexte 0x12(%eax), %xmm1
sha1nexte (%eax,%ebx,2), %xmm1
sha1msg1 %xmm2, %xmm1
sha1msg1 (%eax), %xmm1
sha1msg1 0x12(%eax), %xmm1
sha1msg1 (%eax,%ebx,2), %xmm1
sha1msg2 %xmm2, %xmm1
sha1msg2 (%eax), %xmm1
sha1msg2 0x12(%eax), %xmm1
sha1msg2 (%eax,%ebx,2), %xmm1
sha256rnds2 %xmm2, %xmm1
sha256rnds2 (%eax), %xmm1
sha256rnds2 0x12(%eax), %xmm1
sha256rnds2 (%eax,%ebx,2), %xmm1
# sha256rnds2 with its implicit %xmm0 operand spelled out explicitly.
sha256rnds2 %xmm0, %xmm2, %xmm1
sha256rnds2 %xmm0, (%eax), %xmm1
sha256rnds2 %xmm0, 0x12(%eax), %xmm1
sha256rnds2 %xmm0, (%eax,%ebx,2), %xmm1
sha256msg1 %xmm2, %xmm1
sha256msg1 (%eax), %xmm1
sha256msg1 0x12(%eax), %xmm1
sha256msg1 (%eax,%ebx,2), %xmm1
sha256msg2 %xmm2, %xmm1
sha256msg2 (%eax), %xmm1
sha256msg2 0x12(%eax), %xmm1
sha256msg2 (%eax,%ebx,2), %xmm1
# Same instruction set in Intel syntax (destination first).
.intel_syntax noprefix
sha1rnds4 xmm1, xmm2, 9
sha1rnds4 xmm2, XMMWORD PTR [eax], 7
sha1rnds4 xmm3, XMMWORD PTR [eax+0x12], 5
sha1rnds4 xmm4, XMMWORD PTR [eax+ebx*2], 1
sha1nexte xmm1, xmm2
sha1nexte xmm2, XMMWORD PTR [eax]
sha1nexte xmm3, XMMWORD PTR [eax+0x12]
sha1nexte xmm4, XMMWORD PTR [eax+ebx*2]
sha1msg1 xmm1, xmm2
sha1msg1 xmm2, XMMWORD PTR [eax]
sha1msg1 xmm3, XMMWORD PTR [eax+0x12]
sha1msg1 xmm4, XMMWORD PTR [eax+ebx*2]
sha1msg2 xmm1, xmm2
sha1msg2 xmm2, XMMWORD PTR [eax]
sha1msg2 xmm3, XMMWORD PTR [eax+0x12]
sha1msg2 xmm4, XMMWORD PTR [eax+ebx*2]
sha256rnds2 xmm1, xmm2
sha256rnds2 xmm2, XMMWORD PTR [eax]
sha256rnds2 xmm3, XMMWORD PTR [eax+0x12]
sha256rnds2 xmm4, XMMWORD PTR [eax+ebx*2]
# Explicit trailing xmm0 operand in Intel syntax.
sha256rnds2 xmm1, xmm2, xmm0
sha256rnds2 xmm2, XMMWORD PTR [eax], xmm0
sha256rnds2 xmm3, XMMWORD PTR [eax+0x12], xmm0
sha256rnds2 xmm4, XMMWORD PTR [eax+ebx*2], xmm0
sha256msg1 xmm1, xmm2
sha256msg1 xmm2, XMMWORD PTR [eax]
sha256msg1 xmm3, XMMWORD PTR [eax+0x12]
sha256msg1 xmm4, XMMWORD PTR [eax+ebx*2]
sha256msg2 xmm1, xmm2
sha256msg2 xmm2, XMMWORD PTR [eax]
sha256msg2 xmm3, XMMWORD PTR [eax+0x12]
sha256msg2 xmm4, XMMWORD PTR [eax+ebx*2]
|
tactcomplabs/xbgas-binutils-gdb
| 5,300
|
gas/testsuite/gas/i386/simd.s
|
# gas i386 SIMD test input, AT&T-syntax section.
# Exercises SSE/SSE2/SSE3/SSE4.1/SSE4a templates with absolute addresses,
# register operands, and base-register memory operands.  Scalar ops are
# listed in sd/ss pairs; an Intel-syntax mirror of this section follows.
.text
_start:
addsubps 0x12345678,%xmm1
comisd 0x12345678,%xmm1
comiss 0x12345678,%xmm1
cvtdq2pd 0x12345678,%xmm1
cvtpd2dq 0x12345678,%xmm1
cvtps2pd 0x12345678,%xmm1
cvttps2dq 0x12345678,%xmm1
haddps 0x12345678,%xmm1
movdqu %xmm1,0x12345678
movdqu 0x12345678,%xmm1
movhpd %xmm1,0x12345678
movhpd 0x12345678,%xmm1
movhps %xmm1,0x12345678
movhps 0x12345678,%xmm1
movlpd %xmm1,0x12345678
movlpd 0x12345678,%xmm1
movlps %xmm1,0x12345678
movlps 0x12345678,%xmm1
movshdup 0x12345678,%xmm1
movsldup 0x12345678,%xmm1
pshufhw $0x90,0x12345678,%xmm1
pshuflw $0x90,0x12345678,%xmm1
punpcklbw 0x12345678,%mm1
punpckldq 0x12345678,%mm1
punpcklwd 0x12345678,%mm1
punpcklbw 0x12345678,%xmm1
punpckldq 0x12345678,%xmm1
punpcklwd 0x12345678,%xmm1
punpcklqdq 0x12345678,%xmm1
ucomisd 0x12345678,%xmm1
ucomiss 0x12345678,%xmm1
cmpeqsd (%eax),%xmm0
cmpeqss (%eax),%xmm0
cvtpi2pd (%eax),%xmm0
cvtpi2ps (%eax),%xmm0
cvtps2pi (%eax),%mm0
cvtsd2si (%eax),%eax
cvttsd2si (%eax),%eax
cvtsd2ss (%eax),%xmm0
cvtss2sd (%eax),%xmm0
cvtss2si (%eax),%eax
cvttss2si (%eax),%eax
divsd (%eax),%xmm0
divss (%eax),%xmm0
maxsd (%eax),%xmm0
maxss (%eax),%xmm0
# NOTE(review): was a duplicated "minss", leaving MINSD uncovered; restore
# the sd/ss pairing used by every other scalar op here (update the matching
# .d expected-output file accordingly).
minsd (%eax),%xmm0
minss (%eax),%xmm0
movntsd %xmm0,(%eax)
movntss %xmm0,(%eax)
movsd (%eax),%xmm0
movsd %xmm0,(%eax)
movss (%eax),%xmm0
movss %xmm0,(%eax)
mulsd (%eax),%xmm0
mulss (%eax),%xmm0
rcpss (%eax),%xmm0
roundsd $0,(%eax),%xmm0
roundss $0,(%eax),%xmm0
rsqrtss (%eax),%xmm0
sqrtsd (%eax),%xmm0
sqrtss (%eax),%xmm0
subsd (%eax),%xmm0
subss (%eax),%xmm0
pmovsxbw (%eax),%xmm0
pmovsxbd (%eax),%xmm0
pmovsxbq (%eax),%xmm0
pmovsxwd (%eax),%xmm0
pmovsxwq (%eax),%xmm0
pmovsxdq (%eax),%xmm0
pmovzxbw (%eax),%xmm0
pmovzxbd (%eax),%xmm0
pmovzxbq (%eax),%xmm0
pmovzxwd (%eax),%xmm0
pmovzxwq (%eax),%xmm0
pmovzxdq (%eax),%xmm0
insertps $0x0,(%eax),%xmm0
unpckhpd (%eax),%xmm1
unpckhps (%eax),%xmm1
unpcklpd (%eax),%xmm1
unpcklps (%eax),%xmm1
cmpss $0x10,%xmm7,%xmm6
cmpss $0x10,(%eax),%xmm7
cmpsd $0x10,%xmm7,%xmm6
cmpsd $0x10,(%eax),%xmm7
cvtsi2ss %eax, %xmm1
cvtsi2sd %eax, %xmm1
cvtsi2ssl %eax, %xmm1
cvtsi2sdl %eax, %xmm1
cvtsi2ss (%eax), %xmm1
cvtsi2sd (%eax), %xmm1
cvtsi2ssl (%eax), %xmm1
cvtsi2sdl (%eax), %xmm1
# Intel-syntax mirror of the AT&T section above: same instruction list with
# explicit operand-size PTR qualifiers, kept in one-to-one correspondence so
# both parsers are exercised on identical templates.
.intel_syntax noprefix
addsubps xmm1,XMMWORD PTR ds:0x12345678
comisd xmm1,QWORD PTR ds:0x12345678
comiss xmm1,DWORD PTR ds:0x12345678
cvtdq2pd xmm1,QWORD PTR ds:0x12345678
cvtpd2dq xmm1,XMMWORD PTR ds:0x12345678
cvtps2pd xmm1,QWORD PTR ds:0x12345678
cvttps2dq xmm1,XMMWORD PTR ds:0x12345678
haddps xmm1,XMMWORD PTR ds:0x12345678
movdqu XMMWORD PTR ds:0x12345678,xmm1
movdqu xmm1,XMMWORD PTR ds:0x12345678
movhpd QWORD PTR ds:0x12345678,xmm1
movhpd xmm1,QWORD PTR ds:0x12345678
movhps QWORD PTR ds:0x12345678,xmm1
movhps xmm1,QWORD PTR ds:0x12345678
movlpd QWORD PTR ds:0x12345678,xmm1
movlpd xmm1,QWORD PTR ds:0x12345678
movlps QWORD PTR ds:0x12345678,xmm1
movlps xmm1,QWORD PTR ds:0x12345678
movshdup xmm1,XMMWORD PTR ds:0x12345678
movsldup xmm1,XMMWORD PTR ds:0x12345678
pshufhw xmm1,XMMWORD PTR ds:0x12345678,0x90
pshuflw xmm1,XMMWORD PTR ds:0x12345678,0x90
punpcklbw mm1,DWORD PTR ds:0x12345678
punpckldq mm1,DWORD PTR ds:0x12345678
punpcklwd mm1,DWORD PTR ds:0x12345678
punpcklbw xmm1,XMMWORD PTR ds:0x12345678
punpckldq xmm1,XMMWORD PTR ds:0x12345678
punpcklwd xmm1,XMMWORD PTR ds:0x12345678
punpcklqdq xmm1,XMMWORD PTR ds:0x12345678
ucomisd xmm1,QWORD PTR ds:0x12345678
ucomiss xmm1,DWORD PTR ds:0x12345678
cmpeqsd xmm0,QWORD PTR [eax]
cmpeqss xmm0,DWORD PTR [eax]
cvtpi2pd xmm0,QWORD PTR [eax]
cvtpi2ps xmm0,QWORD PTR [eax]
cvtps2pi mm0,QWORD PTR [eax]
cvtsd2si eax,QWORD PTR [eax]
cvttsd2si eax,QWORD PTR [eax]
cvtsd2ss xmm0,QWORD PTR [eax]
cvtss2sd xmm0,DWORD PTR [eax]
cvtss2si eax,DWORD PTR [eax]
cvttss2si eax,DWORD PTR [eax]
divsd xmm0,QWORD PTR [eax]
divss xmm0,DWORD PTR [eax]
maxsd xmm0,QWORD PTR [eax]
maxss xmm0,DWORD PTR [eax]
# NOTE(review): was a duplicated "minss"; restored the minsd/minss pair to
# match the AT&T section (update the matching .d expected-output file).
minsd xmm0,QWORD PTR [eax]
minss xmm0,DWORD PTR [eax]
movntsd QWORD PTR [eax],xmm0
movntss DWORD PTR [eax],xmm0
movsd xmm0,QWORD PTR [eax]
movsd QWORD PTR [eax],xmm0
movss xmm0,DWORD PTR [eax]
movss DWORD PTR [eax],xmm0
mulsd xmm0,QWORD PTR [eax]
mulss xmm0,DWORD PTR [eax]
rcpss xmm0,DWORD PTR [eax]
roundsd xmm0,QWORD PTR [eax],0x0
roundss xmm0,DWORD PTR [eax],0x0
rsqrtss xmm0,DWORD PTR [eax]
sqrtsd xmm0,QWORD PTR [eax]
sqrtss xmm0,DWORD PTR [eax]
subsd xmm0,QWORD PTR [eax]
subss xmm0,DWORD PTR [eax]
pmovsxbw xmm0,QWORD PTR [eax]
pmovsxbd xmm0,DWORD PTR [eax]
pmovsxbq xmm0,WORD PTR [eax]
pmovsxwd xmm0,QWORD PTR [eax]
pmovsxwq xmm0,DWORD PTR [eax]
pmovsxdq xmm0,QWORD PTR [eax]
pmovzxbw xmm0,QWORD PTR [eax]
pmovzxbd xmm0,DWORD PTR [eax]
pmovzxbq xmm0,WORD PTR [eax]
pmovzxwd xmm0,QWORD PTR [eax]
pmovzxwq xmm0,DWORD PTR [eax]
pmovzxdq xmm0,QWORD PTR [eax]
insertps xmm0,DWORD PTR [eax],0x0
unpckhpd xmm0,XMMWORD PTR [eax]
unpckhps xmm0,XMMWORD PTR [eax]
unpcklpd xmm0,XMMWORD PTR [eax]
unpcklps xmm0,XMMWORD PTR [eax]
cmpss xmm6,xmm7,0x10
cmpss xmm7,DWORD PTR [eax],0x10
cmpsd xmm6,xmm7,0x10
cmpsd xmm7,QWORD PTR [eax],0x10
cvtsi2ss xmm1,eax
cvtsi2sd xmm1,eax
cvtsi2ssd xmm1,eax
cvtsi2sdd xmm1,eax
cvtsi2ss xmm1,DWORD PTR [eax]
cvtsi2ss xmm1,[eax]
cvtsi2sd xmm1,DWORD PTR [eax]
cvtsi2sd xmm1,[eax]
cvtsi2ssd xmm1,DWORD PTR [eax]
cvtsi2sdd xmm1,DWORD PTR [eax]
cvttps2pi mm0,QWORD PTR[eax]
# ==== end of gas/testsuite/gas/i386/simd.s; next file: gas/testsuite/gas/i386/x86-64-avx512bw_vl-wig.s (tactcomplabs/xbgas-binutils-gdb, 112,772 bytes) ====
# Check 64bit AVX512{BW,VL} WIG instructions
.allow_index_reg
.text
_start:
vpabsb %xmm29, %xmm30 # AVX512{BW,VL}
vpabsb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpabsb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpabsb (%rcx), %xmm30 # AVX512{BW,VL}
vpabsb 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpabsb 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsb 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpabsb -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsb -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpabsb %ymm29, %ymm30 # AVX512{BW,VL}
vpabsb %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpabsb %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpabsb (%rcx), %ymm30 # AVX512{BW,VL}
vpabsb 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpabsb 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsb 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpabsb -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsb -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpabsw %xmm29, %xmm30 # AVX512{BW,VL}
vpabsw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpabsw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpabsw (%rcx), %xmm30 # AVX512{BW,VL}
vpabsw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpabsw 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsw 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpabsw -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsw -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpabsw %ymm29, %ymm30 # AVX512{BW,VL}
vpabsw %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpabsw %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpabsw (%rcx), %ymm30 # AVX512{BW,VL}
vpabsw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpabsw 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsw 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpabsw -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsw -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpacksswb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpacksswb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpacksswb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpacksswb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpacksswb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpacksswb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpackuswb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackuswb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackuswb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpackuswb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackuswb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackuswb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddusb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddusb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddusw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddusw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpavgb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpavgb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpavgw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpavgw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpcmpeqb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxub (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxub 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxub -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxub (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxub 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxub -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminub (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminub 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminub -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminub (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminub 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminub -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%rcx), %xmm30 # AVX512{BW,VL}
vpmovsxbw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpmovsxbw 1016(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%rdx), %xmm30 # AVX512{BW,VL}
vpmovsxbw -1024(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%rdx), %xmm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%rcx), %ymm30 # AVX512{BW,VL}
vpmovsxbw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpmovsxbw 2032(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%rdx), %ymm30 # AVX512{BW,VL}
vpmovsxbw -2048(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%rdx), %ymm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%rcx), %xmm30 # AVX512{BW,VL}
vpmovzxbw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpmovzxbw 1016(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%rdx), %xmm30 # AVX512{BW,VL}
vpmovzxbw -1024(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%rdx), %xmm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%rcx), %ymm30 # AVX512{BW,VL}
vpmovzxbw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpmovzxbw 2032(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%rdx), %ymm30 # AVX512{BW,VL}
vpmovzxbw -2048(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%rdx), %ymm30 # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmullw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmullw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmullw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmullw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmullw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmullw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsadbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsadbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsadbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsadbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshufb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpshufb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpshufb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshufb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpshufb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpshufb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufhw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpshufhw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpshufhw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpshufhw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpshufhw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpshufhw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpshufhw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpshuflw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpshuflw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpshuflw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpshuflw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpshuflw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpshuflw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpshuflw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpshuflw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsraw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsraw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsraw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsraw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsraw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsraw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrldq $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrldq $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsrldq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsrldq $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrldq $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsrldq $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrldq $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsrldq $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsrldq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsrldq $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrldq $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsrldq $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrldq $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsrlw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsrlw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsrlw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsrlw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsrlw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsrlw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsraw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsraw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsraw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsraw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsraw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsraw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubusb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubusb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubusw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubusw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpslldq $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpslldq $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpslldq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpslldq $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpslldq $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpslldq $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpslldq $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpslldq $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpslldq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpslldq $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpslldq $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpslldq $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpslldq $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsllw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsllw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsllw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsllw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsllw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
.intel_syntax noprefix
vpabsb xmm30, xmm29 # AVX512{BW,VL}
vpabsb xmm30{k7}, xmm29 # AVX512{BW,VL}
vpabsb xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpabsb xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpabsb xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpabsb ymm30, ymm29 # AVX512{BW,VL}
vpabsb ymm30{k7}, ymm29 # AVX512{BW,VL}
vpabsb ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpabsb ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpabsb ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpabsw xmm30, xmm29 # AVX512{BW,VL}
vpabsw xmm30{k7}, xmm29 # AVX512{BW,VL}
vpabsw xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpabsw xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpabsw xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpabsw xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpabsw ymm30, ymm29 # AVX512{BW,VL}
vpabsw ymm30{k7}, ymm29 # AVX512{BW,VL}
vpabsw ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpabsw ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpabsw ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpabsw ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpacksswb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpacksswb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpacksswb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpacksswb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpacksswb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpacksswb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpackuswb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpackuswb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpackuswb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpackuswb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpackuswb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpackuswb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpaddb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpaddb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpaddb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpaddb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpaddb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpaddb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpaddb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpaddb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpaddb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpaddb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpaddb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpaddb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpaddsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpaddsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpaddsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpaddsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpaddsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpaddsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpaddsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    # --- AVX512{BW,VL} assembler test fixture (Intel syntax) -----------------
    # Registers xmm/ymm28-30 require EVEX encoding, forcing the AVX512VL forms.
    # Each mnemonic group exercises: reg-reg, {k7} merge-masking, {k7}{z}
    # zero-masking, an unscaled [rcx] form, a SIB form, and memory operands
    # straddling the EVEX Disp8*N compression limits (lines marked "Disp8"
    # fit the compressed 8-bit displacement; their neighbours do not).
    # NOTE(review): instruction text must stay byte-for-byte in sync with the
    # paired expected-disassembly dump; only comments may be edited here.
    # (Tail of the vpaddsb memory-form group; its start precedes this chunk.)
    vpaddsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpaddsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpaddsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpaddsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpaddsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpaddsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpaddsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpaddsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpaddsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpaddsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpaddsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpaddsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpaddsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpaddsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpaddusb xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpaddusb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddusb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddusb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddusb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddusb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpaddusb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpaddusb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpaddusb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpaddusb ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpaddusb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddusb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddusb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddusb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddusb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpaddusb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpaddusb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpaddusb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpaddusw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpaddusw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddusw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddusw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddusw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddusw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpaddusw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpaddusw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpaddusw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpaddusw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpaddusw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddusw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddusw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddusw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddusw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpaddusw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpaddusw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpaddusw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpaddw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpaddw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpaddw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpaddw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpaddw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpaddw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpaddw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpaddw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpaddw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpaddw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpaddw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpaddw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpaddw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpaddw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    # vpalignr carries an extra imm8 (both 0xab and 123 spellings tested).
    vpalignr xmm30, xmm29, xmm28, 0xab # AVX512{BW,VL}
    vpalignr xmm30{k7}, xmm29, xmm28, 0xab # AVX512{BW,VL}
    vpalignr xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{BW,VL}
    vpalignr xmm30, xmm29, xmm28, 123 # AVX512{BW,VL}
    vpalignr xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpalignr xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpalignr xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
    vpalignr xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
    vpalignr xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
    vpalignr xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
    vpalignr ymm30, ymm29, ymm28, 0xab # AVX512{BW,VL}
    vpalignr ymm30{k7}, ymm29, ymm28, 0xab # AVX512{BW,VL}
    vpalignr ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{BW,VL}
    vpalignr ymm30, ymm29, ymm28, 123 # AVX512{BW,VL}
    vpalignr ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpalignr ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpalignr ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
    vpalignr ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
    vpalignr ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
    vpalignr ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
    vpavgb xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpavgb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpavgb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpavgb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpavgb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpavgb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpavgb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpavgb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpavgb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpavgb ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpavgb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpavgb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpavgb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpavgb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpavgb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpavgb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpavgb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpavgb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpavgw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpavgw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpavgw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpavgw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpavgw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpavgw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpavgw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpavgw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpavgw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpavgw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpavgw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpavgw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpavgw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpavgw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpavgw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpavgw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpavgw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpavgw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    # Compare forms write a mask register (k5); only merge-masking {k7} is
    # valid on a mask destination, so no {z} variants appear below.
    vpcmpeqb k5, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpeqb k5{k7}, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpeqb k5, xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpeqb k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpeqb k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpcmpeqb k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpcmpeqb k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpcmpeqb k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpcmpeqb k5, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpeqb k5{k7}, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpeqb k5, ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpeqb k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpeqb k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpcmpeqb k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpcmpeqb k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpcmpeqb k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpcmpeqw k5, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpeqw k5{k7}, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpeqw k5, xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpeqw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpeqw k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpcmpeqw k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpcmpeqw k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpcmpeqw k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpcmpeqw k5, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpeqw k5{k7}, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpeqw k5, ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpeqw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpeqw k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpcmpeqw k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpcmpeqw k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpcmpeqw k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpcmpgtb k5, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpgtb k5{k7}, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpgtb k5, xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpgtb k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpgtb k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpcmpgtb k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpcmpgtb k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpcmpgtb k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpcmpgtb k5, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpgtb k5{k7}, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpgtb k5, ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpgtb k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpgtb k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpcmpgtb k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpcmpgtb k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpcmpgtb k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpcmpgtw k5, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpgtw k5{k7}, xmm30, xmm29 # AVX512{BW,VL}
    vpcmpgtw k5, xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpgtw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpgtw k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpcmpgtw k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpcmpgtw k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpcmpgtw k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpcmpgtw k5, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpgtw k5{k7}, ymm30, ymm29 # AVX512{BW,VL}
    vpcmpgtw k5, ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpcmpgtw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpcmpgtw k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpcmpgtw k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpcmpgtw k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpcmpgtw k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmaddubsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmaddubsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaddubsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaddubsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaddubsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmaddubsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmaddubsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaddubsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaddubsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaddubsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmaddwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmaddwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaddwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaddwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaddwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmaddwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmaddwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaddwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaddwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaddwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmaxsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmaxsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmaxsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmaxsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmaxub xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxub xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxub xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxub xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxub xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxub xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmaxub xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmaxub xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmaxub xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmaxub ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxub ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxub ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxub ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxub ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxub ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmaxub ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmaxub ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmaxub ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmaxuw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxuw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxuw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmaxuw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxuw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmaxuw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxuw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxuw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmaxuw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmaxuw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpminsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpminsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpminsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpminsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpminsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpminsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpminsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpminsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpminsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpminsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpminsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpminsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpminsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpminsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpminsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpminsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpminsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpminsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpminsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpminsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpminsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpminsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpminsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpminsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpminsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpminsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpminsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpminsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpminub xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpminub xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpminub xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpminub xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminub xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminub xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpminub xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpminub xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpminub xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpminub ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpminub ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpminub ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpminub ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminub ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminub ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpminub ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpminub ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpminub ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpminuw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpminuw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpminuw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpminuw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminuw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminuw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpminuw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpminuw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpminuw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpminuw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpminuw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpminuw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpminuw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpminuw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpminuw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpminuw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpminuw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpminuw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    # Byte->word extends read half-width memory: QWORD for an xmm destination
    # (Disp8 boundary +/-1024), XMMWORD for a ymm destination.
    vpmovsxbw xmm30, xmm29 # AVX512{BW,VL}
    vpmovsxbw xmm30{k7}, xmm29 # AVX512{BW,VL}
    vpmovsxbw xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
    vpmovsxbw xmm30, QWORD PTR [rcx] # AVX512{BW,VL}
    vpmovsxbw xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmovsxbw xmm30, QWORD PTR [rdx+1016] # AVX512{BW,VL} Disp8
    vpmovsxbw xmm30, QWORD PTR [rdx+1024] # AVX512{BW,VL}
    vpmovsxbw xmm30, QWORD PTR [rdx-1024] # AVX512{BW,VL} Disp8
    vpmovsxbw xmm30, QWORD PTR [rdx-1032] # AVX512{BW,VL}
    vpmovsxbw ymm30, xmm29 # AVX512{BW,VL}
    vpmovsxbw ymm30{k7}, xmm29 # AVX512{BW,VL}
    vpmovsxbw ymm30{k7}{z}, xmm29 # AVX512{BW,VL}
    vpmovsxbw ymm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmovsxbw ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmovsxbw ymm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmovsxbw ymm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmovsxbw ymm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmovsxbw ymm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmovzxbw xmm30, xmm29 # AVX512{BW,VL}
    vpmovzxbw xmm30{k7}, xmm29 # AVX512{BW,VL}
    vpmovzxbw xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
    vpmovzxbw xmm30, QWORD PTR [rcx] # AVX512{BW,VL}
    vpmovzxbw xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmovzxbw xmm30, QWORD PTR [rdx+1016] # AVX512{BW,VL} Disp8
    vpmovzxbw xmm30, QWORD PTR [rdx+1024] # AVX512{BW,VL}
    vpmovzxbw xmm30, QWORD PTR [rdx-1024] # AVX512{BW,VL} Disp8
    vpmovzxbw xmm30, QWORD PTR [rdx-1032] # AVX512{BW,VL}
    vpmovzxbw ymm30, xmm29 # AVX512{BW,VL}
    vpmovzxbw ymm30{k7}, xmm29 # AVX512{BW,VL}
    vpmovzxbw ymm30{k7}{z}, xmm29 # AVX512{BW,VL}
    vpmovzxbw ymm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmovzxbw ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmovzxbw ymm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmovzxbw ymm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmovzxbw ymm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmovzxbw ymm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmulhrsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhrsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhrsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhrsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmulhrsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmulhrsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhrsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhrsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhrsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmulhrsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmulhuw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhuw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhuw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhuw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmulhuw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmulhuw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhuw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhuw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhuw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmulhuw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmulhw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmulhw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmulhw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmulhw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmulhw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmulhw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmulhw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmulhw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmulhw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmulhw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmulhw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmulhw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmulhw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmulhw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpmullw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpmullw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpmullw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpmullw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmullw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmullw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpmullw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpmullw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpmullw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpmullw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpmullw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpmullw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpmullw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpmullw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpmullw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpmullw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpmullw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpmullw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    # vpsadbw takes no masking, so only unmasked forms are exercised.
    vpsadbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpsadbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsadbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsadbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsadbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsadbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsadbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpsadbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpsadbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsadbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsadbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpsadbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpsadbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpsadbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    vpshufb xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpshufb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpshufb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpshufb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpshufb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpshufb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpshufb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpshufb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpshufb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpshufb ymm30, ymm29, ymm28 # AVX512{BW,VL}
    vpshufb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
    vpshufb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
    vpshufb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
    vpshufb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpshufb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
    vpshufb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
    vpshufb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
    vpshufb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
    # Two-operand shuffles: source + imm8 control (no second vector source).
    vpshufhw xmm30, xmm29, 0xab # AVX512{BW,VL}
    vpshufhw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
    vpshufhw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
    vpshufhw xmm30, xmm29, 123 # AVX512{BW,VL}
    vpshufhw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpshufhw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpshufhw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
    vpshufhw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
    vpshufhw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
    vpshufhw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
    vpshufhw ymm30, ymm29, 0xab # AVX512{BW,VL}
    vpshufhw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
    vpshufhw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
    vpshufhw ymm30, ymm29, 123 # AVX512{BW,VL}
    vpshufhw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpshufhw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpshufhw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
    vpshufhw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
    vpshufhw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
    vpshufhw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
    vpshuflw xmm30, xmm29, 0xab # AVX512{BW,VL}
    vpshuflw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
    vpshuflw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
    vpshuflw xmm30, xmm29, 123 # AVX512{BW,VL}
    vpshuflw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpshuflw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpshuflw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
    vpshuflw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
    vpshuflw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
    vpshuflw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
    vpshuflw ymm30, ymm29, 0xab # AVX512{BW,VL}
    vpshuflw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
    vpshuflw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
    vpshuflw ymm30, ymm29, 123 # AVX512{BW,VL}
    vpshuflw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpshuflw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpshuflw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
    vpshuflw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
    vpshuflw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
    vpshuflw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
    # Variable shifts: the count is always an xmm register / XMMWORD memory
    # operand, even when the destination is ymm.
    vpsllw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpsllw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpsllw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpsllw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsllw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsllw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsllw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsllw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsllw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpsllw ymm30, ymm29, xmm28 # AVX512{BW,VL}
    vpsllw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
    vpsllw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
    vpsllw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsllw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsllw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsllw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsllw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsllw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpsraw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpsraw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpsraw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpsraw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsraw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsraw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsraw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsraw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsraw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpsraw ymm30, ymm29, xmm28 # AVX512{BW,VL}
    vpsraw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
    vpsraw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
    vpsraw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsraw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsraw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsraw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsraw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsraw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpsrlw xmm30, xmm29, xmm28 # AVX512{BW,VL}
    vpsrlw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
    vpsrlw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
    vpsrlw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsrlw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsrlw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsrlw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsrlw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsrlw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    vpsrlw ymm30, ymm29, xmm28 # AVX512{BW,VL}
    vpsrlw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
    vpsrlw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
    vpsrlw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
    vpsrlw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
    vpsrlw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
    vpsrlw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
    vpsrlw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
    vpsrlw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
    # Whole-register byte shift by imm8; no masking forms exist.
    # (This group continues past the end of the chunk.)
    vpsrldq xmm30, xmm29, 0xab # AVX512{BW,VL}
    vpsrldq xmm30, xmm29, 123 # AVX512{BW,VL}
    vpsrldq xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpsrldq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
    vpsrldq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
    vpsrldq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
    vpsrldq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
    vpsrldq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
    vpsrldq ymm30, ymm29, 0xab # AVX512{BW,VL}
    vpsrldq ymm30, ymm29, 123 # AVX512{BW,VL}
    vpsrldq ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
    vpsrldq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsrldq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsrldq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsrlw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsraw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsraw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsubb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpslldq xmm30, xmm29, 0xab # AVX512{BW,VL}
vpslldq xmm30, xmm29, 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpslldq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpslldq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpslldq ymm30, ymm29, 0xab # AVX512{BW,VL}
vpslldq ymm30, ymm29, 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpslldq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpslldq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsllw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsllw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 6,555
|
gas/testsuite/gas/i386/avx2.s
|
# Check i386 AVX2 instructions
# Testsuite fixture: every operand-form template of each AVX2 instruction is
# assembled once in AT&T syntax and once (below) in Intel syntax; the encoded
# bytes are compared against an expected objdump listing, so the instruction
# text here must not be altered.
.allow_index_reg
.text
_start:
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vpmaskmovd (%ecx),%ymm4,%ymm6
vpmaskmovd %ymm4,%ymm6,(%ecx)
vpmaskmovq (%ecx),%ymm4,%ymm6
vpmaskmovq %ymm4,%ymm6,(%ecx)
# Tests for op imm8, ymm/mem256, ymm
vpermpd $7,%ymm6,%ymm2
vpermpd $7,(%ecx),%ymm6
vpermq $7,%ymm6,%ymm2
vpermq $7,(%ecx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vpermd %ymm4,%ymm6,%ymm2
vpermd (%ecx),%ymm6,%ymm2
vpermps %ymm4,%ymm6,%ymm2
vpermps (%ecx),%ymm6,%ymm2
vpsllvd %ymm4,%ymm6,%ymm2
vpsllvd (%ecx),%ymm6,%ymm2
vpsllvq %ymm4,%ymm6,%ymm2
vpsllvq (%ecx),%ymm6,%ymm2
vpsravd %ymm4,%ymm6,%ymm2
vpsravd (%ecx),%ymm6,%ymm2
vpsrlvd %ymm4,%ymm6,%ymm2
vpsrlvd (%ecx),%ymm6,%ymm2
vpsrlvq %ymm4,%ymm6,%ymm2
vpsrlvq (%ecx),%ymm6,%ymm2
# Tests for op mem256, ymm
vmovntdqa (%ecx),%ymm4
# Tests for op ymm, xmm
vbroadcastsd %xmm4,%ymm6
vbroadcastss %xmm4,%ymm6
# Tests for op imm8, ymm/mem256, ymm, ymm
vpblendd $7,%ymm4,%ymm6,%ymm2
vpblendd $7,(%ecx),%ymm6,%ymm2
vperm2i128 $7,%ymm4,%ymm6,%ymm2
vperm2i128 $7,(%ecx),%ymm6,%ymm2
# Tests for op imm8, xmm/mem128, ymm, ymm
vinserti128 $7,%xmm4,%ymm4,%ymm6
vinserti128 $7,(%ecx),%ymm4,%ymm6
# Tests for op mem128, ymm
vbroadcasti128 (%ecx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vpsllvd %xmm4,%xmm6,%xmm2
vpsllvd (%ecx),%xmm6,%xmm7
vpsllvq %xmm4,%xmm6,%xmm2
vpsllvq (%ecx),%xmm6,%xmm7
vpsravd %xmm4,%xmm6,%xmm2
vpsravd (%ecx),%xmm6,%xmm7
vpsrlvd %xmm4,%xmm6,%xmm2
vpsrlvd (%ecx),%xmm6,%xmm7
vpsrlvq %xmm4,%xmm6,%xmm2
vpsrlvq (%ecx),%xmm6,%xmm7
# Tests for op mem128, xmm, xmm
vpmaskmovd (%ecx),%xmm4,%xmm6
vpmaskmovq (%ecx),%xmm4,%xmm6
# Tests for op imm8, ymm, xmm128/mem
vextracti128 $7,%ymm4,%xmm6
vextracti128 $7,%ymm4,(%ecx)
# Tests for op xmm, xmm, mem128
vpmaskmovd %xmm4,%xmm6,(%ecx)
vpmaskmovq %xmm4,%xmm6,(%ecx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vpblendd $7,%xmm4,%xmm6,%xmm2
vpblendd $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm
vpbroadcastq %xmm4,%xmm6
vpbroadcastq (%ecx),%xmm4
# Tests for op xmm/mem64, ymm
vpbroadcastq %xmm4,%ymm6
vpbroadcastq (%ecx),%ymm4
# Tests for op xmm/mem32, ymm
vpbroadcastd %xmm4,%ymm4
vpbroadcastd (%ecx),%ymm4
# Tests for op xmm/mem32, xmm
vpbroadcastd %xmm4,%xmm6
vpbroadcastd (%ecx),%xmm4
# Tests for op xmm/m16, xmm
vpbroadcastw %xmm4,%xmm6
vpbroadcastw (%ecx),%xmm4
# Tests for op xmm/m16, ymm
vpbroadcastw %xmm4,%ymm6
vpbroadcastw (%ecx),%ymm4
# Tests for op xmm/m8, xmm
vpbroadcastb %xmm4,%xmm6
vpbroadcastb (%ecx),%xmm4
# Tests for op xmm/m8, ymm
vpbroadcastb %xmm4,%ymm6
vpbroadcastb (%ecx),%ymm4
# Tests for op xmm, xmm
vbroadcastss %xmm4,%xmm6
# Same instruction set repeated in Intel syntax (operand order reversed:
# destination first).  Memory operands appear both with an explicit
# YMMWORD/XMMWORD/... PTR size override and as bare [ecx], checking that
# the assembler infers the operand size from the register operands.
.intel_syntax noprefix
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vpmaskmovd ymm6,ymm4,YMMWORD PTR [ecx]
vpmaskmovd YMMWORD PTR [ecx],ymm6,ymm4
vpmaskmovd ymm6,ymm4,[ecx]
vpmaskmovd [ecx],ymm6,ymm4
vpmaskmovq ymm6,ymm4,YMMWORD PTR [ecx]
vpmaskmovq YMMWORD PTR [ecx],ymm6,ymm4
vpmaskmovq ymm6,ymm4,[ecx]
vpmaskmovq [ecx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermpd ymm2,ymm6,7
vpermpd ymm6,YMMWORD PTR [ecx],7
vpermpd ymm6,[ecx],7
vpermq ymm2,ymm6,7
vpermq ymm6,YMMWORD PTR [ecx],7
vpermq ymm6,[ecx],7
# Tests for op ymm/mem256, ymm, ymm
vpermd ymm2,ymm6,ymm4
vpermd ymm2,ymm6,YMMWORD PTR [ecx]
vpermd ymm2,ymm6,[ecx]
vpermps ymm2,ymm6,ymm4
vpermps ymm2,ymm6,YMMWORD PTR [ecx]
vpermps ymm2,ymm6,[ecx]
vpsllvd ymm2,ymm6,ymm4
vpsllvd ymm2,ymm6,YMMWORD PTR [ecx]
vpsllvd ymm2,ymm6,[ecx]
vpsllvq ymm2,ymm6,ymm4
vpsllvq ymm2,ymm6,YMMWORD PTR [ecx]
vpsllvq ymm2,ymm6,[ecx]
vpsravd ymm2,ymm6,ymm4
vpsravd ymm2,ymm6,YMMWORD PTR [ecx]
vpsravd ymm2,ymm6,[ecx]
vpsrlvd ymm2,ymm6,ymm4
vpsrlvd ymm2,ymm6,YMMWORD PTR [ecx]
vpsrlvd ymm2,ymm6,[ecx]
vpsrlvq ymm2,ymm6,ymm4
vpsrlvq ymm2,ymm6,YMMWORD PTR [ecx]
vpsrlvq ymm2,ymm6,[ecx]
# Tests for op mem256, ymm
vmovntdqa ymm4,YMMWORD PTR [ecx]
vmovntdqa ymm4,[ecx]
# Tests for op ymm, xmm
vbroadcastsd ymm6,xmm4
vbroadcastss ymm6,xmm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vpblendd ymm2,ymm6,ymm4,7
vpblendd ymm2,ymm6,YMMWORD PTR [ecx],7
vpblendd ymm2,ymm6,[ecx],7
vperm2i128 ymm2,ymm6,ymm4,7
vperm2i128 ymm2,ymm6,YMMWORD PTR [ecx],7
vperm2i128 ymm2,ymm6,[ecx],7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinserti128 ymm6,ymm4,xmm4,7
vinserti128 ymm6,ymm4,XMMWORD PTR [ecx],7
vinserti128 ymm6,ymm4,[ecx],7
# Tests for op mem128, ymm
vbroadcasti128 ymm4,XMMWORD PTR [ecx]
vbroadcasti128 ymm4,[ecx]
# Tests for op xmm/mem128, xmm, xmm
vpsllvd xmm2,xmm6,xmm4
vpsllvd xmm7,xmm6,XMMWORD PTR [ecx]
vpsllvd xmm7,xmm6,[ecx]
vpsllvq xmm2,xmm6,xmm4
vpsllvq xmm7,xmm6,XMMWORD PTR [ecx]
vpsllvq xmm7,xmm6,[ecx]
vpsravd xmm2,xmm6,xmm4
vpsravd xmm7,xmm6,XMMWORD PTR [ecx]
vpsravd xmm7,xmm6,[ecx]
vpsrlvd xmm2,xmm6,xmm4
vpsrlvd xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlvd xmm7,xmm6,[ecx]
vpsrlvq xmm2,xmm6,xmm4
vpsrlvq xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlvq xmm7,xmm6,[ecx]
# Tests for op mem128, xmm, xmm
vpmaskmovd xmm6,xmm4,XMMWORD PTR [ecx]
vpmaskmovd xmm6,xmm4,[ecx]
vpmaskmovq xmm6,xmm4,XMMWORD PTR [ecx]
vpmaskmovq xmm6,xmm4,[ecx]
# Tests for op imm8, ymm, xmm128/mem
vextracti128 xmm6,ymm4,7
vextracti128 XMMWORD PTR [ecx],ymm4,7
vextracti128 [ecx],ymm4,7
# Tests for op xmm, xmm, mem128
vpmaskmovd XMMWORD PTR [ecx],xmm6,xmm4
vpmaskmovd [ecx],xmm6,xmm4
vpmaskmovq XMMWORD PTR [ecx],xmm6,xmm4
vpmaskmovq [ecx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
vpblendd xmm2,xmm6,xmm4,7
vpblendd xmm2,xmm6,XMMWORD PTR [ecx],7
vpblendd xmm2,xmm6,[ecx],7
# Tests for op xmm/mem64, xmm
vpbroadcastq xmm6,xmm4
vpbroadcastq xmm4,QWORD PTR [ecx]
vpbroadcastq xmm4,[ecx]
# Tests for op xmm/mem64, ymm
vpbroadcastq ymm6,xmm4
vpbroadcastq ymm4,QWORD PTR [ecx]
vpbroadcastq ymm4,[ecx]
# Tests for op xmm/mem32, ymm
vpbroadcastd ymm4,xmm4
vpbroadcastd ymm4,DWORD PTR [ecx]
vpbroadcastd ymm4,[ecx]
# Tests for op xmm/mem32, xmm
vpbroadcastd xmm6,xmm4
vpbroadcastd xmm4,DWORD PTR [ecx]
vpbroadcastd xmm4,[ecx]
# Tests for op xmm/m16, xmm
vpbroadcastw xmm6,xmm4
vpbroadcastw xmm4,WORD PTR [ecx]
vpbroadcastw xmm4,[ecx]
# Tests for op xmm/m16, ymm
vpbroadcastw ymm6,xmm4
vpbroadcastw ymm4,WORD PTR [ecx]
vpbroadcastw ymm4,[ecx]
# Tests for op xmm/m8, xmm
vpbroadcastb xmm6,xmm4
vpbroadcastb xmm4,BYTE PTR [ecx]
vpbroadcastb xmm4,[ecx]
# Tests for op xmm/m8, ymm
vpbroadcastb ymm6,xmm4
vpbroadcastb ymm4,BYTE PTR [ecx]
vpbroadcastb ymm4,[ecx]
# Tests for op xmm, xmm
vbroadcastss xmm6,xmm4
|
tactcomplabs/xbgas-binutils-gdb
| 4,111
|
gas/testsuite/gas/i386/fma-scalar.s
|
# Check FMA scalar instructions
# (Header fixed: despite the old "AVX scalar" wording, every mnemonic below is
# a VEX-encoded FMA3 scalar instruction — vfmadd/vfmsub/vfnmadd/vfnmsub.
# The 132/213/231 suffix selects which operand ordering is multiplied/added;
# each form is tested with a register source and a memory source, first in
# AT&T syntax, then in Intel syntax.)
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%ecx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%ecx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%ecx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%ecx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%ecx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%ecx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%ecx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%ecx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%ecx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%ecx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%ecx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%ecx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%ecx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%ecx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%ecx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%ecx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%ecx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%ecx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%ecx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%ecx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%ecx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%ecx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%ecx),%xmm6,%xmm2
# Same instructions in Intel syntax; memory sources are tested both with an
# explicit QWORD/DWORD PTR size and as bare [ecx] (size inferred by gas).
.intel_syntax noprefix
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd132sd xmm2,xmm6,[ecx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd213sd xmm2,xmm6,[ecx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd231sd xmm2,xmm6,[ecx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub132sd xmm2,xmm6,[ecx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub213sd xmm2,xmm6,[ecx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub231sd xmm2,xmm6,[ecx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd132sd xmm2,xmm6,[ecx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd213sd xmm2,xmm6,[ecx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd231sd xmm2,xmm6,[ecx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub132sd xmm2,xmm6,[ecx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub213sd xmm2,xmm6,[ecx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub231sd xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd132ss xmm2,xmm6,[ecx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd213ss xmm2,xmm6,[ecx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd231ss xmm2,xmm6,[ecx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub132ss xmm2,xmm6,[ecx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub213ss xmm2,xmm6,[ecx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub231ss xmm2,xmm6,[ecx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd132ss xmm2,xmm6,[ecx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd213ss xmm2,xmm6,[ecx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd231ss xmm2,xmm6,[ecx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub132ss xmm2,xmm6,[ecx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub213ss xmm2,xmm6,[ecx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub231ss xmm2,xmm6,[ecx]
|
tactcomplabs/xbgas-binutils-gdb
| 1,771
|
gas/testsuite/gas/i386/dwarf2-line-2.s
|
# Testsuite fixture: a minimal function plus hand-assembled DWARF v2
# .debug_info/.debug_abbrev/.debug_line sections, checking that gas accepts a
# pre-built (non-.loc) .debug_line section.  Byte values below encode the
# DWARF line-number program by hand; do not alter them.
.file "dwarf2-test.c"
.text
.section .text.startup,"ax",@progbits
.p2align 4
.globl main
.type main, @function
main:
.cfi_startproc
nop
ret
.cfi_endproc
.size main, .-main
.text
# Hand-built DWARF v2 compile-unit header (length 0 is deliberately bogus —
# this is only a placeholder CU for the test).
.section .debug_info,"",%progbits
.long 0x0
.value 0x2
.long .Ldebug_abbrev0
.byte 0x8
.uleb128 0x1
.section .debug_abbrev,"",@progbits
.Ldebug_abbrev0:
.uleb128 0x0 # (abbrev code)
.uleb128 0x0 # (abbrev code)
.uleb128 0x0 # (abbrev code)
# A non-empty .debug_line section is ok when not using .loc directives
.section .debug_line
.Lline1_begin:
/* NOTE(review): .Lline1_end, .Lbegin_func_cu1 and .Lend_func_cu1 are
   referenced below but not defined in this chunk — confirm they are defined
   later in the file. */
.4byte .Lline1_end - .Lline1_start /* Initial length */
.Lline1_start:
.2byte 2 /* Version */
.4byte .Lline1_lines - .Lline1_hdr /* header_length */
.Lline1_hdr:
.byte 1 /* Minimum insn length */
.byte 1 /* default_is_stmt */
.byte 1 /* line_base */
.byte 1 /* line_range */
.byte 0x10 /* opcode_base */
/* Standard lengths */
.byte 0
.byte 1
.byte 1
.byte 1
.byte 1
.byte 0
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 0
/* Include directories */
.byte 0
/* File names */
.ascii "file1.txt\0"
.uleb128 0
.uleb128 0
.uleb128 0
.byte 0
/* Line-number program.  Extended opcodes are: 0x00, uleb length, sub-opcode;
   length 5 = 1 sub-opcode byte + 4-byte address (sub-opcode 2 =
   DW_LNE_set_address). */
.Lline1_lines:
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lbegin_func_cu1
.byte 3 /* DW_LNS_advance_line */
.sleb128 3 /* ... to 4 */
.byte 1 /* DW_LNS_copy */
.byte 1 /* DW_LNS_copy (second time as an end-of-prologue marker) */
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lend_func_cu1
.byte 0 /* DW_LNE_end_of_sequence */
.uleb128 1
.byte 1
|
tactcomplabs/xbgas-binutils-gdb
| 3,065
|
gas/testsuite/gas/i386/x86-64-bundle.s
|
# Testsuite fixture for gas instruction bundling: with 2^5 = 32-byte bundles,
# the assembler must pad so that no instruction crosses a 32-byte boundary.
.bundle_align_mode 5
# We use these macros to test each pattern at every offset from
# bundle alignment, i.e. [0,31].
# offset_insn: place \insn_name at byte \offset within a fresh 32-byte
# bundle.  The padding byte 0xf4 is the encoding of hlt, so stray execution
# of filler halts visibly.
.macro offset_insn insn_name, offset
.p2align 5
\insn_name\()_offset_\offset\():
.if \offset
.space \offset, 0xf4
.endif
\insn_name
.endm
# test_offsets: expand \insn_name once at every offset 0..31.
.macro test_offsets insn_name
offset_insn \insn_name, 0
offset_insn \insn_name, 1
offset_insn \insn_name, 2
offset_insn \insn_name, 3
offset_insn \insn_name, 4
offset_insn \insn_name, 5
offset_insn \insn_name, 6
offset_insn \insn_name, 7
offset_insn \insn_name, 8
offset_insn \insn_name, 9
offset_insn \insn_name, 10
offset_insn \insn_name, 11
offset_insn \insn_name, 12
offset_insn \insn_name, 13
offset_insn \insn_name, 14
offset_insn \insn_name, 15
offset_insn \insn_name, 16
offset_insn \insn_name, 17
offset_insn \insn_name, 18
offset_insn \insn_name, 19
offset_insn \insn_name, 20
offset_insn \insn_name, 21
offset_insn \insn_name, 22
offset_insn \insn_name, 23
offset_insn \insn_name, 24
offset_insn \insn_name, 25
offset_insn \insn_name, 26
offset_insn \insn_name, 27
offset_insn \insn_name, 28
offset_insn \insn_name, 29
offset_insn \insn_name, 30
offset_insn \insn_name, 31
.endm
# These are vanilla (non-relaxed) instructions of each length.
# (test_N encodes to exactly N bytes.)
.macro test_1
clc
.endm
.macro test_2
add %eax,%eax
.endm
.macro test_3
and $3,%eax
.endm
.macro test_4
lock andl $3,(%rax)
.endm
.macro test_5
mov $0x11223344,%eax
.endm
.macro test_6
movl %eax,0x11223344(%rsi)
.endm
.macro test_7
movl $0x11223344,0x7f(%rsi)
.endm
.macro test_8
lock addl $0x11223344,0x10(%rsi)
.endm
.macro test_9
lock addl $0x11223344,%fs:0x10(%rsi)
.endm
.macro test_10
movl $0x11223344,0x7ff(%rsi)
.endm
.macro test_11
lock addl $0x11223344,0x7ff(%rsi)
.endm
.macro test_12
lock addl $0x11223344,%fs:0x7ff(%rsi)
.endm
.macro test_13
lock addl $0x11223344,%fs:0x7ff(%r11)
.endm
test_offsets test_1
test_offsets test_2
test_offsets test_3
test_offsets test_4
test_offsets test_5
test_offsets test_6
test_offsets test_7
test_offsets test_8
test_offsets test_9
test_offsets test_10
test_offsets test_11
test_offsets test_12
test_offsets test_13
# The only relaxation cases are the jump instructions.
# For each of the three flavors of jump (unconditional, conditional,
# and conditional with prediction), we test a case that can be relaxed
# to its shortest form, and one that must use the long form.
# \@ is gas's macro-invocation counter, giving each expansion unique labels.
.macro jmp_2
jmp jmp_2_\@
movl $0xdeadbeef,%eax
jmp_2_\@\():
movl $0xb00b,%eax
.endm
# Target >127 bytes away (128 one-byte clc's), forcing the long jump form.
.macro jmp_5
jmp jmp_5_\@
.rept 128
clc
.endr
jmp_5_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_2
jz cjmp_2_\@
movl $0xdeadbeef,%eax
cjmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_6
jz cjmp_6_\@
.rept 128
clc
.endr
cjmp_6_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_3
jz,pt pjmp_3_\@
movl $0xdeadbeef,%eax
pjmp_3_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_7
jz,pt pjmp_7_\@
.rept 128
clc
.endr
pjmp_7_\@\():
movl $0xb00b,%eax
.endm
test_offsets jmp_2
test_offsets cjmp_2
test_offsets pjmp_3
test_offsets jmp_5
test_offsets cjmp_6
test_offsets pjmp_7
.p2align 5
hlt
|
tactcomplabs/xbgas-binutils-gdb
| 16,675
|
gas/testsuite/gas/i386/avx-scalar.s
|
# Check AVX scalar instructions
# NOTE(review): gas testsuite input — presumably paired with a fixed
# expected-disassembly dump, so the instruction sequence must not change;
# only comments are added here.
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%ecx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%ecx),%xmm4
# Tests for op mem64, xmm
vmovsd (%ecx),%xmm4
# Tests for op xmm, mem64
vmovsd %xmm4,(%ecx)
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%ecx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%ecx),%ecx
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%ecx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%ecx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%ecx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%ecx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%ecx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%ecx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%ecx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%ecx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%ecx),%xmm6,%xmm2
# All 32 vcmpsd pseudo-op predicate mnemonics, register and memory forms.
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%ecx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%ecx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%ecx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%ecx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%ecx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%ecx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%ecx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%ecx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%ecx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%ecx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%ecx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%ecx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%ecx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%ecx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%ecx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%ecx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%ecx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%ecx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%ecx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%ecx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%ecx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%ecx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%ecx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%ecx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%ecx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%ecx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%ecx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%ecx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%ecx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%ecx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%ecx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%ecx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%ecx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%ecx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%ecx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%ecx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%ecx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%ecx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%ecx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%ecx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%ecx),%xmm6,%xmm2
# All 32 vcmpss pseudo-op predicate mnemonics, register and memory forms.
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%ecx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%ecx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%ecx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%ecx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%ecx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%ecx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%ecx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%ecx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%ecx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%ecx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%ecx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%ecx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%ecx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%ecx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%ecx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%ecx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%ecx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%ecx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%ecx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%ecx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%ecx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%ecx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%ecx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%ecx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%ecx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%ecx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%ecx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%ecx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%ecx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%ecx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%ecx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%ecx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%ecx),%xmm4
# Tests for op mem32, xmm
vmovss (%ecx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%ecx)
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%ecx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%ecx),%ecx
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sd (%ecx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ss (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%ecx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm, xmm, xmm
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
#Tests with different memory and register operands.
# %eiz is the explicit "no index" pseudo-register, forcing a SIB byte.
vcvtsi2sdl 0x1234,%xmm0,%xmm7
vcvtsi2sdl (%ebp),%xmm0,%xmm7
vcvtsi2sdl (%esp),%xmm0,%xmm7
vcvtsi2sdl 0x99(%ebp),%xmm0,%xmm7
vcvtsi2sdl 0x99(,%eiz),%xmm0,%xmm7
vcvtsi2sdl 0x99(,%eiz,2),%xmm0,%xmm7
vcvtsi2sdl 0x99(%eax,%eiz),%xmm0,%xmm7
vcvtsi2sdl 0x99(%eax,%eiz,2),%xmm0,%xmm7
vcvtsi2sdl 0x99(%eax,%ebx,4),%xmm0,%xmm7
vcvtsi2sdl 0x99(%esp,%ecx,8),%xmm0,%xmm7
vcvtsi2sdl 0x99(%ebp,%edx,1),%xmm0,%xmm7
.intel_syntax noprefix
# Intel-syntax replay of the AT&T tests above.  Memory operands appear
# both with an explicit size (QWORD/DWORD PTR) and bare ([ecx]), the
# latter relying on the assembler inferring the size from the mnemonic.
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [ecx]
vcomisd xmm4,[ecx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [ecx]
vucomisd xmm4,[ecx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [ecx]
vmovsd xmm4,[ecx]
# Tests for op xmm, mem64
vmovsd QWORD PTR [ecx],xmm4
vmovsd [ecx],xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [ecx]
vcvtsd2si ecx,[ecx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [ecx]
vcvttsd2si ecx,[ecx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [ecx],7
vcmpsd xmm2,xmm6,[ecx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [ecx],7
vroundsd xmm2,xmm6,[ecx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [ecx]
vaddsd xmm2,xmm6,[ecx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [ecx]
vcvtsd2ss xmm2,xmm6,[ecx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [ecx]
vdivsd xmm2,xmm6,[ecx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [ecx]
vmaxsd xmm2,xmm6,[ecx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [ecx]
vminsd xmm2,xmm6,[ecx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [ecx]
vmulsd xmm2,xmm6,[ecx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [ecx]
vsqrtsd xmm2,xmm6,[ecx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [ecx]
vsubsd xmm2,xmm6,[ecx]
# All 32 vcmpsd predicate mnemonics: register, sized-memory, bare-memory.
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeqsd xmm2,xmm6,[ecx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpltsd xmm2,xmm6,[ecx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [ecx]
vcmplesd xmm2,xmm6,[ecx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpunordsd xmm2,xmm6,[ecx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneqsd xmm2,xmm6,[ecx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnltsd xmm2,xmm6,[ecx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlesd xmm2,xmm6,[ecx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpordsd xmm2,xmm6,[ecx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_uqsd xmm2,xmm6,[ecx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [ecx]
vcmpngesd xmm2,xmm6,[ecx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngtsd xmm2,xmm6,[ecx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalsesd xmm2,xmm6,[ecx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_oqsd xmm2,xmm6,[ecx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [ecx]
vcmpgesd xmm2,xmm6,[ecx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgtsd xmm2,xmm6,[ecx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [ecx]
vcmptruesd xmm2,xmm6,[ecx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ossd xmm2,xmm6,[ecx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmplt_oqsd xmm2,xmm6,[ecx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmple_oqsd xmm2,xmm6,[ecx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpunord_ssd xmm2,xmm6,[ecx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ussd xmm2,xmm6,[ecx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlt_uqsd xmm2,xmm6,[ecx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnle_uqsd xmm2,xmm6,[ecx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpord_ssd xmm2,xmm6,[ecx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ussd xmm2,xmm6,[ecx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnge_uqsd xmm2,xmm6,[ecx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngt_uqsd xmm2,xmm6,[ecx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalse_ossd xmm2,xmm6,[ecx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ossd xmm2,xmm6,[ecx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpge_oqsd xmm2,xmm6,[ecx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgt_oqsd xmm2,xmm6,[ecx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmptrue_ussd xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [ecx]
vaddss xmm2,xmm6,[ecx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [ecx]
vcvtss2sd xmm2,xmm6,[ecx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [ecx]
vdivss xmm2,xmm6,[ecx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [ecx]
vmaxss xmm2,xmm6,[ecx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [ecx]
vminss xmm2,xmm6,[ecx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [ecx]
vmulss xmm2,xmm6,[ecx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [ecx]
vrcpss xmm2,xmm6,[ecx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [ecx]
vrsqrtss xmm2,xmm6,[ecx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [ecx]
vsqrtss xmm2,xmm6,[ecx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [ecx]
vsubss xmm2,xmm6,[ecx]
# All 32 vcmpss predicate mnemonics: register, sized-memory, bare-memory.
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeqss xmm2,xmm6,[ecx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [ecx]
vcmpltss xmm2,xmm6,[ecx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [ecx]
vcmpless xmm2,xmm6,[ecx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [ecx]
vcmpunordss xmm2,xmm6,[ecx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneqss xmm2,xmm6,[ecx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [ecx]
vcmpnltss xmm2,xmm6,[ecx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [ecx]
vcmpnless xmm2,xmm6,[ecx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [ecx]
vcmpordss xmm2,xmm6,[ecx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_uqss xmm2,xmm6,[ecx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [ecx]
vcmpngess xmm2,xmm6,[ecx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [ecx]
vcmpngtss xmm2,xmm6,[ecx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [ecx]
vcmpfalsess xmm2,xmm6,[ecx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_oqss xmm2,xmm6,[ecx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [ecx]
vcmpgess xmm2,xmm6,[ecx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [ecx]
vcmpgtss xmm2,xmm6,[ecx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [ecx]
vcmptruess xmm2,xmm6,[ecx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_osss xmm2,xmm6,[ecx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmplt_oqss xmm2,xmm6,[ecx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmple_oqss xmm2,xmm6,[ecx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpunord_sss xmm2,xmm6,[ecx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_usss xmm2,xmm6,[ecx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnlt_uqss xmm2,xmm6,[ecx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnle_uqss xmm2,xmm6,[ecx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpord_sss xmm2,xmm6,[ecx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_usss xmm2,xmm6,[ecx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnge_uqss xmm2,xmm6,[ecx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpngt_uqss xmm2,xmm6,[ecx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpfalse_osss xmm2,xmm6,[ecx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_osss xmm2,xmm6,[ecx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpge_oqss xmm2,xmm6,[ecx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpgt_oqss xmm2,xmm6,[ecx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [ecx]
vcmptrue_usss xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [ecx]
vcomiss xmm4,[ecx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [ecx]
vucomiss xmm4,[ecx]
# Tests for op mem32, xmm
vmovss xmm4,DWORD PTR [ecx]
vmovss xmm4,[ecx]
# Tests for op xmm, mem32
vmovss DWORD PTR [ecx],xmm4
vmovss [ecx],xmm4
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [ecx]
vcvtss2si ecx,[ecx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [ecx]
vcvttss2si ecx,[ecx]
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2sd xmm6,xmm4,[ecx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2ss xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [ecx],7
vcmpss xmm2,xmm6,[ecx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [ecx],7
vroundss xmm2,xmm6,[ecx],7
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
#Tests with different memory and register operands.
# eiz is the explicit "no index" pseudo-register, forcing a SIB byte.
vcvtsi2sd xmm7,xmm0,DWORD PTR ds:0x1234
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp]
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*1+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*2+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*1+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*2+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+ebx*4+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [esp+ecx*8+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+edx*1+0x99]
|
tactcomplabs/xbgas-binutils-gdb
| 7,066
|
gas/testsuite/gas/i386/avx512ifma_vl.s
|
# Check 32bit AVX512{IFMA,VL} instructions
# NOTE(review): lines tagged "Disp8" use displacements that fit the EVEX
# compressed disp8*N encoding (N = vector length for full loads, element
# size for {1toX} broadcasts); the adjacent offsets just outside that
# range force a 4-byte displacement.  Keep the offset pairs intact.
.allow_index_reg
.text
_start:
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52luq (%ecx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52luq (%ecx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52luq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52luq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %xmm4, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52huq (%ecx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %ymm4, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{IFMA,VL}
vpmadd52huq (%ecx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
vpmadd52huq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL} Disp8
vpmadd52huq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{IFMA,VL}
# Intel-syntax replay of the tests above.
.intel_syntax noprefix
vpmadd52luq xmm6{k7}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}{z}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{IFMA,VL}
vpmadd52luq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}{z}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{IFMA,VL}
vpmadd52luq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}{z}, xmm5, xmm4 # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{IFMA,VL}
vpmadd52huq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}{z}, ymm5, ymm4 # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{IFMA,VL}
vpmadd52huq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{IFMA,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 1,972
|
gas/testsuite/gas/i386/x86-64-avx512vl_vpclmulqdq-wig.s
|
# Check 64bit AVX512VL,VPCLMULQDQ WIG instructions
# The {evex} pseudo-prefix forces EVEX encoding of forms that would
# otherwise assemble to the shorter VEX encoding; both variants are
# exercised for each operand combination.
.allow_index_reg
.text
_start:
vpclmulqdq $0xab, %xmm23, %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%rdx), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm19, %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %xmm23, %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%rdx), %xmm21, %xmm17 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm19, %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm23 # AVX512VL,VPCLMULQDQ Disp8
# Intel-syntax replay of the tests above.
.intel_syntax noprefix
vpclmulqdq xmm18, xmm22, xmm17, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm18, xmm22, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm18, xmm22, XMMWORD PTR [rdx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm26, ymm25, ymm23, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm26, ymm25, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm26, ymm25, YMMWORD PTR [rdx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm18, xmm22, xmm17, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm18, xmm22, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm18, xmm22, XMMWORD PTR [rdx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm26, ymm25, ymm23, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm26, ymm25, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm26, ymm25, YMMWORD PTR [rdx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 3,612
|
gas/testsuite/gas/i386/general.s
|
.psize 0
.text
#test jumps and calls
1: jmp 1b
jmp xxx
jmp *xxx
jmp xxx(,1)
jmp *%edi
jmp %edi
jmp *(%edi)
jmp (%edi)
ljmp *xxx(,%edi,4)
ljmp xxx(,%edi,4)
ljmp *xxx
ljmp xxx(,1)
ljmp $0x1234,$xxx
call 1b
call xxx
call *xxx
call xxx(,1)
call *%edi
call %edi
call *(%edi)
call (%edi)
lcall *xxx(,%edi,4)
lcall xxx(,%edi,4)
lcall *xxx
lcall xxx(,1)
lcall $0x1234,$xxx
# test various segment reg insns
push %ds
pushl %ds
pop %ds
popl %ds
mov %ds,%eax
movl %ds,%eax
movl %ds,%ebx
mov %eax,%ds
movl %ebx,%ds
movl %eax,%ds
pushw %ds
popw %ds
mov %ds,%ax
movw %ds,%ax
movw %ds,%di
mov %ax,%ds
movw %ax,%ds
movw %di,%ds
# test various pushes
pushl $10
pushw $10
push $10
pushl $1000
pushw $1000
push $1000
pushl 1f
pushw 1f
push 1f
push (1f-.)(%ebx)
push 1f-.
# these, and others like them should have no operand size prefix
1: lldt %cx
lmsw %ax
# Just to make sure these don't become illegal due to over-enthusiastic
# register checking
movsbw %al,%di
movsbl %al,%ecx
movswl %ax,%ecx
movzbw %al,%di
movzbl %al,%ecx
movzwl %ax,%ecx
in %dx,%al
in %dx,%ax
in %dx,%eax
in (%dx),%al
in (%dx),%ax
in (%dx),%eax
inb %dx,%al
inw %dx,%ax
inl %dx,%eax
inb %dx
inw %dx
inl %dx
inb $255
inw $2
inl $4
in $13, %ax
out %al,%dx
out %ax,%dx
out %eax,%dx
out %al,(%dx)
out %ax,(%dx)
out %eax,(%dx)
outb %al,%dx
outw %ax,%dx
outl %eax,%dx
outb %dx
outw %dx
outl %dx
outb $255
outw $2
outl $4
out %ax, $13
# These are used in AIX.
inw (%dx)
outw (%dx)
movsb
cmpsw
scasl
xlatb
movsl %cs:(%esi),%es:(%edi)
setae (%ebx)
setaeb (%ebx)
setae %al
orb $1,%al
orl $0x100,%eax
orb $1,%bl
#these should give warnings
fldl %st(1)
fstl %st(2)
fstpl %st(3)
fcoml %st(4)
fcompl %st(5)
faddp %st(1),%st
fmulp %st(2),%st
fsub %st(3),%st
fsubr %st(4),%st
fdiv %st(5),%st
fdivr %st(6),%st
fadd
fsub
fmul
fdiv
fsubr
fdivr
#these should all be legal
btl %edx, 0x123456
btl %edx, %eax
orb $1,%al
orb $1,%bl
movl 17,%eax
mov 17,%eax
inw %dx,%ax
inl %dx,%eax
inw (%dx),%ax
inl (%dx),%eax
in (%dx),%al
in (%dx),%ax
in (%dx),%eax
movzbl (%edi,%esi),%edx
movzbl 28(%ebp),%eax
movzbl %al,%eax
movzbl %cl,%esi
xlat %es:(%ebx)
xlat
xlatb
1: fstp %st(0)
loop 1b
divb %cl
divw %cx
divl %ecx
div %cl
div %cx
div %ecx
div %cl,%al
div %cx,%ax
div %ecx,%eax
# More segment-register moves and pushes.
mov %si,%ds
movl %edi,%ds
pushl %ds
push %ds
# Absolute addresses 0 and 0x10000 as memory operands.
mov 0,%al
mov 0x10000,%ax
mov %eax,%ebx
# pushf/popf in default, 32-bit and 16-bit operand sizes.
pushf
pushfl
pushfw
popf
popfl
popfw
# Index-only addressing (no base register).
mov %esi,(,%ebx,1)
andb $~0x80,foo
# AND with immediates that fit byte/word/dword encodings, first in
# 32-bit mode, then repeated after switching to 16-bit mode.
and $0xfffe,%ax
and $0xff00,%ax
and $0xfffe,%eax
and $0xff00,%eax
and $0xfffffffe,%eax
.code16
and $0xfffe,%ax
and $0xff00,%ax
and $0xfffe,%eax
and $0xff00,%eax
and $0xfffffffe,%eax
#check 16-bit code auto address prefix
.code16gcc
leal -256(%ebp),%edx
mov %al,-129(%ebp)
mov %ah,-128(%ebp)
leal -1760(%ebp),%ebx
movl %eax,140(%esp)
.code32
# Make sure that we won't remove movzb by accident.
movzb %al,%di
movzb %al,%ecx
.code16gcc
# Except for IRET use 32-bit implicit stack accesses by default.
call .
call *(%bx)
enter $0,$0
iret
lcall *(%bx)
lcall $0,$0
leave
lret
lret $0
push $0
push $0x1234
push (%bx)
push %es
push %fs
pusha
pushf
pop (%bx)
pop %es
pop %fs
popa
popf
ret
ret $0
# However use 16-bit branches not accessing the stack by default.
ja .
ja .+0x1234
jcxz .
jmp .
jmp .+0x1234
jmp *(%bx)
ljmp *(%bx)
ljmp $0,$0
loop .
syscall
sysenter
sysexit
sysret
xbegin .
# Use 16-bit layout by default for fldenv.
fldenv (%eax)
fldenvs (%eax)
fldenvl (%eax)
# Force a good alignment.
.p2align 4,0
# ======================================================================
# NOTE(review): the lines above/below were joined from separate files;
# the residue of that join (repo name, size, path, pipe characters) is
# preserved here as comments so the file remains assemblable.
#   repo: tactcomplabs/xbgas-binutils-gdb   (680,559 bytes)
#   next file: gas/testsuite/gas/i386/avx512f_vl.s
# ======================================================================
# Check 32bit AVX512{F,VL} instructions
# Assembler test input: each instruction below exercises one operand
# form (register, memory, broadcast {1toN}, masking {%kN}, zero-masking
# {z}).  Lines tagged "Disp8" are expected to use the compressed 8-bit
# displacement (EVEX disp8*N) encoding -- TODO confirm against the
# accompanying .d expected-output file.
.allow_index_reg
.text
_start:
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vaddpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vaddpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vaddps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vaddps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vaddps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vaddps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vaddps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vaddps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
valignd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
valignd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignd $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignd $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vblendmpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vblendmpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vblendmps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vblendmps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vblendmps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vblendmps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vblendmps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vblendmps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastf32x4 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastf32x4 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastf32x4 -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastf32x4 -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcasti32x4 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcasti32x4 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcasti32x4 -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcasti32x4 -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastsd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastsd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastsd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vbroadcastsd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastss (%ecx), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vbroadcastss -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vbroadcastss (%ecx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vbroadcastss -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vbroadcastss -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vbroadcastss %xmm5, %xmm6{%k7} # AVX512{F,VL}
vbroadcastss %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vbroadcastss %xmm5, %ymm6{%k7} # AVX512{F,VL}
vbroadcastss %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcmppd $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%eax){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 1016(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -1032(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmppd $0xab, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, (%eax){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, 1016(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, 1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmppd $123, -1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmppd $123, -1032(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%eax){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 508(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -516(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vcmpps $0xab, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, (%eax){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, 508(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, 512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcmpps $123, -512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vcmpps $123, -516(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vcompresspd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcompresspd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompresspd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vcompresspd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vcompresspd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcompresspd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompresspd %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vcompresspd %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vcompresspd %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vcompresspd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcompresspd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcompresspd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcompresspd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcompressps %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcompressps %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompressps %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vcompressps %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vcompressps %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcompressps %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcompressps %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vcompressps %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vcompressps %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vcompressps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcompressps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcompressps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcompressps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtdq2pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtdq2pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd 508(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd -512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -516(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtdq2pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd 508(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd 512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2pd -512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2pd -516(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtdq2ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtdq2ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtdq2ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtdq2ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtdq2ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtpd2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2dqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2dqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2dqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2dqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2psx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2psy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2psy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2psy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2udqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtpd2udqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtpd2udqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtpd2udqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtph2ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtph2ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtph2ps -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtph2ps -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2dq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2dq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2dq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2dq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2dq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd 508(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd -512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -516(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd 508(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd 512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2pd -512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2pd -516(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2ph $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2ph $123, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtps2udq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtps2udq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2udq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtps2udq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtps2udq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttpd2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2dqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2dqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2dqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2dqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttps2dq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2dq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvttps2dq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
# ---------------------------------------------------------------------------
# AVX-512{F,VL} encoding tests, 32-bit AT&T syntax (auto-generated pattern).
# Each mnemonic group walks the same operand-form matrix:
#   - register form, masked with {%k7}, and zero-masked {%k7}{z}
#   - plain memory operand and a SIB form with large displacement
#   - broadcast forms {1toN} (N = vector width / element width)
#   - displacement boundaries around the EVEX Disp8*N compressed-disp range:
#     the "Disp8" lines are the largest/smallest values encodable as a scaled
#     8-bit displacement; the neighbouring lines force a full 32-bit disp.
# Do not edit individual lines: the companion .d dump file matches the
# disassembly of each instruction in order.
# ---------------------------------------------------------------------------
# --- vcvttps2dq (group continues from before this chunk) ---
vcvttps2dq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2dq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2dq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
# --- vcvtudq2pd: unsigned dword -> double conversion ---
vcvtudq2pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtudq2pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd 508(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd -512(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -516(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtudq2pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd 508(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd 512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2pd -512(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2pd -516(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
# --- vcvtudq2ps: unsigned dword -> single conversion ---
vcvtudq2ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvtudq2ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvtudq2ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtudq2ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvtudq2ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvtudq2ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
# --- vdivpd: packed double divide (three-operand) ---
vdivpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vdivpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vdivpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vdivps: packed single divide (three-operand) ---
vdivps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vdivps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vdivps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vdivps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vdivps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vdivps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vdivps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vexpandpd: expand packed doubles under mask (no broadcast forms;
#     memory forms use Disp8*8 element-granular compression) ---
vexpandpd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vexpandpd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vexpandpd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandpd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandpd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandpd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vexpandpd (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vexpandpd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vexpandpd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandpd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandpd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandpd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandpd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vexpandpd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vexpandpd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vexpandpd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
# --- vexpandps: expand packed singles under mask (Disp8*4 compression) ---
vexpandps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vexpandps (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vexpandps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vexpandps 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandps 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandps -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vexpandps -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vexpandps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vexpandps (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vexpandps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vexpandps 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandps 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandps -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vexpandps -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vexpandps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vexpandps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vexpandps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vexpandps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
# --- vextractf32x4 / vextracti32x4: extract 128-bit lane, with both an
#     arbitrary immediate (0xab) and a decimal one (123) ---
vextractf32x4 $0xab, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vextractf32x4 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vextracti32x4 $0xab, %ymm5, %xmm6{%k7} # AVX512{F,VL}
vextracti32x4 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, %xmm6{%k7} # AVX512{F,VL}
# --- vfmadd132pd ---
vfmadd132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmadd132ps ---
vfmadd132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmadd213pd ---
vfmadd213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmadd213ps ---
vfmadd213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmadd231pd ---
vfmadd231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmadd231ps ---
vfmadd231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmadd231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmadd231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmadd231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmadd231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmadd231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmaddsub132pd ---
vfmaddsub132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmaddsub132ps ---
vfmaddsub132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmaddsub213pd ---
vfmaddsub213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmaddsub213ps ---
vfmaddsub213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmaddsub231pd ---
vfmaddsub231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmaddsub231ps ---
vfmaddsub231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmaddsub231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmaddsub231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmaddsub231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmsub132pd ---
vfmsub132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmsub132ps ---
vfmsub132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmsub213pd ---
vfmsub213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmsub213ps ---
vfmsub213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmsub231pd ---
vfmsub231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# --- vfmsub231ps (group continues past this chunk) ---
vfmsub231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsub231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsub231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsub231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsub231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsub231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsubadd132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsubadd132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsubadd132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsubadd132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsubadd213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsubadd213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsubadd213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsubadd213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsubadd231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsubadd231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfmsubadd231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfmsubadd231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfmsubadd231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfmsubadd231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfmsubadd231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmadd132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmadd132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmadd132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmadd132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmadd213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmadd213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmadd213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmadd213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmadd231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmadd231pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmadd231ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmadd231ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmadd231ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmadd231ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmadd231ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmsub132pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmsub132pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmsub132ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub132ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmsub132ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub132ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub132ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmsub213pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmsub213pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmsub213ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub213ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfnmsub213ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub213ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfnmsub213ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfnmsub231pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfnmsub231pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub231pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub231pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub231pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfnmsub231pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfnmsub231pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
	# Machine-generated AVX-512{F,VL} test vectors (GAS/AT&T, i386).
	# Each family exercises: register form, merge-masking {%k7},
	# zeroing-masking {%k7}{z}, plain/SIB memory operands, embedded
	# broadcast {1toN}, and both edges of the EVEX Disp8*N compressed
	# displacement (lines tagged "Disp8" must encode as 8-bit scaled
	# displacements; their neighbours must fall back to disp32).
	# NOTE(review): instruction text is compared against expected
	# disassembly — do not reformat operands.
	vfnmsub231pd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231pd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231ps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231ps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	# Gathers: VSIB addressing with the three legal scales; index register
	# width vs. destination width follows the d/q element combinations
	# (e.g. vgatherqps uses a ymm index but writes only an xmm result).
	vgatherdpd	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	123(%ebp,%xmm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	256(%eax,%xmm7), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdpd	1024(%ecx,%xmm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdps	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdps	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdps	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherdps	123(%ebp,%ymm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdps	256(%eax,%ymm7), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherdps	1024(%ecx,%ymm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	123(%ebp,%ymm7,8), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	256(%eax,%ymm7), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqpd	1024(%ecx,%ymm7,4), %ymm6{%k1}	 # AVX512{F,VL}
	vgatherqps	123(%ebp,%xmm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	256(%eax,%xmm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	1024(%ecx,%xmm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	123(%ebp,%ymm7,8), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	256(%eax,%ymm7), %xmm6{%k1}	 # AVX512{F,VL}
	vgatherqps	1024(%ecx,%ymm7,4), %xmm6{%k1}	 # AVX512{F,VL}
	# vgetexp: extract exponent; broadcast factor matches element width
	# ({1to2}/{1to4} for pd, {1to4}/{1to8} for ps).
	vgetexppd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vgetexppd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	(%eax){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexppd	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vgetexppd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	(%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexppd	-1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexppd	-1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vgetexpps	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	(%eax){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	508(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-516(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vgetexpps	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	(%eax){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	508(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetexpps	-512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetexpps	-516(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	# vgetmant: immediate forms use both 0xab (hex) and 123 (decimal)
	# to exercise imm8 parsing.
	vgetmantpd	$0xab, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vgetmantpd	$123, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, (%eax){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, 2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, -2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, 1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, -1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$0xab, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vgetmantpd	$123, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, (%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, 4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, -4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, 1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantpd	$123, -1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$0xab, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vgetmantps	$123, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, (%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, (%eax){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, 2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, -2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, 508(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, -512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -516(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$0xab, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vgetmantps	$123, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, (%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, (%eax){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, 4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, -4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, 508(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vgetmantps	$123, -512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -516(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	# 128-bit lane insertions into ymm (memory Disp8*N scale is 16).
	vinsertf32x4	$0xab, %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinsertf32x4	$0xab, %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vinsertf32x4	$123, %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinsertf32x4	$123, (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinsertf32x4	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinsertf32x4	$123, 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vinsertf32x4	$123, 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinsertf32x4	$123, -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vinsertf32x4	$123, -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinserti32x4	$0xab, %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinserti32x4	$0xab, %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vinserti32x4	$123, %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinserti32x4	$123, (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinserti32x4	$123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinserti32x4	$123, 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vinserti32x4	$123, 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vinserti32x4	$123, -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vinserti32x4	$123, -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	# Packed FP min/max, three-operand EVEX forms.
	vmaxpd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmaxpd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxpd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmaxpd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxpd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxpd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmaxps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmaxps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmaxps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmaxps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmaxps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vminpd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminpd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vminpd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminpd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminpd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vminps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vminps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vminps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vminps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vminps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	# Masked moves (aligned/unaligned, fp/int): no broadcast forms —
	# these instructions do not accept {1toN}.
	vmovapd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovapd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovapd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovapd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovapd	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovapd	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovapd	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovapd	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovapd	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovapd	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovapd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovapd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovapd	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovapd	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovapd	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovapd	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovaps	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovaps	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovaps	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovaps	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovaps	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovaps	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovaps	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovaps	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovaps	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovaps	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovaps	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovaps	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovaps	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovaps	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovaps	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovaps	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	# vmovddup xmm memory form loads only 8 bytes, hence the 1016/1024
	# Disp8 boundary instead of 2032/2048.
	vmovddup	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovddup	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovddup	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovddup	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovddup	1016(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovddup	1024(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovddup	-1024(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovddup	-1032(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovddup	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovddup	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovddup	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovddup	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovddup	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovddup	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovddup	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovddup	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqa32	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa32	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa32	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqa32	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa32	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa32	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa32	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqa64	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa64	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa64	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqa64	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa64	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqa64	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqa64	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqu32	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu32	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu32	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqu32	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu32	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu32	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu32	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqu64	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu64	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu64	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovdqu64	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu64	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovdqu64	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovdqu64	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovshdup	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovshdup	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovshdup	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovshdup	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovshdup	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovshdup	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovshdup	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovshdup	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovshdup	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovshdup	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovshdup	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovshdup	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovshdup	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovshdup	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovshdup	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovshdup	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovsldup	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovsldup	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovsldup	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovsldup	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovsldup	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovsldup	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovsldup	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovsldup	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovsldup	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovsldup	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovsldup	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovsldup	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovsldup	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovsldup	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovsldup	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovsldup	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovupd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovupd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovupd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovupd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovupd	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovupd	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovupd	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovupd	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovupd	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovupd	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovupd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovupd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovupd	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovupd	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovupd	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovupd	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovups	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmovups	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmovups	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovups	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vmovups	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovups	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovups	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmovups	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vmovups	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmovups	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmovups	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovups	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vmovups	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovups	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vmovups	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmovups	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	# Packed FP multiply.
	vmulpd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmulpd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulpd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmulpd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulpd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulpd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vmulps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vmulps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vmulps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vmulps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vmulps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	# Packed integer absolute value (d/q element widths).
	vpabsd	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpabsd	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	(%eax){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	508(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	-512(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	-516(%edx){1to4}, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsd	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpabsd	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	(%eax){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	508(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsd	-512(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsd	-516(%edx){1to8}, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	%xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	%xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpabsq	(%ecx), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	(%eax){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	2032(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	2048(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	-2048(%edx), %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	-2064(%edx), %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	-1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	-1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{F,VL}
	vpabsq	%ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	%ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpabsq	(%ecx), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	(%eax){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	4064(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	4096(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	-4096(%edx), %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	-4128(%edx), %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	vpabsq	-1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpabsq	-1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{F,VL}
	# Packed integer add (d/q element widths).
	vpaddd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpaddd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpaddd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddd	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddd	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpaddq	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpaddq	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpaddq	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpaddq	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpaddq	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	# Packed bitwise AND / AND-NOT, dword element width
	# (the vpandnd ymm group continues past this chunk).
	vpandd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpandd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpandd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandd	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL} Disp8
	vpandd	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{F,VL}
	vpandnd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL} Disp8
	vpandnd	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{F,VL}
	vpandnd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{F,VL}
	vpandnd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
	vpandnd	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{F,VL}
vpandnd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpandnq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandnq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandnq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandnq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandnq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandnq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpandnq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandnq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandnq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpandq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpandq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpandq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpandq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpandq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpandq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpblendmd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpblendmd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vpbroadcastd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vpbroadcastd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastd 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastd -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpbroadcastd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpbroadcastd %eax, %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd %eax, %xmm6{%k7}{z} # AVX512{F,VL}
vpbroadcastd %ebp, %xmm6{%k7} # AVX512{F,VL}
vpbroadcastd %eax, %ymm6{%k7} # AVX512{F,VL}
vpbroadcastd %eax, %ymm6{%k7}{z} # AVX512{F,VL}
vpbroadcastd %ebp, %ymm6{%k7} # AVX512{F,VL}
vpbroadcastq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastq (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vpbroadcastq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastq 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastq 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastq -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastq -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpbroadcastq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastq (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vpbroadcastq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpbroadcastq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpbroadcastq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpbroadcastq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpbroadcastq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpbroadcastq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpcmpd $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, (%eax){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, 508(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, 512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, -512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, -516(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, (%eax){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, 508(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, 512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, -512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpd $123, -516(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd (%eax){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd 508(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd 512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd -512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd -516(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd (%eax){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd 508(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd 512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqd -512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqd -516(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq (%eax){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq 1016(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq 1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq -1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq -1032(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq (%eax){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq 1016(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq 1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpeqq -1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpeqq -1032(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%eax){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd 508(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd 512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd -512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd -516(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%eax){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd 508(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd 512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtd -512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtd -516(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%eax){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq 1016(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq 1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq -1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq -1032(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%eax){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq 1016(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq 1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpgtq -1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpgtq -1032(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, (%eax){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, 1016(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, 1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, -1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, -1032(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, (%eax){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, 1016(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, 1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, -1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpq $123, -1032(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, (%eax){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, 508(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, 512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, -512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, -516(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, (%eax){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, 508(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, 512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, -512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpud $123, -516(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, (%eax){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, 1016(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, 1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, -1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, -1032(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, (%eax){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, 1016(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, 1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, -1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vpcmpuq $123, -1032(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpblendmq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpblendmq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpblendmq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpblendmq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpblendmq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpblendmq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpblendmq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpcompressd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpcompressd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpcompressd %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressd %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpcompressd %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressd %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpcompressd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpcompressd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpcompressd %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressd %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpcompressd %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressd %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpcompressd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpcompressd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpcompressd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpcompressd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermilpd $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpermilpd $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpermilpd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermilpd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermilpd $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermilpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermilpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermilpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermilps $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpermilps $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpermilps $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermilps $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpermilps $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpermilps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermilps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermilps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermilps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermilps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermilps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermilps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermpd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermpd $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpermq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpermq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpermq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpermq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpexpandd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpexpandd (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vpexpandd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpexpandd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpexpandd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpexpandd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpexpandd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpexpandd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpexpandd (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vpexpandd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpexpandd 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpexpandd 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpexpandd -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpexpandd -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpexpandd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpexpandd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpexpandd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpexpandd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpexpandq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpexpandq (%ecx), %xmm6{%k7}{z} # AVX512{F,VL}
vpexpandq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpexpandq 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpexpandq 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpexpandq -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpexpandq -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpexpandq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpexpandq (%ecx), %ymm6{%k7}{z} # AVX512{F,VL}
vpexpandq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpexpandq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpexpandq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpexpandq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpexpandq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpexpandq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpexpandq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpexpandq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpexpandq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
# vpgatherdd: VSIB gathers at 128- and 256-bit widths. Gathers take only a
# merge mask ({%k1}, no {z}) and require a vector index register; the three
# address forms per width vary base/index/scale/displacement.
vpgatherdd 123(%ebp,%xmm7,8), %xmm6{%k1} # AVX512{F,VL}
vpgatherdd 256(%eax,%xmm7), %xmm6{%k1} # AVX512{F,VL}
vpgatherdd 1024(%ecx,%xmm7,4), %xmm6{%k1} # AVX512{F,VL}
vpgatherdd 123(%ebp,%ymm7,8), %ymm6{%k1} # AVX512{F,VL}
vpgatherdd 256(%eax,%ymm7), %ymm6{%k1} # AVX512{F,VL}
vpgatherdd 1024(%ecx,%ymm7,4), %ymm6{%k1} # AVX512{F,VL}
vpgatherdq 123(%ebp,%xmm7,8), %xmm6{%k1} # AVX512{F,VL}
vpgatherdq 256(%eax,%xmm7), %xmm6{%k1} # AVX512{F,VL}
vpgatherdq 1024(%ecx,%xmm7,4), %xmm6{%k1} # AVX512{F,VL}
vpgatherdq 123(%ebp,%xmm7,8), %ymm6{%k1} # AVX512{F,VL}
vpgatherdq 256(%eax,%xmm7), %ymm6{%k1} # AVX512{F,VL}
vpgatherdq 1024(%ecx,%xmm7,4), %ymm6{%k1} # AVX512{F,VL}
vpgatherqd 123(%ebp,%xmm7,8), %xmm6{%k1} # AVX512{F,VL}
vpgatherqd 256(%eax,%xmm7), %xmm6{%k1} # AVX512{F,VL}
vpgatherqd 1024(%ecx,%xmm7,4), %xmm6{%k1} # AVX512{F,VL}
vpgatherqd 123(%ebp,%ymm7,8), %xmm6{%k1} # AVX512{F,VL}
vpgatherqd 256(%eax,%ymm7), %xmm6{%k1} # AVX512{F,VL}
vpgatherqd 1024(%ecx,%ymm7,4), %xmm6{%k1} # AVX512{F,VL}
vpgatherqq 123(%ebp,%xmm7,8), %xmm6{%k1} # AVX512{F,VL}
vpgatherqq 256(%eax,%xmm7), %xmm6{%k1} # AVX512{F,VL}
vpgatherqq 1024(%ecx,%xmm7,4), %xmm6{%k1} # AVX512{F,VL}
vpgatherqq 123(%ebp,%ymm7,8), %ymm6{%k1} # AVX512{F,VL}
vpgatherqq 256(%eax,%ymm7), %ymm6{%k1} # AVX512{F,VL}
vpgatherqq 1024(%ecx,%ymm7,4), %ymm6{%k1} # AVX512{F,VL}
vpmaxsd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxsd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxsd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxsq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxsq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxsq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxsq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxsq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxud (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxud %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxud (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxud -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxud -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmaxuq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmaxuq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmaxuq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmaxuq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmaxuq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminsd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminsd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminsq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminsq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminsq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminsq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminsq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminsq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminud (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminud -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminud %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminud (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminud -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminud -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpminuq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpminuq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpminuq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpminuq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpminuq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpminuq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq 254(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq 256(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq -256(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq -258(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxbq -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxbq -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxdq -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxdq -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovsxwq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovsxwq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbd -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbd -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq 254(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq 256(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq -256(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq -258(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq 508(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq 512(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxbq -512(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxbq -516(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxdq -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxdq -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd 1016(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd 1024(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd -1024(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd -1032(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd 2032(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd 2048(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwd -2048(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwd -2064(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq 508(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq 512(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq -512(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq -516(%edx), %xmm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq %xmm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq 1016(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq 1024(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmovzxwq -1024(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpmovzxwq -1032(%edx), %ymm6{%k7} # AVX512{F,VL}
vpmuldq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmuldq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuldq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmuldq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuldq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuldq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmulld (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmulld -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmulld %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmulld (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmulld -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmulld -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmuludq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmuludq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpmuludq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmuludq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpmuludq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpord (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpord -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpord %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpord (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpord -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpord -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vporq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vporq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vporq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vporq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vporq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vporq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# vpscatterdd: VSIB scatters (store direction of the gather group above) at
# 128- and 256-bit widths; the mask {%k1} is attached to the memory operand
# and zero-masking is not applicable to scatters.
vpscatterdd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterdd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterdd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterdd %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vpscatterdd %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vpscatterdd %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vpscatterdq %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterdq %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterdq %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterdq %ymm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterdq %ymm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterdq %ymm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vpscatterqd %xmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vpscatterqq %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vpscatterqq %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vpscatterqq %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vpscatterqq %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vpscatterqq %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vpscatterqq %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vpshufd $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpshufd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpshufd $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpshufd $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpshufd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpshufd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpshufd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpshufd $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpshufd $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpslld (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpslld (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrad (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrad (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsraq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsraq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsravd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsravd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsravq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsravq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsravq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsravq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsravq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsravq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrld (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrld (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrld $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrld $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrld $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrld $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrld $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsrlq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsubd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsubd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsubd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsubq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsubq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsubq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsubq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsubq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsubq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vptestmd %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%eax){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd 508(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd -512(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -516(%edx){1to4}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmd %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd (%eax){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd 508(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd 512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmd -512(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmd -516(%edx){1to8}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq %xmm5, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%ecx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%eax){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq 2032(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq -2048(%edx), %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -2064(%edx), %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq 1016(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq -1024(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -1032(%edx){1to2}, %xmm6, %k5{%k7} # AVX512{F,VL}
vptestmq %ymm5, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%ecx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq (%eax){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq 4064(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq -4096(%edx), %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -4128(%edx), %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq 1016(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq 1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vptestmq -1024(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL} Disp8
vptestmq -1032(%edx){1to4}, %ymm6, %k5{%k7} # AVX512{F,VL}
vpunpckhdq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhdq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhdq -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhdq -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckhqdq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckhqdq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpckldq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpckldq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpckldq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpckldq -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpckldq -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpunpcklqdq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpunpcklqdq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpxord (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxord -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxord %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpxord (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxord -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxord -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpxorq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpxorq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpxorq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpxorq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpxorq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpxorq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrcp14pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrcp14pd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrcp14pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrcp14pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrcp14ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrcp14ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrcp14ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrcp14ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrcp14ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrcp14ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrcp14ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14pd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrsqrt14ps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrsqrt14ps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrsqrt14ps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vscatterdpd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterdpd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterdpd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterdpd %ymm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterdpd %ymm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterdpd %ymm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterdps %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterdps %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterdps %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterdps %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vscatterdps %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vscatterdps %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vscatterqpd %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterqpd %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterqpd %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterqpd %ymm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vscatterqpd %ymm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vscatterqpd %ymm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 123(%ebp,%xmm7,8){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 256(%eax,%xmm7){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 1024(%ecx,%xmm7,4){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 123(%ebp,%ymm7,8){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 256(%eax,%ymm7){%k1} # AVX512{F,VL}
vscatterqps %xmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512{F,VL}
vshufpd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vshufpd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufpd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufpd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufpd $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufpd $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vshufps $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vshufps $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufps $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufps $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufps $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsqrtpd (%ecx), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtpd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vsqrtpd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsqrtpd (%ecx), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtpd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vsqrtpd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtpd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vsqrtps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsqrtps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsqrtps (%ecx), %xmm6{%k7} # AVX512{F,VL}
vsqrtps -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vsqrtps (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vsqrtps 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtps -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vsqrtps 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vsqrtps -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vsqrtps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsqrtps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsqrtps (%ecx), %ymm6{%k7} # AVX512{F,VL}
vsqrtps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vsqrtps (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vsqrtps 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtps -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vsqrtps 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vsqrtps -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vsqrtps -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vsubpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsubpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsubpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vsubps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vsubps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vsubps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vsubps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vsubps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vsubps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpckhpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpckhpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpckhps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpckhps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpckhps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpckhps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpckhps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpcklpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpcklpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vunpcklps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vunpcklps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vunpcklps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vunpcklps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vunpcklps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogd $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogd $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpternlogq $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpmovqb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovqd %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovqd %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsqd %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsqd %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusqd %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusqd %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdb %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdb %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdb %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdb %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovdw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovdw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovsdw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovsdw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdw %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdw %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpmovusdw %ymm5, %xmm6{%k7} # AVX512{F,VL}
vpmovusdw %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vshuff32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshuff32x4 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff32x4 $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff32x4 $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# 128-bit lane shuffles (FP and integer forms), AVX512{F,VL} 256-bit vectors.
# Each group covers: reg/reg with mask {%k7} and zero-mask {z}, plain memory,
# SIB addressing, element broadcast {1toN}, and displacement values chosen to
# sit just inside / just outside the Disp8*N compressed-displacement range.
vshuff64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshuff64x2 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshuff64x2 $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshuff64x2 $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufi32x4 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi32x4 $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi32x4 $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vshufi64x2 $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vshufi64x2 $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vshufi64x2 $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# Variable full-width qword permutes (integer and FP forms), AVX512{F,VL}.
# Same encoding matrix as above: reg, {z}, memory, SIB, {1to4} broadcast,
# and boundary displacements around the Disp8*N window.
vpermq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# Two-table full permutes (vpermt2d/q/ps/pd) in 128- and 256-bit widths,
# AVX512{F,VL}. Each sub-group walks the same operand matrix: reg/reg with
# {%k7} and {z}, memory, SIB, {1toN} broadcast, and +/- displacements at the
# edges of the Disp8*N compressed-displacement range.
vpermt2d %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2d (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2d %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2d (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2d -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2d -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2q (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2q %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2q (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2q -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2q -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermt2pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermt2pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermt2pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermt2pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermt2pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# Qword-granular align (valignq) in 128- and 256-bit widths, AVX512{F,VL}.
# Covers immediate forms, masking/zero-masking, memory/SIB/broadcast operands,
# and Disp8*N boundary displacements.
valignq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
valignq $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
valignq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
valignq $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
valignq $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
valignq $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# Floating-point scale (vscalefpd/vscalefps) in 128- and 256-bit widths,
# AVX512{F,VL}, over the standard operand/encoding matrix.
vscalefpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vscalefpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vscalefpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vscalefps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vscalefps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vscalefps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vscalefps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vscalefps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vscalefps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# Fix-up special values (vfixupimmpd/vfixupimmps) in 128- and 256-bit widths,
# AVX512{F,VL}, over the standard immediate/mask/memory/broadcast matrix.
vfixupimmpd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfixupimmpd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfixupimmpd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmpd $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmpd $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vfixupimmps $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vfixupimmps $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vfixupimmps $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vfixupimmps $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
# Immediate shifts: logical left (vpslld/vpsllq) and arithmetic right
# (vpsrad/vpsraq), 128- and 256-bit, AVX512{F,VL}. Two-operand-plus-imm form;
# memory source may be broadcast. Same Disp8*N boundary displacements as above.
vpslld $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpslld $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpslld $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpslld $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpslld $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpslld $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpslld $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpslld $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpslld $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpslld $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpslld $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpslld $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpslld $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsllq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsllq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsllq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsllq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsllq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsrad $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrad $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsrad $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsrad $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsrad $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpsraq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsraq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpsraq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vpsraq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vpsraq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
# Rotate-left: variable-count (vprolvd/vprolvq, three-operand) and
# immediate-count (vprold/vprolq, two-operand-plus-imm) forms, 128- and
# 256-bit, AVX512{F,VL}, over the standard mask/memory/broadcast/Disp8 matrix.
vprolvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprolvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprolvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprold $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprold $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprold $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprold $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprold $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprold $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprold $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprold $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprold $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprold $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprold $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprold $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprold $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprold $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprold $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprold $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprold $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprold $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprold $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprold $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprold $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprolvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprolvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprolvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprolq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprolq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprolq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprolq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprolq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprolq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprolq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprolq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprolq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprolq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprolq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprolq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
# Rotate-right: variable-count (vprorvd/vprorvq) and immediate-count (vprord)
# forms, 128- and 256-bit, AVX512{F,VL}. (The immediate vprorq group continues
# past this point in the file.)
vprorvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprorvd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvd -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprorvd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvd -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvd -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprord $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprord $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprord $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprord $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprord $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprord $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprord $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprord $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprord $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprord $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vprord $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprord $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprord $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprord $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprord $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprord $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprord $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprord $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprord $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprord $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprord $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vprorvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprorvq (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorvq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprorvq (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorvq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorvq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorq $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorq $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vprorq $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vprorq $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprorq $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vprorq $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprorq $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vprorq $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorq $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vprorq $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vprorq $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprorq $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vprorq $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vprorq $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vprorq $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrndscalepd $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrndscalepd $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, (%eax){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscalepd $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscalepd $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vrndscaleps $123, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%ecx), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vrndscaleps $123, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%ecx), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vrndscaleps $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vrndscaleps $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpcompressq %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpcompressq %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpcompressq %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpcompressq %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpcompressq %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpcompressq %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpcompressq %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpcompressq %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpcompressq %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpcompressq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpcompressq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpcompressq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpcompressq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, (%ecx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vcvtps2ph $123, %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vextractf32x4 $0xab, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vextractf32x4 $123, %ymm5, 2048(%edx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm5, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vextractf32x4 $123, %ymm5, -2064(%edx){%k7} # AVX512{F,VL}
vextracti32x4 $0xab, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, (%ecx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vextracti32x4 $123, %ymm5, 2048(%edx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm5, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vextracti32x4 $123, %ymm5, -2064(%edx){%k7} # AVX512{F,VL}
vmovapd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovapd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovapd %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovapd %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovapd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovapd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovapd %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovapd %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovapd %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovaps %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovaps %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovaps %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovaps %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovaps %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovaps %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovaps %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovaps %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovaps %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa32 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqa64 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu32 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovdqu64 %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovupd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovupd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovupd %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovupd %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovupd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovupd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovupd %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovupd %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovupd %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vmovups %xmm6, (%ecx){%k7} # AVX512{F,VL}
vmovups %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovups %xmm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %xmm6, 2048(%edx){%k7} # AVX512{F,VL}
vmovups %xmm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %xmm6, -2064(%edx){%k7} # AVX512{F,VL}
vmovups %ymm6, (%ecx){%k7} # AVX512{F,VL}
vmovups %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vmovups %ymm6, 4064(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %ymm6, 4096(%edx){%k7} # AVX512{F,VL}
vmovups %ymm6, -4096(%edx){%k7} # AVX512{F,VL} Disp8
vmovups %ymm6, -4128(%edx){%k7} # AVX512{F,VL}
vpmovqb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqb %xmm6, 254(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %xmm6, 256(%edx){%k7} # AVX512{F,VL}
vpmovqb %xmm6, -256(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %xmm6, -258(%edx){%k7} # AVX512{F,VL}
vpmovqb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqb %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovqb %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqb %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, 254(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %xmm6, 256(%edx){%k7} # AVX512{F,VL}
vpmovsqb %xmm6, -256(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %xmm6, -258(%edx){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovsqb %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqb %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, 254(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %xmm6, 256(%edx){%k7} # AVX512{F,VL}
vpmovusqb %xmm6, -256(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %xmm6, -258(%edx){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %ymm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovusqb %ymm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqb %ymm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovqw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqw %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovqw %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovqw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqw %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovqw %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqw %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovsqw %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsqw %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqw %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovusqw %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusqw %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqw %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovqd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovqd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovqd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovqd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovqd %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovqd %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovqd %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsqd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovsqd %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsqd %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusqd %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovusqd %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusqd %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovdb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdb %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovdb %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovdb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdb %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovdb %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdb %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovsdb %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsdb %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdb %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, 508(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %xmm6, 512(%edx){%k7} # AVX512{F,VL}
vpmovusdb %xmm6, -512(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %xmm6, -516(%edx){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %ymm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusdb %ymm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdb %ymm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovdw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdw %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovdw %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovdw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovdw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovdw %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovdw %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovdw %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovsdw %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovsdw %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovsdw %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, 1016(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %xmm6, 1024(%edx){%k7} # AVX512{F,VL}
vpmovusdw %xmm6, -1024(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %xmm6, -1032(%edx){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, (%ecx){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, 2032(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %ymm6, 2048(%edx){%k7} # AVX512{F,VL}
vpmovusdw %ymm6, -2048(%edx){%k7} # AVX512{F,VL} Disp8
vpmovusdw %ymm6, -2064(%edx){%k7} # AVX512{F,VL}
vcvttpd2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2udqx (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq (%eax){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq %ymm5, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq %ymm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttpd2udqy (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy 4064(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy 4096(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy -4096(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy -4128(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttpd2udqy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttpd2udqy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq %xmm5, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%ecx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq (%eax){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq 2032(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 2048(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq -2048(%edx), %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -2064(%edx), %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq 508(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq -512(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -516(%edx){1to4}, %xmm6{%k7} # AVX512{F,VL}
vcvttps2udq %ymm5, %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%ecx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq (%eax){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq 4064(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 4096(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq -4096(%edx), %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -4128(%edx), %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq 508(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq 512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vcvttps2udq -512(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL} Disp8
vcvttps2udq -516(%edx){1to8}, %ymm6{%k7} # AVX512{F,VL}
vpermi2d %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2d (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2d %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2d (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2d -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2d -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2q (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2q %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2q (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2q -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2q -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2ps (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2ps %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2ps (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2ps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2ps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd %xmm4, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vpermi2pd (%ecx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{F,VL}
vpermi2pd %ymm4, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vpermi2pd (%ecx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vpermi2pd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL} Disp8
vpermi2pd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{F,VL}
vptestnmd %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%eax){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 508(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -512(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -516(%edx){1to4}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmd %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd (%eax){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd 508(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd 512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmd -512(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmd -516(%edx){1to8}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq %xmm4, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%ecx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%eax){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 2032(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -2048(%edx), %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -2064(%edx), %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 1016(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -1024(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -1032(%edx){1to2}, %xmm5, %k5{%k7} # AVX512{F,VL}
vptestnmq %ymm4, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%ecx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq (%eax){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 4064(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -4096(%edx), %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -4128(%edx), %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq 1016(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq 1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
vptestnmq -1024(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL} Disp8
vptestnmq -1032(%edx){1to4}, %ymm5, %k5{%k7} # AVX512{F,VL}
.intel_syntax noprefix
# vaddpd/vaddps: packed FP add, Intel syntax.  Same coverage pattern as
# the AT&T section: masking, {z}, memory, {1toN} broadcast, Disp8*N bounds.
	vaddpd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vaddpd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vaddpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vaddpd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vaddpd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vaddpd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vaddpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vaddpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vaddpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vaddpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vaddpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vaddps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vaddps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vaddps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vaddps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vaddps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vaddps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vaddps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vaddps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vaddps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vaddps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
# valignd: concatenate the two sources and shift right by the immediate
# count of dword elements; immediate tested as both 0xab and 123.
	valignd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	valignd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	valignd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	valignd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignd xmm6{k7}, xmm5, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	valignd xmm6{k7}, xmm5, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignd xmm6{k7}, xmm5, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	valignd ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	valignd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	valignd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	valignd ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	valignd ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	valignd ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
# vblendmpd/vblendmps: element-wise blend of the two sources under the
# opmask (mask bit selects which source supplies each element).
	vblendmpd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vblendmpd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vblendmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vblendmpd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vblendmpd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vblendmpd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vblendmpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vblendmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vblendmpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vblendmpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vblendmpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vblendmps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vblendmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vblendmps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vblendmps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vblendmps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vblendmps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vblendmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vblendmps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vblendmps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vblendmps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
# Broadcast group: replicate a 128-bit tuple (vbroadcast{f,i}32x4), a
# scalar double (vbroadcastsd), or a scalar single (vbroadcastss) across
# the destination; both memory-source and register-source forms appear.
	vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastf32x4 ymm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vbroadcastf32x4 ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcasti32x4 ymm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vbroadcasti32x4 ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}{z}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vbroadcastsd ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vbroadcastsd ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}, xmm5	 # AVX512{F,VL}
	vbroadcastsd ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}{z}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vbroadcastss xmm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vbroadcastss xmm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}{z}, DWORD PTR [ecx]	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}, DWORD PTR [edx+508]	 # AVX512{F,VL} Disp8
	vbroadcastss ymm6{k7}, DWORD PTR [edx+512]	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}, DWORD PTR [edx-512]	 # AVX512{F,VL} Disp8
	vbroadcastss ymm6{k7}, DWORD PTR [edx-516]	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}, xmm5	 # AVX512{F,VL}
	vbroadcastss xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}, xmm5	 # AVX512{F,VL}
	vbroadcastss ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
# vcmppd/vcmpps: packed FP compare with an immediate predicate, writing
# the per-element result into mask register k5 (write-masked by k7).
	vcmppd k5{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vcmppd k5{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vcmppd k5{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, xmm5, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, xmm5, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, xmm5, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vcmpps k5{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vcmpps k5{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
# vcompresspd/vcompressps: pack the active (mask-selected) elements
# contiguously into the destination; covers store-to-memory forms with
# masked destinations as well as register-to-register forms.
	vcompresspd XMMWORD PTR [ecx]{k7}, xmm6	 # AVX512{F,VL}
	vcompresspd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6	 # AVX512{F,VL}
	vcompresspd XMMWORD PTR [edx+1016]{k7}, xmm6	 # AVX512{F,VL} Disp8
	vcompresspd XMMWORD PTR [edx+1024]{k7}, xmm6	 # AVX512{F,VL}
	vcompresspd XMMWORD PTR [edx-1024]{k7}, xmm6	 # AVX512{F,VL} Disp8
	vcompresspd XMMWORD PTR [edx-1032]{k7}, xmm6	 # AVX512{F,VL}
	vcompresspd YMMWORD PTR [ecx]{k7}, ymm6	 # AVX512{F,VL}
	vcompresspd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6	 # AVX512{F,VL}
	vcompresspd YMMWORD PTR [edx+1016]{k7}, ymm6	 # AVX512{F,VL} Disp8
	vcompresspd YMMWORD PTR [edx+1024]{k7}, ymm6	 # AVX512{F,VL}
	vcompresspd YMMWORD PTR [edx-1024]{k7}, ymm6	 # AVX512{F,VL} Disp8
	vcompresspd YMMWORD PTR [edx-1032]{k7}, ymm6	 # AVX512{F,VL}
	vcompresspd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcompresspd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcompresspd ymm6{k7}, ymm5	 # AVX512{F,VL}
	vcompresspd ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcompressps XMMWORD PTR [ecx]{k7}, xmm6	 # AVX512{F,VL}
	vcompressps XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6	 # AVX512{F,VL}
	vcompressps XMMWORD PTR [edx+508]{k7}, xmm6	 # AVX512{F,VL} Disp8
	vcompressps XMMWORD PTR [edx+512]{k7}, xmm6	 # AVX512{F,VL}
	vcompressps XMMWORD PTR [edx-512]{k7}, xmm6	 # AVX512{F,VL} Disp8
	vcompressps XMMWORD PTR [edx-516]{k7}, xmm6	 # AVX512{F,VL}
	vcompressps YMMWORD PTR [ecx]{k7}, ymm6	 # AVX512{F,VL}
	vcompressps YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6	 # AVX512{F,VL}
	vcompressps YMMWORD PTR [edx+508]{k7}, ymm6	 # AVX512{F,VL} Disp8
	vcompressps YMMWORD PTR [edx+512]{k7}, ymm6	 # AVX512{F,VL}
	vcompressps YMMWORD PTR [edx-512]{k7}, ymm6	 # AVX512{F,VL} Disp8
	vcompressps YMMWORD PTR [edx-516]{k7}, ymm6	 # AVX512{F,VL}
	vcompressps xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcompressps xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcompressps ymm6{k7}, ymm5	 # AVX512{F,VL}
	vcompressps ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
# vcvtdq2pd/vcvtdq2ps: signed dword -> double / single conversions.  The
# widening pd form reads a half-width (QWORD/XMMWORD) memory operand.
	vcvtdq2pd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, [eax]{1to2}	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vcvtdq2pd xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vcvtdq2pd xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, [edx+508]{1to2}	 # AVX512{F,VL} Disp8
	vcvtdq2pd xmm6{k7}, [edx+512]{1to2}	 # AVX512{F,VL}
	vcvtdq2pd xmm6{k7}, [edx-512]{1to2}	 # AVX512{F,VL} Disp8
	vcvtdq2pd xmm6{k7}, [edx-516]{1to2}	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtdq2pd ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vcvtdq2pd ymm6{k7}, [edx+512]{1to4}	 # AVX512{F,VL}
	vcvtdq2pd ymm6{k7}, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vcvtdq2pd ymm6{k7}, [edx-516]{1to4}	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtdq2ps xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vcvtdq2ps xmm6{k7}, [edx+512]{1to4}	 # AVX512{F,VL}
	vcvtdq2ps xmm6{k7}, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vcvtdq2ps xmm6{k7}, [edx-516]{1to4}	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, ymm5	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, [eax]{1to8}	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvtdq2ps ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vcvtdq2ps ymm6{k7}, [edx+512]{1to8}	 # AVX512{F,VL}
	vcvtdq2ps ymm6{k7}, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vcvtdq2ps ymm6{k7}, [edx-516]{1to8}	 # AVX512{F,VL}
# vcvtpd2dq/vcvtpd2ps: narrowing double -> dword / single conversions.
# Both XMMWORD- and YMMWORD-source forms assemble; the memory operand
# size (or QWORD BCST) disambiguates the otherwise identical mnemonics.
	vcvtpd2dq xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, [eax]{1to2}	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to2}	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to2}	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, ymm5	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to4}	 # AVX512{F,VL}
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vcvtpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to4}	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, [eax]{1to2}	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1024]{1to2}	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1032]{1to2}	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, ymm5	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx+1024]{1to4}	 # AVX512{F,VL}
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vcvtpd2ps xmm6{k7}, QWORD BCST [edx-1032]{1to4}	 # AVX512{F,VL}
# vcvtpd2udq: double -> unsigned dword (same source-size pattern as the
# signed vcvtpd2dq group above).  vcvtph2ps: half -> single widening
# conversion; no broadcast forms, memory operands are half destination width.
	vcvtpd2udq xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, [eax]{1to2}	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to2}	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to2}	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, ymm5	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to4}	 # AVX512{F,VL}
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vcvtpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to4}	 # AVX512{F,VL}
	vcvtph2ps xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtph2ps xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtph2ps xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtph2ps xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtph2ps xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vcvtph2ps xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vcvtph2ps xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vcvtph2ps xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vcvtph2ps ymm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtph2ps ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtph2ps ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtph2ps ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtph2ps ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtph2ps ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtph2ps ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtph2ps ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
# vcvtps2dq: single -> signed dword (round per MXCSR).  vcvtps2pd:
# single -> double widening; its memory source is half destination width.
	vcvtps2dq xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtps2dq xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtps2dq xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vcvtps2dq xmm6{k7}, [edx+512]{1to4}	 # AVX512{F,VL}
	vcvtps2dq xmm6{k7}, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vcvtps2dq xmm6{k7}, [edx-516]{1to4}	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, ymm5	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, [eax]{1to8}	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvtps2dq ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvtps2dq ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vcvtps2dq ymm6{k7}, [edx+512]{1to8}	 # AVX512{F,VL}
	vcvtps2dq ymm6{k7}, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vcvtps2dq ymm6{k7}, [edx-516]{1to8}	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, QWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, [eax]{1to2}	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{F,VL} Disp8
	vcvtps2pd xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{F,VL} Disp8
	vcvtps2pd xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, [edx+508]{1to2}	 # AVX512{F,VL} Disp8
	vcvtps2pd xmm6{k7}, [edx+512]{1to2}	 # AVX512{F,VL}
	vcvtps2pd xmm6{k7}, [edx-512]{1to2}	 # AVX512{F,VL} Disp8
	vcvtps2pd xmm6{k7}, [edx-516]{1to2}	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtps2pd ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtps2pd ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vcvtps2pd ymm6{k7}, [edx+512]{1to4}	 # AVX512{F,VL}
	vcvtps2pd ymm6{k7}, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vcvtps2pd ymm6{k7}, [edx-516]{1to4}	 # AVX512{F,VL}
# vcvtps2ph: single -> half narrowing with an immediate rounding control
# (register-destination forms only here).  vcvtps2udq: single -> unsigned
# dword with the usual mask/broadcast/Disp8*N coverage.
	vcvtps2ph xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vcvtps2ph xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vcvtps2ph xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vcvtps2ph xmm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vcvtps2ph xmm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vcvtps2ph xmm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvtps2udq xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvtps2udq xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vcvtps2udq xmm6{k7}, [edx+512]{1to4}	 # AVX512{F,VL}
	vcvtps2udq xmm6{k7}, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vcvtps2udq xmm6{k7}, [edx-516]{1to4}	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, ymm5	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, [eax]{1to8}	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvtps2udq ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvtps2udq ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vcvtps2udq ymm6{k7}, [edx+512]{1to8}	 # AVX512{F,VL}
	vcvtps2udq ymm6{k7}, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vcvtps2udq ymm6{k7}, [edx-516]{1to8}	 # AVX512{F,VL}
# vcvttpd2dq/vcvttps2dq: truncating (round-toward-zero) FP -> signed
# dword conversions; operand coverage mirrors the non-truncating forms.
	vcvttpd2dq xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, [eax]{1to2}	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to2}	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to2}	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, ymm5	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx+1024]{1to4}	 # AVX512{F,VL}
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vcvttpd2dq xmm6{k7}, QWORD BCST [edx-1032]{1to4}	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, xmm5	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, [eax]{1to4}	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vcvttps2dq xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vcvttps2dq xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vcvttps2dq xmm6{k7}, [edx+512]{1to4}	 # AVX512{F,VL}
	vcvttps2dq xmm6{k7}, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vcvttps2dq xmm6{k7}, [edx-516]{1to4}	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, ymm5	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, [eax]{1to8}	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vcvttps2dq ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vcvttps2dq ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vcvttps2dq ymm6{k7}, [edx+512]{1to8}	 # AVX512{F,VL}
	vcvttps2dq ymm6{k7}, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vcvttps2dq ymm6{k7}, [edx-516]{1to8}	 # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtudq2pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, [edx+508]{1to2} # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, [edx+512]{1to2} # AVX512{F,VL}
vcvtudq2pd xmm6{k7}, [edx-512]{1to2} # AVX512{F,VL} Disp8
vcvtudq2pd xmm6{k7}, [edx-516]{1to2} # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, xmm5 # AVX512{F,VL}
vcvtudq2pd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtudq2pd ymm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtudq2pd ymm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, xmm5 # AVX512{F,VL}
vcvtudq2ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvtudq2ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, ymm5 # AVX512{F,VL}
vcvtudq2ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvtudq2ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvtudq2ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vdivpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vdivpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vdivpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vdivpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vdivps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vdivps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vdivps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vdivps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vdivps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vdivps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vexpandpd xmm6{k7}, XMMWORD PTR [edx+1024] # AVX512{F,VL}
vexpandpd xmm6{k7}, XMMWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vexpandpd xmm6{k7}, XMMWORD PTR [edx-1032] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vexpandpd ymm6{k7}, YMMWORD PTR [edx+1024] # AVX512{F,VL}
vexpandpd ymm6{k7}, YMMWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vexpandpd ymm6{k7}, YMMWORD PTR [edx-1032] # AVX512{F,VL}
vexpandpd xmm6{k7}, xmm5 # AVX512{F,VL}
vexpandpd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vexpandpd ymm6{k7}, ymm5 # AVX512{F,VL}
vexpandpd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [edx+508] # AVX512{F,VL} Disp8
vexpandps xmm6{k7}, XMMWORD PTR [edx+512] # AVX512{F,VL}
vexpandps xmm6{k7}, XMMWORD PTR [edx-512] # AVX512{F,VL} Disp8
vexpandps xmm6{k7}, XMMWORD PTR [edx-516] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [edx+508] # AVX512{F,VL} Disp8
vexpandps ymm6{k7}, YMMWORD PTR [edx+512] # AVX512{F,VL}
vexpandps ymm6{k7}, YMMWORD PTR [edx-512] # AVX512{F,VL} Disp8
vexpandps ymm6{k7}, YMMWORD PTR [edx-516] # AVX512{F,VL}
vexpandps xmm6{k7}, xmm5 # AVX512{F,VL}
vexpandps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vexpandps ymm6{k7}, ymm5 # AVX512{F,VL}
vexpandps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vextractf32x4 xmm6{k7}, ymm5, 0xab # AVX512{F,VL}
vextractf32x4 xmm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vextractf32x4 xmm6{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 xmm6{k7}, ymm5, 0xab # AVX512{F,VL}
vextracti32x4 xmm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vextracti32x4 xmm6{k7}, ymm5, 123 # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmadd132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmadd132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmadd132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmadd132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmadd213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmadd213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmadd213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmadd213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmadd231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmadd231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmadd231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmadd231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmaddsub132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmaddsub132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmaddsub132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmaddsub213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmaddsub213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmaddsub213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmaddsub231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmaddsub231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmaddsub231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsub132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
# vfmsub132{pd,ps} AVX512{F,VL} encoding tests: {k7} write-masking, {z}
# zero-masking, embedded broadcast ({1toN}), and displacement pairs that
# straddle the EVEX Disp8*N compression limit — lines tagged "Disp8" expect
# the compressed one-byte displacement encoding; their partners do not.
vfmsub132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsub132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsub132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsub132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsub213{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfmsub213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsub213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsub213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsub213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsub213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsub231{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfmsub231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsub231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsub231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsub231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsub231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsubadd132{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfmsubadd132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsubadd132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsubadd132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsubadd132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsubadd213{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfmsubadd213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsubadd213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsubadd213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsubadd213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfmsubadd231{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfmsubadd231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfmsubadd231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfmsubadd231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfmsubadd231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfnmadd132{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfnmadd132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmadd132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmadd132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfnmadd213{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfnmadd213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfnmadd231{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfnmadd231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfnmsub132{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfnmsub132pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# vfnmsub213{pd,ps} AVX512{F,VL} encoding tests: {k7}/{z} masking, {1toN}
# embedded broadcast, and Disp8*N compression boundary displacements
# (lines tagged "Disp8" expect the compressed one-byte displacement form).
vfnmsub213pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vgatherdpd xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherdpd xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherdpd xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherdpd ymm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherdpd ymm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherdpd ymm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherdps xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherdps xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherdps xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherdps ymm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vgatherdps ymm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vgatherdps ymm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vgatherqpd xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherqpd xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherqpd xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherqpd ymm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vgatherqpd ymm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vgatherqpd ymm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vgatherqps xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vgatherqps xmm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vgatherqps xmm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vgetexppd xmm6{k7}, xmm5 # AVX512{F,VL}
vgetexppd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexppd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vgetexppd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vgetexppd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vgetexppd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vgetexppd ymm6{k7}, ymm5 # AVX512{F,VL}
vgetexppd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexppd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vgetexppd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vgetexppd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vgetexppd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vgetexpps xmm6{k7}, xmm5 # AVX512{F,VL}
vgetexpps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexpps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vgetexpps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vgetexpps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vgetexpps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vgetexpps ymm6{k7}, ymm5 # AVX512{F,VL}
vgetexpps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vgetexpps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vgetexpps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vgetexpps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vgetexpps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vgetmantpd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vgetmantpd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vgetmantpd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vgetmantpd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vgetmantpd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vgetmantps xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vgetmantps xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vgetmantps ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vgetmantps ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinsertf32x4 ymm6{k7}{z}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, xmm4, 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinserti32x4 ymm6{k7}{z}, ymm5, xmm4, 0xab # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, xmm4, 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmaxpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vmaxpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmaxpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vmaxpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vmaxpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmaxps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vmaxps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vmaxps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmaxps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vmaxps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vmaxps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vminpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vminpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vminpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vminpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vminpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vminpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vminps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vminps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vminps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vminps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vminps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vminps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vminps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vminps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vminps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vminps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vmovapd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovapd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovapd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovapd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovapd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovapd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovapd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovapd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovaps xmm6{k7}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovaps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovaps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovaps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovaps ymm6{k7}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovaps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovaps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovaps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovddup xmm6{k7}, xmm5 # AVX512{F,VL}
vmovddup xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vmovddup xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vmovddup xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vmovddup xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vmovddup ymm6{k7}, ymm5 # AVX512{F,VL}
vmovddup ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovddup ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovddup ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovddup ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqa32 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqa32 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqa64 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqa64 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqu32 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqu32 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovdqu64 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovdqu64 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovshdup xmm6{k7}, xmm5 # AVX512{F,VL}
vmovshdup xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovshdup xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovshdup xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovshdup xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovshdup ymm6{k7}, ymm5 # AVX512{F,VL}
vmovshdup ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovshdup ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovshdup ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovshdup ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovsldup xmm6{k7}, xmm5 # AVX512{F,VL}
vmovsldup xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovsldup xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovsldup xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovsldup xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovsldup ymm6{k7}, ymm5 # AVX512{F,VL}
vmovsldup ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovsldup ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovsldup ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovsldup ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovupd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovupd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovupd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovupd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovupd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovupd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovupd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovupd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmovups xmm6{k7}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmovups xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmovups xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmovups xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmovups ymm6{k7}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmovups ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmovups ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmovups ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmulpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vmulpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vmulpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmulpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vmulpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vmulpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vmulps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vmulps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vmulps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vmulps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vmulps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vmulps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpabsd xmm6{k7}, xmm5 # AVX512{F,VL}
vpabsd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsd xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpabsd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpabsd xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vpabsd xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpabsd xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vpabsd ymm6{k7}, ymm5 # AVX512{F,VL}
vpabsd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsd ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpabsd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpabsd ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vpabsd ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpabsd ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vpabsq xmm6{k7}, xmm5 # AVX512{F,VL}
vpabsq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpabsq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpabsq xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vpabsq xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpabsq xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vpabsq ymm6{k7}, ymm5 # AVX512{F,VL}
vpabsq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpabsq ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpabsq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpabsq ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vpabsq ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpabsq ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpaddd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpaddd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpaddd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpaddd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpaddd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpaddd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpaddq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpaddq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpaddq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpaddq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpaddq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpaddq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpandd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpandd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpandd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpandd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandnd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpandnd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpandnd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandnd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpandnd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpandnd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandnq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpandnq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpandnq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandnq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpandnq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpandnq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpandq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpandq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpandq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpandq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpandq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpandq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpblendmd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpblendmd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpblendmd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpblendmd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpblendmd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpblendmd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd xmm6{k7}{z}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpbroadcastd xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpbroadcastd xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd ymm6{k7}{z}, DWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpbroadcastd ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpbroadcastd ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpbroadcastd ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpbroadcastd xmm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpbroadcastd ymm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpbroadcastd xmm6{k7}, eax # AVX512{F,VL}
vpbroadcastd xmm6{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd xmm6{k7}, ebp # AVX512{F,VL}
vpbroadcastd ymm6{k7}, eax # AVX512{F,VL}
vpbroadcastd ymm6{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd ymm6{k7}, ebp # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq xmm6{k7}{z}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpbroadcastq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpbroadcastq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq ymm6{k7}{z}, QWORD PTR [ecx] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpbroadcastq ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpbroadcastq ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpbroadcastq ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpbroadcastq xmm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpbroadcastq ymm6{k7}, xmm5 # AVX512{F,VL}
vpbroadcastq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, xmm6, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, xmm6, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, [eax]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5{k7}, ymm6, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5{k7}, ymm6, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, [eax]{1to4} # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, [edx+512]{1to4} # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm6, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, xmm6, [edx-516]{1to4} # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, [eax]{1to8} # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, [edx+512]{1to8} # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm6, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5{k7}, ymm6, [edx-516]{1to8} # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, [eax]{1to2} # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, [edx+1024]{1to2} # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm6, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, xmm6, [edx-1032]{1to2} # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, [eax]{1to4} # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, [edx+1024]{1to4} # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm6, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5{k7}, ymm6, [edx-1032]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, [eax]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, [edx+512]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm6, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, xmm6, [edx-516]{1to4} # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, [eax]{1to8} # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, [edx+512]{1to8} # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm6, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5{k7}, ymm6, [edx-516]{1to8} # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, [eax]{1to2} # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, [edx+1024]{1to2} # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm6, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, xmm6, [edx-1032]{1to2} # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, [eax]{1to4} # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, [edx+1024]{1to4} # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm6, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5{k7}, ymm6, [edx-1032]{1to4} # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, [eax]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, xmm6, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, xmm6, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5{k7}, ymm6, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5{k7}, ymm6, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, xmm6, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, xmm6, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, [eax]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5{k7}, ymm6, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5{k7}, ymm6, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, xmm5, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, xmm5, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, [eax]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, xmm6, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, xmm6, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, ymm5, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, ymm5, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, [eax]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5{k7}, ymm6, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5{k7}, ymm6, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpblendmq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpblendmq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpblendmq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpblendmq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpcompressd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpcompressd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpcompressd XMMWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpcompressd XMMWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpcompressd YMMWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpcompressd xmm6{k7}, xmm5 # AVX512{F,VL}
vpcompressd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpcompressd ymm6{k7}, ymm5 # AVX512{F,VL}
vpcompressd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# --- vpermilpd (AVX512F+VL): in-lane permute, imm8-control forms first,
# --- then vector-control forms.  Broadcast element is a qword, so the
# --- Disp8*N boundary for {1toN} forms is at +/-1024.
vpermilpd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpermilpd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpermilpd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpermilpd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
# Vector-control (register/memory selector) forms of vpermilpd.
vpermilpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermilpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpermilpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpermilpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermilpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpermilpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpermilpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# --- vpermilps (AVX512F+VL): same pattern as vpermilpd but with dword
# --- elements, so {1toN} Disp8*N boundary is at +/-512.
vpermilps xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpermilps xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpermilps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermilps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermilps xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpermilps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpermilps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpermilps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpermilps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpermilps ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpermilps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermilps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermilps ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vpermilps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpermilps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpermilps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpermilps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
# Vector-control (register/memory selector) forms of vpermilps.
vpermilps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermilps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpermilps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpermilps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermilps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermilps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermilps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# --- vpermpd / vpermq (AVX512F+VL): imm8 cross-lane qword permutes
# --- (ymm only); vpermps: dword permute with vector control.
vpermpd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpermpd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpermpd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpermpd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermpd ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpermpd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpermpd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpermpd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpermpd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpermpd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermpd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpermpd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermpd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpermq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpermq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpermq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpermq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpermq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpermq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpermq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpermq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpermq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpermq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpermq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpermq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
# --- vpexpandd / vpexpandq (AVX512F+VL): masked expand loads.  Element
# --- size sets the Disp8*N scale (dword: +/-512 boundary; qword: +/-1024).
vpexpandd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandd xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpexpandd xmm6{k7}, XMMWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpexpandd xmm6{k7}, XMMWORD PTR [edx+512] # AVX512{F,VL}
vpexpandd xmm6{k7}, XMMWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpexpandd xmm6{k7}, XMMWORD PTR [edx-516] # AVX512{F,VL}
vpexpandd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandd ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpexpandd ymm6{k7}, YMMWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpexpandd ymm6{k7}, YMMWORD PTR [edx+512] # AVX512{F,VL}
vpexpandd ymm6{k7}, YMMWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpexpandd ymm6{k7}, YMMWORD PTR [edx-516] # AVX512{F,VL}
vpexpandd xmm6{k7}, xmm5 # AVX512{F,VL}
vpexpandd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpexpandd ymm6{k7}, ymm5 # AVX512{F,VL}
vpexpandd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vpexpandq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandq xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpexpandq xmm6{k7}, XMMWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpexpandq xmm6{k7}, XMMWORD PTR [edx+1024] # AVX512{F,VL}
vpexpandq xmm6{k7}, XMMWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpexpandq xmm6{k7}, XMMWORD PTR [edx-1032] # AVX512{F,VL}
vpexpandq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandq ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{F,VL}
vpexpandq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpexpandq ymm6{k7}, YMMWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpexpandq ymm6{k7}, YMMWORD PTR [edx+1024] # AVX512{F,VL}
vpexpandq ymm6{k7}, YMMWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpexpandq ymm6{k7}, YMMWORD PTR [edx-1032] # AVX512{F,VL}
vpexpandq xmm6{k7}, xmm5 # AVX512{F,VL}
vpexpandq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpexpandq ymm6{k7}, ymm5 # AVX512{F,VL}
vpexpandq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
# --- vpgather{d,q}{d,q} (AVX512F+VL): VSIB gathers; index-register width
# --- follows the index element size (note vpgatherdq ymm uses an xmm
# --- index, vpgatherqd narrows to an xmm destination with a ymm index).
vpgatherdd xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vpgatherdd xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vpgatherdd xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vpgatherdd ymm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vpgatherdd ymm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vpgatherdd ymm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vpgatherdq xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vpgatherdq xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vpgatherdq xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vpgatherdq ymm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vpgatherdq ymm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vpgatherdq ymm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vpgatherqd xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vpgatherqd xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vpgatherqd xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vpgatherqd xmm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vpgatherqd xmm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vpgatherqd xmm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
vpgatherqq xmm6{k1}, [ebp+xmm7*8-123] # AVX512{F,VL}
vpgatherqq xmm6{k1}, [eax+xmm7+256] # AVX512{F,VL}
vpgatherqq xmm6{k1}, [ecx+xmm7*4+1024] # AVX512{F,VL}
vpgatherqq ymm6{k1}, [ebp+ymm7*8-123] # AVX512{F,VL}
vpgatherqq ymm6{k1}, [eax+ymm7+256] # AVX512{F,VL}
vpgatherqq ymm6{k1}, [ecx+ymm7*4+1024] # AVX512{F,VL}
# --- vpmax{s,u}{d,q} / vpmin{s,u}{d,q} (AVX512F+VL): each mnemonic gets
# --- the same 26-line pattern — xmm then ymm: reg/reg, {z} masking,
# --- unscaled memory, full-vector Disp8*N boundaries, and {1toN}
# --- embedded-broadcast boundaries (dword forms at +/-512, qword forms
# --- at +/-1024, per the existing "Disp8" tags).
vpmaxsd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmaxsd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmaxsd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmaxsd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpmaxsd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpmaxsd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpmaxsd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmaxsd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmaxsd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmaxsd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpmaxsd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpmaxsd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpmaxsd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmaxsq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmaxsq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmaxsq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpmaxsq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpmaxsq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpmaxsq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmaxsq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmaxsq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmaxsq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpmaxsq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpmaxsq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpmaxsq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmaxud xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmaxud xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmaxud xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpmaxud xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpmaxud xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpmaxud xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmaxud ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmaxud ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmaxud ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpmaxud ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpmaxud ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpmaxud ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmaxuq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmaxuq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmaxuq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpmaxuq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpmaxuq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpmaxuq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmaxuq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmaxuq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmaxuq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpmaxuq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpmaxuq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpmaxuq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpminsd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpminsd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpminsd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpminsd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpminsd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpminsd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpminsd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpminsd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpminsd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpminsd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpminsd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpminsd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpminsq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpminsq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpminsq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpminsq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpminsq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpminsq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpminsq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpminsq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpminsq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpminsq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpminsq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpminsq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpminud xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpminud xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpminud xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpminud xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpminud xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpminud xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpminud ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpminud ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpminud ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpminud ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpminud ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpminud ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpminuq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpminuq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpminuq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpminuq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpminuq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpminuq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpminuq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpminuq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpminuq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpminuq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpminuq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpminuq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# --- vpmovsx* / vpmovzx* (AVX512F+VL): sign-/zero-extending widening
# --- moves.  The memory-operand width equals source-elements * count
# --- (e.g. bd xmm reads a DWORD, dq ymm reads an XMMWORD), and that
# --- width sets the Disp8*N scale exercised by the boundary offsets.
vpmovsxbd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovsxbd xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovsxbd xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovsxbd xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxbd ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxbd ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxbd ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [ecx] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [edx+254] # AVX512{F,VL} Disp8
vpmovsxbq xmm6{k7}, WORD PTR [edx+256] # AVX512{F,VL}
vpmovsxbq xmm6{k7}, WORD PTR [edx-256] # AVX512{F,VL} Disp8
vpmovsxbq xmm6{k7}, WORD PTR [edx-258] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxbq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovsxbq ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovsxbq ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovsxbq ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovsxdq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxdq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxdq xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxdq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxdq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxdq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxdq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxdq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovsxdq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxdq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxdq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmovsxdq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxdq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmovsxdq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmovsxdq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmovsxdq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxwd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxwd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxwd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmovsxwd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovsxwq xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovsxwq xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovsxwq xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovsxwq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovsxwq ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovsxwq ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovsxwq ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
# Zero-extending counterparts: identical operand patterns to vpmovsx*.
vpmovzxbd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovzxbd xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovzxbd xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovzxbd xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxbd ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxbd ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxbd ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [ecx] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [edx+254] # AVX512{F,VL} Disp8
vpmovzxbq xmm6{k7}, WORD PTR [edx+256] # AVX512{F,VL}
vpmovzxbq xmm6{k7}, WORD PTR [edx-256] # AVX512{F,VL} Disp8
vpmovzxbq xmm6{k7}, WORD PTR [edx-258] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxbq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovzxbq ymm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovzxbq ymm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovzxbq ymm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovzxdq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxdq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxdq xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxdq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxdq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxdq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxdq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxdq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovzxdq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxdq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxdq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmovzxdq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxdq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmovzxdq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmovzxdq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmovzxdq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxwd xmm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxwd xmm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxwd xmm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwd ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmovzxwd ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [edx+508] # AVX512{F,VL} Disp8
vpmovzxwq xmm6{k7}, DWORD PTR [edx+512] # AVX512{F,VL}
vpmovzxwq xmm6{k7}, DWORD PTR [edx-512] # AVX512{F,VL} Disp8
vpmovzxwq xmm6{k7}, DWORD PTR [edx-516] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, xmm5 # AVX512{F,VL}
vpmovzxwq ymm6{k7}{z}, xmm5 # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [ecx] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [edx+1016] # AVX512{F,VL} Disp8
vpmovzxwq ymm6{k7}, QWORD PTR [edx+1024] # AVX512{F,VL}
vpmovzxwq ymm6{k7}, QWORD PTR [edx-1024] # AVX512{F,VL} Disp8
vpmovzxwq ymm6{k7}, QWORD PTR [edx-1032] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmuldq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpmuldq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmuldq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpmuldq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmulld xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpmulld xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmulld ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpmulld ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpmuludq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpmuludq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpmuludq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpmuludq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpord xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpord xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpord xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpord xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpord xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpord ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpord ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpord ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpord ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpord ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vporq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vporq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vporq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vporq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vporq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vporq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vporq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vporq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vporq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vporq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
	# Scatter stores (vpscatterdd/dq/qd/qq): every form below uses VSIB
	# addressing ([base + xmmN/ymmN*scale + disp]) and carries a {k1}
	# write-mask on the memory operand. The three address shapes per
	# mnemonic vary base register, index register, scale, and sign of
	# the displacement to cover distinct encodings.
	vpscatterdd	[ebp+xmm7*8-123]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterdd	[eax+xmm7+256]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterdd	[ecx+xmm7*4+1024]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterdd	[ebp+ymm7*8-123]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterdd	[eax+ymm7+256]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterdd	[ecx+ymm7*4+1024]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterdq	[ebp+xmm7*8-123]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterdq	[eax+xmm7+256]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterdq	[ecx+xmm7*4+1024]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterdq	[ebp+xmm7*8-123]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterdq	[eax+xmm7+256]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterdq	[ecx+xmm7*4+1024]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterqd	[ebp+xmm7*8-123]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqd	[eax+xmm7+256]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqd	[ecx+xmm7*4+1024]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqd	[ebp+ymm7*8-123]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqd	[eax+ymm7+256]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqd	[ecx+ymm7*4+1024]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqq	[ebp+xmm7*8-123]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqq	[eax+xmm7+256]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqq	[ecx+xmm7*4+1024]{k1}, xmm6	 # AVX512{F,VL}
	vpscatterqq	[ebp+ymm7*8-123]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterqq	[eax+ymm7+256]{k1}, ymm6	 # AVX512{F,VL}
	vpscatterqq	[ecx+ymm7*4+1024]{k1}, ymm6	 # AVX512{F,VL}
vpshufd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpshufd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpshufd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpshufd xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpshufd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpshufd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpshufd ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpslld xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpslld xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpslld ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpslld ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsllq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsllq ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllq ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsllvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsllvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsllvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsllvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsllvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsllvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsllvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsllvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrad xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsrad ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsraq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsraq ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsravd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsravd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsravd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsravd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsravq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsravq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsravq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsravq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrld xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsrld ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrlq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, xmm4 # AVX512{F,VL}
vpsrlq ymm6{k7}{z}, ymm5, xmm4 # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsrlvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsrlvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsrlvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsrlvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpsrld xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpsrld xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpsrld xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpsrld ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpsrld ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpsrld ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpsrlq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpsrlq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpsrlq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpsrlq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsubd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpsubd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsubd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
# (continued) vpsubd 256-bit memory forms: plain/SIB memory, {1to8} dword broadcast,
# and displacements straddling the Disp8*N compression limits (+-4064/4096, +-508/512).
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpsubd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# --- vpsubq, EVEX 128/256-bit: reg/reg with {k7} masking and {z} zeroing,
# memory, qword broadcast ({1to2}/{1to4}), and Disp8*N boundary displacements.
vpsubq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpsubq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpsubq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpsubq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpsubq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# --- vptestmd/vptestmq, EVEX 128/256-bit: destination is a mask register (k5{k7}),
# so no {z} zeroing forms exist here. Covers reg/reg, memory, element-width
# broadcast, and Disp8*N compression boundaries.
vptestmd k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, [eax]{1to4} # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestmd k5{k7}, xmm6, [edx+508]{1to4} # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, [edx+512]{1to4} # AVX512{F,VL}
vptestmd k5{k7}, xmm6, [edx-512]{1to4} # AVX512{F,VL} Disp8
vptestmd k5{k7}, xmm6, [edx-516]{1to4} # AVX512{F,VL}
vptestmd k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, [eax]{1to8} # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestmd k5{k7}, ymm6, [edx+508]{1to8} # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, [edx+512]{1to8} # AVX512{F,VL}
vptestmd k5{k7}, ymm6, [edx-512]{1to8} # AVX512{F,VL} Disp8
vptestmd k5{k7}, ymm6, [edx-516]{1to8} # AVX512{F,VL}
vptestmq k5{k7}, xmm6, xmm5 # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, [eax]{1to2} # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestmq k5{k7}, xmm6, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, [edx+1024]{1to2} # AVX512{F,VL}
vptestmq k5{k7}, xmm6, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vptestmq k5{k7}, xmm6, [edx-1032]{1to2} # AVX512{F,VL}
vptestmq k5{k7}, ymm6, ymm5 # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, [eax]{1to4} # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestmq k5{k7}, ymm6, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, [edx+1024]{1to4} # AVX512{F,VL}
vptestmq k5{k7}, ymm6, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vptestmq k5{k7}, ymm6, [edx-1032]{1to4} # AVX512{F,VL}
# --- Integer unpack family (vpunpckhdq / vpunpckhqdq / vpunpckldq / vpunpcklqdq),
# EVEX 128/256-bit: masking/zeroing, memory, broadcast, Disp8*N boundaries.
# Dword forms broadcast {1to4}/{1to8}; qword forms broadcast {1to2}/{1to4}.
vpunpckhdq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhdq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpunpckhdq xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhdq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpunpckhdq ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhqdq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhqdq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpckldq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpunpckldq xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpckldq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpunpckldq ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpunpcklqdq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpunpcklqdq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# --- vpxord/vpxorq, EVEX 128/256-bit: masking/zeroing, memory, element-width
# broadcast, and Disp8*N compression boundary displacements.
vpxord xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpxord xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpxord xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpxord xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpxord ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpxord ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpxord ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpxorq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpxorq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpxorq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpxorq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
# --- vrcp14pd/vrcp14ps, EVEX 128/256-bit (two-operand): masking/zeroing,
# memory, broadcast, and Disp8*N compression boundary displacements.
vrcp14pd xmm6{k7}, xmm5 # AVX512{F,VL}
vrcp14pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrcp14pd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrcp14pd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vrcp14pd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vrcp14pd ymm6{k7}, ymm5 # AVX512{F,VL}
vrcp14pd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrcp14pd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrcp14pd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vrcp14pd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vrcp14ps xmm6{k7}, xmm5 # AVX512{F,VL}
vrcp14ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrcp14ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrcp14ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vrcp14ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vrcp14ps ymm6{k7}, ymm5 # AVX512{F,VL}
vrcp14ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrcp14ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrcp14ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrcp14ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vrcp14ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
# --- vrsqrt14pd/vrsqrt14ps, EVEX 128/256-bit (two-operand): masking/zeroing,
# memory, broadcast, and Disp8*N compression boundary displacements.
vrsqrt14pd xmm6{k7}, xmm5 # AVX512{F,VL}
vrsqrt14pd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, ymm5 # AVX512{F,VL}
vrsqrt14pd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, xmm5 # AVX512{F,VL}
vrsqrt14ps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, ymm5 # AVX512{F,VL}
vrsqrt14ps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
# --- Scatter family (vscatterdpd/dps/qpd/qps), EVEX 128/256-bit: VSIB memory
# destination with mandatory {k1} write-mask; varying base/scale/displacement.
# Index register width follows the index element size (dword -> xmm for both
# 128/256-bit vscatterdpd; qword indices use ymm for the 256-bit forms).
vscatterdpd [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterdpd [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterdpd [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterdpd [ebp+xmm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vscatterdpd [eax+xmm7+256]{k1}, ymm6 # AVX512{F,VL}
vscatterdpd [ecx+xmm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vscatterdps [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterdps [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterdps [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterdps [ebp+ymm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vscatterdps [eax+ymm7+256]{k1}, ymm6 # AVX512{F,VL}
vscatterdps [ecx+ymm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vscatterqpd [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterqpd [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterqpd [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterqpd [ebp+ymm7*8-123]{k1}, ymm6 # AVX512{F,VL}
vscatterqpd [eax+ymm7+256]{k1}, ymm6 # AVX512{F,VL}
vscatterqpd [ecx+ymm7*4+1024]{k1}, ymm6 # AVX512{F,VL}
vscatterqps [ebp+xmm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [eax+xmm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [ecx+xmm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [ebp+ymm7*8-123]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [eax+ymm7+256]{k1}, xmm6 # AVX512{F,VL}
vscatterqps [ecx+ymm7*4+1024]{k1}, xmm6 # AVX512{F,VL}
# --- vshufpd/vshufps, EVEX 128/256-bit with imm8 (0xab and 123 exercised):
# masking/zeroing, memory, broadcast, Disp8*N boundary displacements.
vshufpd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufpd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, [eax]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm6{k7}, xmm5, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm6{k7}, xmm5, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufpd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, [eax]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm6{k7}, ymm5, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm6{k7}, ymm5, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufps xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, xmm4, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, [eax]{1to4}, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, [edx+512]{1to4}, 123 # AVX512{F,VL}
vshufps xmm6{k7}, xmm5, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm6{k7}, xmm5, [edx-516]{1to4}, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufps ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, ymm4, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, [eax]{1to8}, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, [edx+512]{1to8}, 123 # AVX512{F,VL}
vshufps ymm6{k7}, ymm5, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm6{k7}, ymm5, [edx-516]{1to8}, 123 # AVX512{F,VL}
# --- vsqrtpd/vsqrtps, EVEX 128/256-bit (two-operand): masking/zeroing,
# memory, broadcast, and Disp8*N compression boundary displacements.
vsqrtpd xmm6{k7}, xmm5 # AVX512{F,VL}
vsqrtpd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtpd xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsqrtpd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsqrtpd xmm6{k7}, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, [edx+1024]{1to2} # AVX512{F,VL}
vsqrtpd xmm6{k7}, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm6{k7}, [edx-1032]{1to2} # AVX512{F,VL}
vsqrtpd ymm6{k7}, ymm5 # AVX512{F,VL}
vsqrtpd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtpd ymm6{k7}, [eax]{1to4} # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsqrtpd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsqrtpd ymm6{k7}, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, [edx+1024]{1to4} # AVX512{F,VL}
vsqrtpd ymm6{k7}, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm6{k7}, [edx-1032]{1to4} # AVX512{F,VL}
vsqrtps xmm6{k7}, xmm5 # AVX512{F,VL}
vsqrtps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtps xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsqrtps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsqrtps xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vsqrtps xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vsqrtps ymm6{k7}, ymm5 # AVX512{F,VL}
vsqrtps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsqrtps ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsqrtps ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsqrtps ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vsqrtps ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
# --- vsubpd/vsubps, EVEX 128/256-bit: masking/zeroing, memory, broadcast,
# and Disp8*N compression boundary displacements.
vsubpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vsubpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vsubpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vsubpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vsubpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vsubps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vsubps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vsubps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vsubps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vsubps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vsubps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vunpckhpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vunpckhpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vunpckhpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vunpckhpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vunpckhps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vunpckhps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vunpckhps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vunpckhps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
# --- VUNPCKLPD / VUNPCKLPS: same coverage pattern as the unpack-high group
# above — register/memory forms, {k7}/{z} masking, {1toN} broadcast, and
# Disp8*N displacement boundary cases for xmm (128-bit) and ymm (256-bit).
	vunpcklpd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vunpcklpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vunpcklpd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vunpcklpd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vunpcklpd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vunpcklpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vunpcklpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vunpcklpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vunpcklpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vunpcklps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vunpcklps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vunpcklps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vunpcklps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vunpcklps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vunpcklps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vunpcklps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vunpcklps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vunpcklps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vunpcklps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
# --- VPTERNLOGD / VPTERNLOGQ: ternary-logic forms with an imm8 operand
# (0xab and 123 used as immediate encodings), plus the usual masking,
# broadcast, and Disp8*N displacement-boundary memory operands.
	vpternlogd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vpternlogd xmm6{k7}, xmm5, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogd xmm6{k7}, xmm5, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogd ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vpternlogd ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpternlogd ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogq xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpternlogq xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpternlogq xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogq ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpternlogq ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpternlogq ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
# --- VPMOV* down-conversions, register-to-register forms only:
# plain truncate (vpmovXY), signed saturate (vpmovsXY), unsigned saturate
# (vpmovusXY), each with xmm and ymm sources, merging {k7} and zeroing
# {k7}{z} masking.
	vpmovqb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovqb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovqb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovqb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsqb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusqb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovqw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsqw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusqw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovqd xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsqd xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusqd xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovdb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsdb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusdb xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovdw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovsdw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}, xmm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}{z}, xmm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}, ymm5	 # AVX512{F,VL}
	vpmovusdw xmm6{k7}{z}, ymm5	 # AVX512{F,VL}
# --- VSHUFF32X4 / VSHUFF64X2 / VSHUFI32X4 / VSHUFI64X2: 128-bit-lane
# shuffles (ymm-only at VL=256) with imm8, masking, broadcast, and
# Disp8*N displacement-boundary memory operands.
	vshuff32x4 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vshuff32x4 ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshuff32x4 ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vshuff64x2 ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshuff64x2 ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vshufi32x4 ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshufi32x4 ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vshufi64x2 ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshufi64x2 ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
# --- VPERMQ / VPERMPD, variable (vector-control) forms: ymm-only,
# with masking, 64-bit broadcast {1to4}, and Disp8*N boundaries.
	vpermq ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermq ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermq ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermq ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
# --- VPERMT2D / VPERMT2Q / VPERMT2PS / VPERMT2PD: two-source permutes
# (destination doubles as index/merge operand), xmm and ymm forms with
# masking, element-size-appropriate broadcast, and Disp8*N boundaries.
	vpermt2d xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2d xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpermt2d xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2d xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2d ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpermt2d ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2d ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2q xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpermt2q xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2q xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2q ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermt2q ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2q ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vpermt2ps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2ps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vpermt2ps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2ps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vpermt2pd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2pd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vpermt2pd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2pd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
# --- VALIGNQ: qword-granular align with imm8, xmm and ymm forms, with
# masking, {1to2}/{1to4} broadcast, and Disp8*N displacement boundaries.
	valignq xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	valignq xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	valignq xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	valignq xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	valignq ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	valignq ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignq ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
# --- VSCALEFPD / VSCALEFPS: xmm and ymm forms with masking,
# element-size-appropriate broadcast, and Disp8*N displacement boundaries.
	vscalefpd xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefpd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{F,VL}
	vscalefpd xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vscalefpd xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefpd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{F,VL}
	vscalefpd ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vscalefpd ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefps xmm6{k7}{z}, xmm5, xmm4	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, [eax]{1to4}	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, [edx+508]{1to4}	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, [edx+512]{1to4}	 # AVX512{F,VL}
	vscalefps xmm6{k7}, xmm5, [edx-512]{1to4}	 # AVX512{F,VL} Disp8
	vscalefps xmm6{k7}, xmm5, [edx-516]{1to4}	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefps ymm6{k7}{z}, ymm5, ymm4	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, [eax]{1to8}	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, [edx+508]{1to8}	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, [edx+512]{1to8}	 # AVX512{F,VL}
	vscalefps ymm6{k7}, ymm5, [edx-512]{1to8}	 # AVX512{F,VL} Disp8
	vscalefps ymm6{k7}, ymm5, [edx-516]{1to8}	 # AVX512{F,VL}
# --- VFIXUPIMMPD / VFIXUPIMMPS: imm8-carrying forms (0xab and 123),
# xmm and ymm, with masking, broadcast, and Disp8*N boundaries.
	vfixupimmpd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmpd ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, xmm4, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, [eax]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps xmm6{k7}, xmm5, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps xmm6{k7}, xmm5, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, ymm4, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, [eax]{1to8}, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vfixupimmps ymm6{k7}, ymm5, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps ymm6{k7}, ymm5, [edx-516]{1to8}, 123	 # AVX512{F,VL}
# --- VPSLLD / VPSLLQ immediate-shift forms: note the second operand may
# itself be a (broadcast) memory source, with the shift count as imm8.
# Covers masking, broadcast, and Disp8*N displacement boundaries.
	vpslld xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpslld xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpslld xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, [edx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, [edx+512]{1to4}, 123	 # AVX512{F,VL}
	vpslld xmm6{k7}, [edx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpslld xmm6{k7}, [edx-516]{1to4}, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpslld ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpslld ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, [eax]{1to8}, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, [edx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, [edx+512]{1to8}, 123	 # AVX512{F,VL}
	vpslld ymm6{k7}, [edx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpslld ymm6{k7}, [edx-516]{1to8}, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, xmm5, 0xab	 # AVX512{F,VL}
	vpsllq xmm6{k7}{z}, xmm5, 0xab	 # AVX512{F,VL}
	vpsllq xmm6{k7}, xmm5, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, [eax]{1to2}, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, [edx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, [edx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpsllq xmm6{k7}, [edx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsllq xmm6{k7}, [edx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, ymm5, 0xab	 # AVX512{F,VL}
	vpsllq ymm6{k7}{z}, ymm5, 0xab	 # AVX512{F,VL}
	vpsllq ymm6{k7}, ymm5, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [ecx], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, [eax]{1to4}, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, [edx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, [edx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpsllq ymm6{k7}, [edx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsllq ymm6{k7}, [edx-1032]{1to4}, 123	 # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpsrad xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpsrad xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpsrad xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrad xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrad xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpsrad xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpsrad xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpsrad xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vpsrad xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrad xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpsrad ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpsrad ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpsrad ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsrad ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsrad ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vpsrad ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpsrad ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpsrad ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vpsrad ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrad ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vpsraq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vpsraq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vpsraq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsraq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsraq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vpsraq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vpsraq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vpsraq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vpsraq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsraq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vpsraq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vpsraq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vpsraq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vpsraq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vpsraq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vpsraq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vpsraq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vpsraq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vpsraq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsraq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprolvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vprolvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vprolvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprolvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vprolvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vprolvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vprold xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprold xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprold xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprold xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprold xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprold xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vprold xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprold xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vprold ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprold ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprold ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprold ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprold ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprold ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vprold ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprold ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprolvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vprolvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vprolvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprolvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vprolvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vprolq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprolq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprolq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprolq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprolq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprolq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vprolq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vprolq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprolq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprolq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprolq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprolq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprolq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vprolq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprorvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vprorvd xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprorvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vprorvd ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vprord xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprord xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprord xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprord xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprord xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprord xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vprord xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vprord ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprord ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprord ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprord ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprord ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprord ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vprord ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vprorvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vprorvq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vprorvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vprorvq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vprorq xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vprorq xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vprorq xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprorq xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vprorq xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vprorq xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vprorq xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vprorq ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vprorq ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vprorq ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vprorq ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vprorq ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vprorq ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vprorq ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vrndscalepd xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vrndscalepd xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, [eax]{1to2}, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{F,VL}
vrndscalepd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vrndscalepd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vrndscalepd ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vrndscalepd ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{F,VL}
vrndscalepd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscalepd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, xmm5, 0xab # AVX512{F,VL}
vrndscaleps xmm6{k7}{z}, xmm5, 0xab # AVX512{F,VL}
vrndscaleps xmm6{k7}, xmm5, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, [eax]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscaleps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, ymm5, 0xab # AVX512{F,VL}
vrndscaleps ymm6{k7}{z}, ymm5, 0xab # AVX512{F,VL}
vrndscaleps ymm6{k7}, ymm5, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, [eax]{1to8}, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{F,VL}
vrndscaleps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vrndscaleps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{F,VL}
vpcompressq XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpcompressq XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpcompressq XMMWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpcompressq XMMWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpcompressq YMMWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpcompressq xmm6{k7}, xmm5 # AVX512{F,VL}
vpcompressq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vpcompressq ymm6{k7}, ymm5 # AVX512{F,VL}
vpcompressq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvtps2ph QWORD PTR [ecx]{k7}, xmm6, 0xab # AVX512{F,VL}
vcvtps2ph QWORD PTR [ecx]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [esp+esi*8-123456]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [edx+1016]{k7}, xmm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [edx+1024]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [edx-1024]{k7}, xmm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [edx-1032]{k7}, xmm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [ecx]{k7}, ymm6, 0xab # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [ecx]{k7}, ymm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [edx+2032]{k7}, ymm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [edx+2048]{k7}, ymm6, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [edx-2048]{k7}, ymm6, 123 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [edx-2064]{k7}, ymm6, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{F,VL}
vmovapd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovapd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovapd XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovapd XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovapd YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovaps XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovaps YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovupd XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovupd YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vmovups XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vmovups XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vmovups XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{F,VL}
vmovups XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{F,VL}
vmovups YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vmovups YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vmovups YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{F,VL}
vmovups YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{F,VL}
vpmovqb WORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovqb WORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovqb WORD PTR [edx+254]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [edx+256]{k7}, xmm6 # AVX512{F,VL}
vpmovqb WORD PTR [edx-256]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [edx-258]{k7}, xmm6 # AVX512{F,VL}
vpmovqb DWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovqb DWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovqb DWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpmovqb DWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb WORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb WORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb WORD PTR [edx+254]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [edx+256]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb WORD PTR [edx-256]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [edx-258]{k7}, xmm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpmovsqb DWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb WORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb WORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb WORD PTR [edx+254]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [edx+256]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb WORD PTR [edx-256]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [edx-258]{k7}, xmm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [edx+508]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [edx+512]{k7}, ymm6 # AVX512{F,VL}
vpmovusqb DWORD PTR [edx-512]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [edx-516]{k7}, ymm6 # AVX512{F,VL}
vpmovqw DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovqw DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovqw DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovqw DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovqw QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovqw QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovqw QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovqw QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovsqw QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovusqw QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovqd QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovqd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovqd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovqd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovqd XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovdb DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovdb DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovdb DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovdb DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovdb QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovdb QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovdb QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovdb QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovsdb QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [edx+508]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [edx+512]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb DWORD PTR [edx-512]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [edx-516]{k7}, xmm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [edx+1016]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [edx+1024]{k7}, ymm6 # AVX512{F,VL}
vpmovusdb QWORD PTR [edx-1024]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [edx-1032]{k7}, ymm6 # AVX512{F,VL}
vpmovdw QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovdw QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovdw QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovdw QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovdw XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [ecx]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, [eax]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, ymm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{F,VL}
vcvttps2udq xmm6{k7}, xmm5 # AVX512{F,VL}
vcvttps2udq xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, [eax]{1to4} # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vcvttps2udq xmm6{k7}, [edx+508]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, [edx+512]{1to4} # AVX512{F,VL}
vcvttps2udq xmm6{k7}, [edx-512]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm6{k7}, [edx-516]{1to4} # AVX512{F,VL}
vcvttps2udq ymm6{k7}, ymm5 # AVX512{F,VL}
vcvttps2udq ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, [eax]{1to8} # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vcvttps2udq ymm6{k7}, [edx+508]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, [edx+512]{1to8} # AVX512{F,VL}
vcvttps2udq ymm6{k7}, [edx-512]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm6{k7}, [edx-516]{1to8} # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2d xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpermi2d xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2d ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermi2d ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2q xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpermi2q xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2q ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpermi2q ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2ps xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vpermi2ps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2ps ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vpermi2ps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, xmm4 # AVX512{F,VL}
vpermi2pd xmm6{k7}{z}, xmm5, xmm4 # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vpermi2pd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, ymm4 # AVX512{F,VL}
vpermi2pd ymm6{k7}{z}, ymm5, ymm4 # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vpermi2pd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, xmm4 # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, [eax]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, [edx+508]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, [edx+512]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, xmm5, [edx-512]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, xmm5, [edx-516]{1to4} # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, ymm4 # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, [eax]{1to8} # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, [edx+508]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, [edx+512]{1to8} # AVX512{F,VL}
vptestnmd k5{k7}, ymm5, [edx-512]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5{k7}, ymm5, [edx-516]{1to8} # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, xmm4 # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, [eax]{1to2} # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, [edx+1016]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, [edx+1024]{1to2} # AVX512{F,VL}
vptestnmq k5{k7}, xmm5, [edx-1024]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, xmm5, [edx-1032]{1to2} # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, ymm4 # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, [eax]{1to4} # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, [edx+1016]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, [edx+1024]{1to4} # AVX512{F,VL}
vptestnmq k5{k7}, ymm5, [edx-1024]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5{k7}, ymm5, [edx-1032]{1to4} # AVX512{F,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 5,772
|
gas/testsuite/gas/i386/disassem.s
|
# Raw byte sequences fed directly to the disassembler to exercise decoding
# of boundary and deliberately-invalid encodings.
# NOTE(review): the expected disassembly lives in the paired disassem.d
# dump file (not visible here) -- confirm against it before changing bytes.
.text
# Opcode 0xFF with ModRM mod=11: register operands for the far-jmp (/5)
# and far-call (/3) groups -- invalid forms the disassembler must reject.
.byte 0xFF, 0xEF
.byte 0xFF, 0xD8
# Five padding NOP bytes (0x90).
.fill 0x5, 0x1, 0x90
# VEX-encoded mask (k-register) opcodes with ModRM bytes 0x9B/0x6F/0x3F
# (mod = 10 / 01 / 00, i.e. memory forms).  NOTE(review): the k-register
# instructions require mod=11, so these should decode as bad/boundary
# cases -- verify against disassem.d.
.byte 0xC5, 0xEC, 0x4A, 0x9B
.byte 0xC5, 0xEC, 0x4A, 0x6F
.byte 0xC5, 0xEC, 0x4A, 0x3F
.byte 0xC5, 0xED, 0x4A, 0x9B
.byte 0xC5, 0xED, 0x4A, 0x6F
.byte 0xC5, 0xED, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x3F
.byte 0xC5, 0xEC, 0x41, 0x9B
.byte 0xC5, 0xEC, 0x41, 0x6F
.byte 0xC5, 0xEC, 0x41, 0x3F
.byte 0xC5, 0xED, 0x41, 0x9B
.byte 0xC5, 0xED, 0x41, 0x6F
.byte 0xC5, 0xED, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x3F
.byte 0xC5, 0xEC, 0x42, 0x9B
.byte 0xC5, 0xEC, 0x42, 0x6F
.byte 0xC5, 0xEC, 0x42, 0x3F
.byte 0xC5, 0xED, 0x42, 0x9B
.byte 0xC5, 0xED, 0x42, 0x6F
.byte 0xC5, 0xED, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x3F
.byte 0xC5, 0xEC, 0x4B, 0x9B
.byte 0xC5, 0xEC, 0x4B, 0x6F
.byte 0xC5, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xED, 0x4B, 0x9B
.byte 0xC5, 0xED, 0x4B, 0x6F
.byte 0xC5, 0xED, 0x4B, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xF8, 0x44, 0x9B
.byte 0xC5, 0xF8, 0x44, 0x6F
.byte 0xC5, 0xF8, 0x44, 0x3F
.byte 0xC5, 0xF9, 0x44, 0x9B
.byte 0xC5, 0xF9, 0x44, 0x6F
.byte 0xC5, 0xF9, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x3F
.byte 0xC5, 0xEC, 0x45, 0x9B
.byte 0xC5, 0xEC, 0x45, 0x6F
.byte 0xC5, 0xEC, 0x45, 0x3F
.byte 0xC5, 0xED, 0x45, 0x9B
.byte 0xC5, 0xED, 0x45, 0x6F
.byte 0xC5, 0xED, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x3F
.byte 0xC5, 0xF8, 0x98, 0x9B
.byte 0xC5, 0xF8, 0x98, 0x6F
.byte 0xC5, 0xF8, 0x98, 0x3F
.byte 0xC5, 0xF9, 0x98, 0x9B
.byte 0xC5, 0xF9, 0x98, 0x6F
.byte 0xC5, 0xF9, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x3F
.byte 0xC5, 0xEC, 0x46, 0x9B
.byte 0xC5, 0xEC, 0x46, 0x6F
.byte 0xC5, 0xEC, 0x46, 0x3F
.byte 0xC5, 0xED, 0x46, 0x9B
.byte 0xC5, 0xED, 0x46, 0x6F
.byte 0xC5, 0xED, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x3F
.byte 0xC5, 0xEC, 0x47, 0x9B
.byte 0xC5, 0xEC, 0x47, 0x6F
.byte 0xC5, 0xEC, 0x47, 0x3F
.byte 0xC5, 0xED, 0x47, 0x9B
.byte 0xC5, 0xED, 0x47, 0x6F
.byte 0xC5, 0xED, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x3F
.byte 0xC5, 0xF8, 0x99, 0x9B
.byte 0xC5, 0xF8, 0x99, 0x6F
.byte 0xC5, 0xF8, 0x99, 0x3F
.byte 0xC5, 0xF9, 0x99, 0x9B
.byte 0xC5, 0xF9, 0x99, 0x6F
.byte 0xC5, 0xF9, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x3F
# VEX map 0F3A opcodes 0x30-0x33 (k-register shift/insert group) with
# varying ModRM bytes, each followed by a 0x01 immediate byte.
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x04, 0x01
# VEX map 0F opcodes 0x92/0x93 (kmov gpr<->mask forms) with the same
# three non-register ModRM bytes as above.
.byte 0xC5, 0xF8, 0x92, 0x9B
.byte 0xC5, 0xF8, 0x92, 0x6F
.byte 0xC5, 0xF8, 0x92, 0x3F
.byte 0xC5, 0xF9, 0x92, 0x9B
.byte 0xC5, 0xF9, 0x92, 0x6F
.byte 0xC5, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xFB, 0x92, 0x9B
.byte 0xC5, 0xFB, 0x92, 0x6F
.byte 0xC5, 0xFB, 0x92, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xF8, 0x93, 0x9B
.byte 0xC5, 0xF8, 0x93, 0x6F
.byte 0xC5, 0xF8, 0x93, 0x3F
.byte 0xC5, 0xF9, 0x93, 0x9B
.byte 0xC5, 0xF9, 0x93, 0x6F
.byte 0xC5, 0xF9, 0x93, 0x3F
.byte 0xC5, 0xFB, 0x93, 0x9B
.byte 0xC5, 0xFB, 0x93, 0x6F
.byte 0xC5, 0xFB, 0x93, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x3F
# Stray and truncated VEX (0xc4) / EVEX (0x62) fragments, including lone
# 0x62 and 0x01 bytes, to check the disassembler recovers cleanly from
# incomplete prefixes.
.byte 0xc4, 0xe2, 0x1, 0x1c, 0x41, 0x37
.byte 0x62, 0xf2, 0xad, 0x08, 0x1c, 0x01
.byte 0x1
.byte 0x62, 0xf3, 0x7d, 0x28, 0x1b, 0xc8, 0x25
.byte 0x62, 0xf3
.byte 0x62, 0xf3, 0x75, 0x08, 0x23, 0xc2, 0x25
.byte 0x62
.byte 0x62, 0xf2, 0x7d, 0x28, 0x5b, 0x41, 0x37
|
tactcomplabs/xbgas-binutils-gdb
| 8,414
|
gas/testsuite/gas/i386/x86-64-avx-gather.s
|
# Check 64bit AVX gather instructions
# Each AT&T-syntax instruction below is mirrored, one-for-one, in the
# Intel-syntax section after .intel_syntax noprefix; both spellings must
# assemble to identical encodings.  NOTE(review): the expected machine
# code lives in the paired .d dump file -- keep both sections in sync.
.text
_start:
# VGATHERDPD/VGATHERQPD: qword elements; the index register width (xmm
# vs ymm) and destination/mask width vary per form, including high
# (REX-extended) registers.
	vgatherdpd %xmm2, (%rbp, %xmm7, 2),%xmm1
	vgatherqpd %xmm2, (%rbp, %xmm7, 2),%xmm1
	vgatherdpd %ymm2, (%rbp, %xmm7, 2),%ymm1
	vgatherqpd %ymm2, (%rbp, %ymm7, 2),%ymm1
	vgatherdpd %xmm12, (%r13, %xmm14, 2),%xmm11
	vgatherqpd %xmm12, (%r13, %xmm14, 2),%xmm11
	vgatherdpd %ymm12, (%r13, %xmm14, 2),%ymm11
	vgatherqpd %ymm12, (%r13, %ymm14, 2),%ymm11
# VSIB with no base register: index-only addressing with small positive,
# negative, zero and larger displacements, at scale 1 and 8.
	vgatherdpd %ymm5,0x8(,%xmm4,1),%ymm6
	vgatherdpd %ymm5,-0x8(,%xmm4,1),%ymm6
	vgatherdpd %ymm5,(,%xmm4,1),%ymm6
	vgatherdpd %ymm5,0x298(,%xmm4,1),%ymm6
	vgatherdpd %ymm5,0x8(,%xmm4,8),%ymm6
	vgatherdpd %ymm5,-0x8(,%xmm4,8),%ymm6
	vgatherdpd %ymm5,(,%xmm4,8),%ymm6
	vgatherdpd %ymm5,0x298(,%xmm4,8),%ymm6
	vgatherdpd %ymm5,0x8(,%xmm14,1),%ymm6
	vgatherdpd %ymm5,-0x8(,%xmm14,1),%ymm6
	vgatherdpd %ymm5,(,%xmm14,1),%ymm6
	vgatherdpd %ymm5,0x298(,%xmm14,1),%ymm6
	vgatherdpd %ymm5,0x8(,%xmm14,8),%ymm6
	vgatherdpd %ymm5,-0x8(,%xmm14,8),%ymm6
	vgatherdpd %ymm5,(,%xmm14,8),%ymm6
	vgatherdpd %ymm5,0x298(,%xmm14,8),%ymm6
# VGATHERDPS/VGATHERQPS: dword elements (qword-index forms narrow the
# destination to xmm).
	vgatherdps %xmm2, (%rbp, %xmm7, 2),%xmm1
	vgatherqps %xmm2, (%rbp, %xmm7, 2),%xmm1
	vgatherdps %ymm2, (%rbp, %ymm7, 2),%ymm1
	vgatherqps %xmm2, (%rbp, %ymm7, 2),%xmm1
	vgatherdps %xmm12, (%r13, %xmm14, 2),%xmm11
	vgatherqps %xmm12, (%r13, %xmm14, 2),%xmm11
	vgatherdps %ymm12, (%r13, %ymm14, 2),%ymm11
	vgatherqps %xmm12, (%r13, %ymm14, 2),%xmm11
	vgatherdps %xmm5,0x8(,%xmm4,1),%xmm6
	vgatherdps %xmm5,-0x8(,%xmm4,1),%xmm6
	vgatherdps %xmm5,(,%xmm4,1),%xmm6
	vgatherdps %xmm5,0x298(,%xmm4,1),%xmm6
	vgatherdps %xmm5,0x8(,%xmm4,8),%xmm6
	vgatherdps %xmm5,-0x8(,%xmm4,8),%xmm6
	vgatherdps %xmm5,(,%xmm4,8),%xmm6
	vgatherdps %xmm5,0x298(,%xmm4,8),%xmm6
	vgatherdps %xmm5,0x8(,%xmm14,1),%xmm6
	vgatherdps %xmm5,-0x8(,%xmm14,1),%xmm6
	vgatherdps %xmm5,(,%xmm14,1),%xmm6
	vgatherdps %xmm5,0x298(,%xmm14,1),%xmm6
	vgatherdps %xmm5,0x8(,%xmm14,8),%xmm6
	vgatherdps %xmm5,-0x8(,%xmm14,8),%xmm6
	vgatherdps %xmm5,(,%xmm14,8),%xmm6
	vgatherdps %xmm5,0x298(,%xmm14,8),%xmm6
# VPGATHERDD/VPGATHERQD: integer dword-element gathers, same pattern.
	vpgatherdd %xmm2, (%rbp, %xmm7, 2),%xmm1
	vpgatherqd %xmm2, (%rbp, %xmm7, 2),%xmm1
	vpgatherdd %ymm2, (%rbp, %ymm7, 2),%ymm1
	vpgatherqd %xmm2, (%rbp, %ymm7, 2),%xmm1
	vpgatherdd %xmm12, (%r13, %xmm14, 2),%xmm11
	vpgatherqd %xmm12, (%r13, %xmm14, 2),%xmm11
	vpgatherdd %ymm12, (%r13, %ymm14, 2),%ymm11
	vpgatherqd %xmm12, (%r13, %ymm14, 2),%xmm11
	vpgatherdd %xmm5,0x8(,%xmm4,1),%xmm6
	vpgatherdd %xmm5,-0x8(,%xmm4,1),%xmm6
	vpgatherdd %xmm5,(,%xmm4,1),%xmm6
	vpgatherdd %xmm5,0x298(,%xmm4,1),%xmm6
	vpgatherdd %xmm5,0x8(,%xmm4,8),%xmm6
	vpgatherdd %xmm5,-0x8(,%xmm4,8),%xmm6
	vpgatherdd %xmm5,(,%xmm4,8),%xmm6
	vpgatherdd %xmm5,0x298(,%xmm4,8),%xmm6
	vpgatherdd %xmm5,0x8(,%xmm14,1),%xmm6
	vpgatherdd %xmm5,-0x8(,%xmm14,1),%xmm6
	vpgatherdd %xmm5,(,%xmm14,1),%xmm6
	vpgatherdd %xmm5,0x298(,%xmm14,1),%xmm6
	vpgatherdd %xmm5,0x8(,%xmm14,8),%xmm6
	vpgatherdd %xmm5,-0x8(,%xmm14,8),%xmm6
	vpgatherdd %xmm5,(,%xmm14,8),%xmm6
	vpgatherdd %xmm5,0x298(,%xmm14,8),%xmm6
# VPGATHERDQ/VPGATHERQQ: integer qword-element gathers, same pattern.
	vpgatherdq %xmm2, (%rbp, %xmm7, 2),%xmm1
	vpgatherqq %xmm2, (%rbp, %xmm7, 2),%xmm1
	vpgatherdq %ymm2, (%rbp, %xmm7, 2),%ymm1
	vpgatherqq %ymm2, (%rbp, %ymm7, 2),%ymm1
	vpgatherdq %xmm12, (%r13, %xmm14, 2),%xmm11
	vpgatherqq %xmm12, (%r13, %xmm14, 2),%xmm11
	vpgatherdq %ymm12, (%r13, %xmm14, 2),%ymm11
	vpgatherqq %ymm12, (%r13, %ymm14, 2),%ymm11
	vpgatherdq %ymm5,0x8(,%xmm4,1),%ymm6
	vpgatherdq %ymm5,-0x8(,%xmm4,1),%ymm6
	vpgatherdq %ymm5,(,%xmm4,1),%ymm6
	vpgatherdq %ymm5,0x298(,%xmm4,1),%ymm6
	vpgatherdq %ymm5,0x8(,%xmm4,8),%ymm6
	vpgatherdq %ymm5,-0x8(,%xmm4,8),%ymm6
	vpgatherdq %ymm5,(,%xmm4,8),%ymm6
	vpgatherdq %ymm5,0x298(,%xmm4,8),%ymm6
	vpgatherdq %ymm5,0x8(,%xmm14,1),%ymm6
	vpgatherdq %ymm5,-0x8(,%xmm14,1),%ymm6
	vpgatherdq %ymm5,(,%xmm14,1),%ymm6
	vpgatherdq %ymm5,0x298(,%xmm14,1),%ymm6
	vpgatherdq %ymm5,0x8(,%xmm14,8),%ymm6
	vpgatherdq %ymm5,-0x8(,%xmm14,8),%ymm6
	vpgatherdq %ymm5,(,%xmm14,8),%ymm6
	vpgatherdq %ymm5,0x298(,%xmm14,8),%ymm6
# Intel-syntax mirror of everything above (operand order reversed:
# destination first, VSIB memory operand second, mask register last).
	.intel_syntax noprefix
	vgatherdpd xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
	vgatherqpd xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
	vgatherdpd ymm1,QWORD PTR [rbp+xmm7*2+0x0],ymm2
	vgatherqpd ymm1,QWORD PTR [rbp+ymm7*2+0x0],ymm2
	vgatherdpd xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
	vgatherqpd xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
	vgatherdpd ymm11,QWORD PTR [r13+xmm14*2+0x0],ymm12
	vgatherqpd ymm11,QWORD PTR [r13+ymm14*2+0x0],ymm12
	vgatherdpd ymm6,QWORD PTR [xmm4*1+0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*1-0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*1+0x0],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*1+0x298],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*8+0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*8-0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*8+0x0],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm4*8+0x298],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*1+0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*1-0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*1+0x0],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*1+0x298],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*8+0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*8-0x8],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*8+0x0],ymm5
	vgatherdpd ymm6,QWORD PTR [xmm14*8+0x298],ymm5
	vgatherdps xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
	vgatherqps xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
	vgatherdps ymm1,DWORD PTR [rbp+ymm7*2+0x0],ymm2
	vgatherqps xmm1,DWORD PTR [rbp+ymm7*2+0x0],xmm2
	vgatherdps xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
	vgatherqps xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
	vgatherdps ymm11,DWORD PTR [r13+ymm14*2+0x0],ymm12
	vgatherqps xmm11,DWORD PTR [r13+ymm14*2+0x0],xmm12
	vgatherdps xmm6,DWORD PTR [xmm4*1+0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*1-0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*1+0x0],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*1+0x298],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*8+0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*8-0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*8+0x0],xmm5
	vgatherdps xmm6,DWORD PTR [xmm4*8+0x298],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*1+0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*1-0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*1+0x0],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*1+0x298],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*8+0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*8-0x8],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*8+0x0],xmm5
	vgatherdps xmm6,DWORD PTR [xmm14*8+0x298],xmm5
	vpgatherdd xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
	vpgatherqd xmm1,DWORD PTR [rbp+xmm7*2+0x0],xmm2
	vpgatherdd ymm1,DWORD PTR [rbp+ymm7*2+0x0],ymm2
	vpgatherqd xmm1,DWORD PTR [rbp+ymm7*2+0x0],xmm2
	vpgatherdd xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
	vpgatherqd xmm11,DWORD PTR [r13+xmm14*2+0x0],xmm12
	vpgatherdd ymm11,DWORD PTR [r13+ymm14*2+0x0],ymm12
	vpgatherqd xmm11,DWORD PTR [r13+ymm14*2+0x0],xmm12
	vpgatherdd xmm6,DWORD PTR [xmm4*1+0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*1-0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*1+0x0],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*1+0x298],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*8+0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*8-0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*8+0x0],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm4*8+0x298],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*1+0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*1-0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*1+0x0],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*1+0x298],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*8+0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*8-0x8],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*8+0x0],xmm5
	vpgatherdd xmm6,DWORD PTR [xmm14*8+0x298],xmm5
	vpgatherdq xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
	vpgatherqq xmm1,QWORD PTR [rbp+xmm7*2+0x0],xmm2
	vpgatherdq ymm1,QWORD PTR [rbp+xmm7*2+0x0],ymm2
	vpgatherqq ymm1,QWORD PTR [rbp+ymm7*2+0x0],ymm2
	vpgatherdq xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
	vpgatherqq xmm11,QWORD PTR [r13+xmm14*2+0x0],xmm12
	vpgatherdq ymm11,QWORD PTR [r13+xmm14*2+0x0],ymm12
	vpgatherqq ymm11,QWORD PTR [r13+ymm14*2+0x0],ymm12
	vpgatherdq ymm6,QWORD PTR [xmm4*1+0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*1-0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*1+0x0],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*1+0x298],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*8+0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*8-0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*8+0x0],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm4*8+0x298],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*1+0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*1-0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*1+0x0],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*1+0x298],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*8+0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*8-0x8],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*8+0x0],ymm5
	vpgatherdq ymm6,QWORD PTR [xmm14*8+0x298],ymm5
# ============================================================
# File boundary (extraction metadata, reformatted as comments):
#   repo: tactcomplabs/xbgas-binutils-gdb   size: 104,175
#   file: gas/testsuite/gas/i386/avx512_fp16.s
# ============================================================
# Check 32bit AVX512-FP16 instructions
# Reviewer notes: each mnemonic below is exercised with a fixed pattern:
# plain register form; SAE/rounding-control form ({rn-sae} or {sae});
# the same with masking ({%k7}) and zero-masking ({z}); a SIB-addressed
# memory operand with a 32-bit displacement; an embedded-broadcast form
# ({1toN}) where the instruction supports it; and +/- displacements
# chosen to hit the compressed-disp8 extremes (Disp8(7f) / Disp8(80)).
.allow_index_reg
.text
_start:
# vaddph/vaddsh: packed/scalar FP16 add.
vaddph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vaddph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vaddph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vaddph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vaddph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vaddph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vaddsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vaddsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vaddsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vaddsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vaddsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vcmpph/vcmpsh: compares write a mask register (k5); imm8 = predicate.
vcmpph $123, %zmm4, %zmm5, %k5 #AVX512-FP16
vcmpph $123, {sae}, %zmm4, %zmm5, %k5 #AVX512-FP16 HAS_SAE
vcmpph $123, {sae}, %zmm4, %zmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpph $123, 0x10000000(%esp, %esi, 8), %zmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vcmpph $123, (%ecx){1to32}, %zmm5, %k5 #AVX512-FP16 BROADCAST_EN
vcmpph $123, 8128(%ecx), %zmm5, %k5 #AVX512-FP16 Disp8(7f)
vcmpph $123, -256(%edx){1to32}, %zmm5, %k5{%k7} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpsh $123, %xmm4, %xmm5, %k5 #AVX512-FP16
vcmpsh $123, {sae}, %xmm4, %xmm5, %k5 #AVX512-FP16 HAS_SAE
vcmpsh $123, {sae}, %xmm4, %xmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpsh $123, 0x10000000(%esp, %esi, 8), %xmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vcmpsh $123, (%ecx), %xmm5, %k5 #AVX512-FP16
vcmpsh $123, 254(%ecx), %xmm5, %k5 #AVX512-FP16 Disp8(7f)
vcmpsh $123, -256(%edx), %xmm5, %k5{%k7} #AVX512-FP16 MASK_ENABLING
# vcomish: ordered compare setting EFLAGS; no masking/broadcast forms.
vcomish %xmm5, %xmm6 #AVX512-FP16
vcomish {sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vcomish 0x10000000(%esp, %esi, 8), %xmm6 #AVX512-FP16
vcomish (%ecx), %xmm6 #AVX512-FP16
vcomish 254(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcomish -256(%edx), %xmm6 #AVX512-FP16 Disp8(80)
# Conversions with narrowing destinations (zmm source -> ymm/xmm dest).
vcvtdq2ph %zmm5, %ymm6 #AVX512-FP16
vcvtdq2ph {rn-sae}, %zmm5, %ymm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtdq2ph {rn-sae}, %zmm5, %ymm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtdq2ph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtdq2ph (%ecx){1to16}, %ymm6 #AVX512-FP16 BROADCAST_EN
vcvtdq2ph 8128(%ecx), %ymm6 #AVX512-FP16 Disp8(7f)
vcvtdq2ph -512(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtpd2ph: the "z" suffix on memory forms disambiguates the 512-bit
# source operand size (vcvtpd2phz), since the xmm destination alone
# cannot imply it.
vcvtpd2ph %zmm5, %xmm6 #AVX512-FP16
vcvtpd2ph {rn-sae}, %zmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtpd2ph {rn-sae}, %zmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtpd2phz 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtpd2ph (%ecx){1to8}, %xmm6 #AVX512-FP16 BROADCAST_EN
vcvtpd2phz 8128(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcvtpd2ph -1024(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtph2* family: FP16 -> wider element conversions (dq/pd/psx/qq/
# udq/uqq/uw/w), so the source is narrower (ymm/xmm) than the zmm
# destination, or same-width for the word forms.  Broadcast factor
# ({1to8}/{1to16}/{1to32}) and the Disp8*N boundary displacement track
# the memory-operand element size of each mnemonic.
vcvtph2dq %ymm5, %zmm6 #AVX512-FP16
vcvtph2dq {rn-sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2dq {rn-sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2dq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2dq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2dq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2dq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd %xmm5, %zmm6 #AVX512-FP16
vcvtph2pd {sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvtph2pd {sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2pd 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2pd (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2pd 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2pd -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx %ymm5, %zmm6 #AVX512-FP16
vcvtph2psx {sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvtph2psx {sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2psx 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2psx (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2psx 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2psx -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq %xmm5, %zmm6 #AVX512-FP16
vcvtph2qq {rn-sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2qq {rn-sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2qq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2qq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2qq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2qq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq %ymm5, %zmm6 #AVX512-FP16
vcvtph2udq {rn-sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2udq {rn-sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2udq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2udq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2udq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2udq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm5, %zmm6 #AVX512-FP16
vcvtph2uqq {rn-sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uqq {rn-sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uqq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2uqq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2uqq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2uqq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw %zmm5, %zmm6 #AVX512-FP16
vcvtph2uw {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uw {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uw 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2uw (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2uw 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2uw -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w %zmm5, %zmm6 #AVX512-FP16
vcvtph2w {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2w {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2w 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2w (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtph2w 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtph2w -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Narrowing conversions into FP16 destinations.
vcvtps2phx %zmm5, %ymm6 #AVX512-FP16
vcvtps2phx {rn-sae}, %zmm5, %ymm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtps2phx {rn-sae}, %zmm5, %ymm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtps2phx 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtps2phx (%ecx){1to16}, %ymm6 #AVX512-FP16 BROADCAST_EN
vcvtps2phx 8128(%ecx), %ymm6 #AVX512-FP16 Disp8(7f)
vcvtps2phx -512(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtqq2ph: "z" suffix on memory forms pins the 512-bit source size.
vcvtqq2ph %zmm5, %xmm6 #AVX512-FP16
vcvtqq2ph {rn-sae}, %zmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtqq2ph {rn-sae}, %zmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtqq2phz 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtqq2ph (%ecx){1to8}, %xmm6 #AVX512-FP16 BROADCAST_EN
vcvtqq2phz 8128(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcvtqq2ph -1024(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Scalar conversions to/from FP16.  GPR-destination forms (vcvtsh2si/
# vcvtsh2usi) have no masking; GPR-source forms use an explicit "l"
# suffix (vcvtsi2shl) to pin the 32-bit operand size on memory operands.
vcvtsd2sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtsd2sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsd2sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtsd2sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsd2sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsd2sh 1016(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsd2sh -1024(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2sd %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtsh2sd {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vcvtsh2sd {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2sd 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsh2sd (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsh2sd 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsh2sd -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2si %xmm6, %edx #AVX512-FP16
vcvtsh2si {rn-sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvtsh2si (%ecx), %edx #AVX512-FP16
vcvtsh2si 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvtsh2si -256(%edx), %edx #AVX512-FP16 Disp8(80)
vcvtsh2ss %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtsh2ss {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vcvtsh2ss {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2ss 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsh2ss (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsh2ss 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsh2ss -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2usi %xmm6, %edx #AVX512-FP16
vcvtsh2usi {rn-sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvtsh2usi (%ecx), %edx #AVX512-FP16
vcvtsh2usi 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvtsh2usi -256(%edx), %edx #AVX512-FP16 Disp8(80)
# vcvtsi2sh: note the rounding-control form keeps {rn-sae} in the
# second operand position, after the GPR source.
vcvtsi2sh %edx, %xmm5, %xmm6 #AVX512-FP16
vcvtsi2sh %edx, {rn-sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2shl 0x10000000(%esp, %esi, 8), %xmm5, %xmm6 #AVX512-FP16
vcvtsi2shl (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtsi2shl 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtsi2shl -512(%edx), %xmm5, %xmm6 #AVX512-FP16 Disp8(80)
vcvtss2sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vcvtss2sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtss2sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtss2sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtss2sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtss2sh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtss2sh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2* (truncating) conversions: truncation implies a fixed
# rounding mode, so these carry {sae} rather than {rn-sae}.
vcvttph2dq %ymm5, %zmm6 #AVX512-FP16
vcvttph2dq {sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2dq {sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2dq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2dq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2dq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2dq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq %xmm5, %zmm6 #AVX512-FP16
vcvttph2qq {sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2qq {sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2qq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2qq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2qq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2qq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq %ymm5, %zmm6 #AVX512-FP16
vcvttph2udq {sae}, %ymm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2udq {sae}, %ymm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2udq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2udq (%ecx){1to16}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2udq 4064(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2udq -256(%edx){1to16}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm5, %zmm6 #AVX512-FP16
vcvttph2uqq {sae}, %xmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2uqq {sae}, %xmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uqq 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2uqq (%ecx){1to8}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2uqq 2032(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2uqq -256(%edx){1to8}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw %zmm5, %zmm6 #AVX512-FP16
vcvttph2uw {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2uw {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uw 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2uw (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2uw 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2uw -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w %zmm5, %zmm6 #AVX512-FP16
vcvttph2w {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vcvttph2w {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2w 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2w (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvttph2w 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvttph2w -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Truncating scalar -> GPR conversions (no masking forms).
vcvttsh2si %xmm6, %edx #AVX512-FP16
vcvttsh2si {sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE
vcvttsh2si 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvttsh2si (%ecx), %edx #AVX512-FP16
vcvttsh2si 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvttsh2si -256(%edx), %edx #AVX512-FP16 Disp8(80)
vcvttsh2usi %xmm6, %edx #AVX512-FP16
vcvttsh2usi {sae}, %xmm6, %edx #AVX512-FP16 HAS_SAE
vcvttsh2usi 0x10000000(%esp, %esi, 8), %edx #AVX512-FP16
vcvttsh2usi (%ecx), %edx #AVX512-FP16
vcvttsh2usi 254(%ecx), %edx #AVX512-FP16 Disp8(7f)
vcvttsh2usi -256(%edx), %edx #AVX512-FP16 Disp8(80)
# Unsigned-integer -> FP16 conversions; vcvtuqq2phz carries the "z"
# suffix on memory forms to pin the 512-bit source size, and
# vcvtusi2shl the "l" suffix for a 32-bit GPR/memory source.
vcvtudq2ph %zmm5, %ymm6 #AVX512-FP16
vcvtudq2ph {rn-sae}, %zmm5, %ymm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtudq2ph {rn-sae}, %zmm5, %ymm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtudq2ph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtudq2ph (%ecx){1to16}, %ymm6 #AVX512-FP16 BROADCAST_EN
vcvtudq2ph 8128(%ecx), %ymm6 #AVX512-FP16 Disp8(7f)
vcvtudq2ph -512(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph %zmm5, %xmm6 #AVX512-FP16
vcvtuqq2ph {rn-sae}, %zmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuqq2ph {rn-sae}, %zmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuqq2phz 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtuqq2ph (%ecx){1to8}, %xmm6 #AVX512-FP16 BROADCAST_EN
vcvtuqq2phz 8128(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vcvtuqq2ph -1024(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtusi2sh %edx, %xmm5, %xmm6 #AVX512-FP16
vcvtusi2sh %edx, {rn-sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2shl 0x10000000(%esp, %esi, 8), %xmm5, %xmm6 #AVX512-FP16
vcvtusi2shl (%ecx), %xmm5, %xmm6 #AVX512-FP16
vcvtusi2shl 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vcvtusi2shl -512(%edx), %xmm5, %xmm6 #AVX512-FP16 Disp8(80)
vcvtuw2ph %zmm5, %zmm6 #AVX512-FP16
vcvtuw2ph {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuw2ph {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuw2ph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtuw2ph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtuw2ph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtuw2ph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph %zmm5, %zmm6 #AVX512-FP16
vcvtw2ph {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtw2ph {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtw2ph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vcvtw2ph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vcvtw2ph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vcvtw2ph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Division, complex-FMA (vfcmaddc*/vfcmulc*, FP16 complex-number ops
# with {1to16} dword-granular broadcast) and the vfmadd 132/213/231
# fused multiply-add family ({1to32} word-granular broadcast).
vdivph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vdivph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vdivph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vdivph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vdivph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vdivph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vdivsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vdivsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vdivsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vdivsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vdivsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfcmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmaddcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfcmaddcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfcmaddcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfcmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmaddcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfcmaddcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfcmaddcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfcmulcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmulcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfcmulcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfcmulcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfcmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfcmulcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfcmulcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfcmulcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmadd132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmadd132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd132sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmadd132sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmadd132sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmadd213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmadd213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd213sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmadd213sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmadd213sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmadd231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmadd231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd231sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmadd231sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmadd231sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmaddcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmaddcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddsub/vfmsub/vfmsubadd/vfmulc/vfnmadd families: same 7-line
# coverage pattern per mnemonic (plain, RC/SAE, mask+zero, SIB memory,
# broadcast, Disp8(7f)/Disp8(80) displacement extremes).
vfmaddsub132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddsub132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddsub132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddsub213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddsub213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmaddsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmaddsub231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmaddsub231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsub132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsub132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub132sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmsub132sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmsub132sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsub213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsub213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub213sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmsub213sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmsub213sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsub231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsub231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub231sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmsub231sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmsub231sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsubadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsubadd132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsubadd132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsubadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsubadd213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsubadd213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmsubadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmsubadd231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmsubadd231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfmulcph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmulcph (%ecx){1to16}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfmulcph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfmulcph -512(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfmulcsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfmulcsh 508(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfmulcsh -512(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfnmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfnmadd132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfnmadd132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfnmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd132sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfnmadd132sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfnmadd132sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfnmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfnmadd213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfnmadd213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfnmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd213sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfnmadd213sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfnmadd213sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfnmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfnmadd231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfnmadd231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfnmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd231sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfnmadd231sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfnmadd231sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfnmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub132ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfnmsub132ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfnmsub132ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfnmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub132sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfnmsub132sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfnmsub132sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfnmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub213ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfnmsub213ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfnmsub213ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfnmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub213sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfnmsub213sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfnmsub213sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vfnmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231ph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231ph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub231ph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vfnmsub231ph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vfnmsub231ph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231sh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vfnmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231sh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231sh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub231sh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vfnmsub231sh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vfnmsub231sh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfpclassph $123, %zmm6, %k5 #AVX512-FP16
vfpclassph $123, %zmm6, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclassphz $123, 0x10000000(%esp, %esi, 8), %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclassph $123, (%ecx){1to32}, %k5 #AVX512-FP16 BROADCAST_EN
vfpclassphz $123, 8128(%ecx), %k5 #AVX512-FP16 Disp8(7f)
vfpclassph $123, -256(%edx){1to32}, %k5{%k7} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclasssh $123, %xmm6, %k5 #AVX512-FP16
vfpclasssh $123, %xmm6, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclasssh $123, 0x10000000(%esp, %esi, 8), %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclasssh $123, (%ecx), %k5 #AVX512-FP16
vfpclasssh $123, 254(%ecx), %k5 #AVX512-FP16 Disp8(7f)
vfpclasssh $123, -256(%edx), %k5{%k7} #AVX512-FP16 Disp8(80) MASK_ENABLING
vgetexpph %zmm5, %zmm6 #AVX512-FP16
vgetexpph {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vgetexpph {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vgetexpph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vgetexpph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vgetexpph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vgetexpsh {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vgetexpsh {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vgetexpsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vgetexpsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vgetexpsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, %zmm5, %zmm6 #AVX512-FP16
vgetmantph $123, {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vgetmantph $123, {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantph $123, 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vgetmantph $123, (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vgetmantph $123, 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vgetmantph $123, -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantsh $123, %xmm4, %xmm5, %xmm6 #AVX512-FP16
vgetmantsh $123, {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vgetmantsh $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantsh $123, 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vgetmantsh $123, (%ecx), %xmm5, %xmm6 #AVX512-FP16
vgetmantsh $123, 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vgetmantsh $123, -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmaxph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vmaxph {sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vmaxph {sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vmaxph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vmaxph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vmaxph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vmaxsh {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vmaxsh {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vmaxsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vmaxsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vmaxsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vminph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vminph {sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vminph {sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vminph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vminph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vminph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vminsh {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vminsh {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vminsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vminsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vminsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vmovsh %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vmovsh 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vmovsh (%ecx), %xmm6 #AVX512-FP16
vmovsh 254(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vmovsh -256(%edx), %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh %xmm6, 0x10000000(%esp, %esi, 8){%k7} #AVX512-FP16 MASK_ENABLING
vmovsh %xmm6, (%ecx) #AVX512-FP16
vmovsh %xmm6, 254(%ecx) #AVX512-FP16 Disp8(7f)
vmovsh %xmm6, -256(%edx){%k7} #AVX512-FP16 Disp8(80) MASK_ENABLING
vmovw %edx, %xmm6 #AVX512-FP16
vmovw %xmm6, %edx #AVX512-FP16
vmovw 0x10000000(%esp, %esi, 8), %xmm6 #AVX512-FP16
vmovw (%ecx), %xmm6 #AVX512-FP16
vmovw 254(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vmovw -256(%edx), %xmm6 #AVX512-FP16 Disp8(80)
vmovw %xmm6, 0x10000000(%esp, %esi, 8) #AVX512-FP16
vmovw %xmm6, (%ecx) #AVX512-FP16
vmovw %xmm6, 254(%ecx) #AVX512-FP16 Disp8(7f)
vmovw %xmm6, -256(%edx) #AVX512-FP16 Disp8(80)
vmulph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vmulph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vmulph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vmulph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vmulph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vmulph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vmulsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vmulsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vmulsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vmulsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vmulsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrcpph %zmm5, %zmm6 #AVX512-FP16
vrcpph %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vrcpph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vrcpph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vrcpph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vrcpsh %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vrcpsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vrcpsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vrcpsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, %zmm5, %zmm6 #AVX512-FP16
vreduceph $123, {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vreduceph $123, {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreduceph $123, 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vreduceph $123, (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vreduceph $123, 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vreduceph $123, -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreducesh $123, %xmm4, %xmm5, %xmm6 #AVX512-FP16
vreducesh $123, {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vreducesh $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreducesh $123, 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vreducesh $123, (%ecx), %xmm5, %xmm6 #AVX512-FP16
vreducesh $123, 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vreducesh $123, -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, %zmm5, %zmm6 #AVX512-FP16
vrndscaleph $123, {sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE
vrndscaleph $123, {sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscaleph $123, 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vrndscaleph $123, (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vrndscaleph $123, 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vrndscaleph $123, -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscalesh $123, %xmm4, %xmm5, %xmm6 #AVX512-FP16
vrndscalesh $123, {sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vrndscalesh $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscalesh $123, 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vrndscalesh $123, (%ecx), %xmm5, %xmm6 #AVX512-FP16
vrndscalesh $123, 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vrndscalesh $123, -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph %zmm5, %zmm6 #AVX512-FP16
vrsqrtph %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vrsqrtph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vrsqrtph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vrsqrtph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vrsqrtsh %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vrsqrtsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vrsqrtsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vrsqrtsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vscalefph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vscalefph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vscalefph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vscalefph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vscalefph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vscalefph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vscalefsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vscalefsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vscalefsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vscalefsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vscalefsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph %zmm5, %zmm6 #AVX512-FP16
vsqrtph {rn-sae}, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtph {rn-sae}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtph 0x10000000(%esp, %esi, 8), %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vsqrtph (%ecx){1to32}, %zmm6 #AVX512-FP16 BROADCAST_EN
vsqrtph 8128(%ecx), %zmm6 #AVX512-FP16 Disp8(7f)
vsqrtph -256(%edx){1to32}, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vsqrtsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vsqrtsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vsqrtsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vsqrtsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsubph %zmm4, %zmm5, %zmm6 #AVX512-FP16
vsubph {rn-sae}, %zmm4, %zmm5, %zmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vsubph {rn-sae}, %zmm4, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubph 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512-FP16 MASK_ENABLING
vsubph (%ecx){1to32}, %zmm5, %zmm6 #AVX512-FP16 BROADCAST_EN
vsubph 8128(%ecx), %zmm5, %zmm6 #AVX512-FP16 Disp8(7f)
vsubph -256(%edx){1to32}, %zmm5, %zmm6{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubsh %xmm4, %xmm5, %xmm6 #AVX512-FP16
vsubsh {rn-sae}, %xmm4, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE RC_CTRL
vsubsh {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubsh 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16 MASK_ENABLING
vsubsh (%ecx), %xmm5, %xmm6 #AVX512-FP16
vsubsh 254(%ecx), %xmm5, %xmm6 #AVX512-FP16 Disp8(7f)
vsubsh -256(%edx), %xmm5, %xmm6{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vucomish %xmm5, %xmm6 #AVX512-FP16
vucomish {sae}, %xmm5, %xmm6 #AVX512-FP16 HAS_SAE
vucomish 0x10000000(%esp, %esi, 8), %xmm6 #AVX512-FP16
vucomish (%ecx), %xmm6 #AVX512-FP16
vucomish 254(%ecx), %xmm6 #AVX512-FP16 Disp8(7f)
vucomish -256(%edx), %xmm6 #AVX512-FP16 Disp8(80)
.intel_syntax noprefix
vaddph zmm6, zmm5, zmm4 #AVX512-FP16
vaddph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vaddph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vaddph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vaddph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vaddph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddsh xmm6, xmm5, xmm4 #AVX512-FP16
vaddsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vaddsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vaddsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vaddsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vaddsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcmpph k5, zmm5, zmm4, 123 #AVX512-FP16
vcmpph k5, zmm5, zmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vcmpph k5{k7}, zmm5, zmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpph k5{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vcmpph k5, zmm5, WORD BCST [ecx], 123 #AVX512-FP16 BROADCAST_EN
vcmpph k5, zmm5, ZMMWORD PTR [ecx+8128], 123 #AVX512-FP16 Disp8(7f)
vcmpph k5{k7}, zmm5, WORD BCST [edx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpsh k5, xmm5, xmm4, 123 #AVX512-FP16
vcmpsh k5, xmm5, xmm4{sae}, 123 #AVX512-FP16 HAS_SAE
vcmpsh k5{k7}, xmm5, xmm4{sae}, 123 #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpsh k5{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vcmpsh k5, xmm5, WORD PTR [ecx], 123 #AVX512-FP16
vcmpsh k5, xmm5, WORD PTR [ecx+254], 123 #AVX512-FP16 Disp8(7f)
vcmpsh k5{k7}, xmm5, WORD PTR [edx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING
vcomish xmm6, xmm5 #AVX512-FP16
vcomish xmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vcomish xmm6, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcomish xmm6, WORD PTR [ecx] #AVX512-FP16
vcomish xmm6, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcomish xmm6, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvtdq2ph ymm6, zmm5 #AVX512-FP16
vcvtdq2ph ymm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtdq2ph ymm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtdq2ph ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtdq2ph ymm6, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtdq2ph ymm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtdq2ph ymm6{k7}{z}, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph xmm6, zmm5 #AVX512-FP16
vcvtpd2ph xmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtpd2ph xmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtpd2ph xmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtpd2ph xmm6, QWORD BCST [ecx]{1to8} #AVX512-FP16 BROADCAST_EN
vcvtpd2ph xmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtpd2ph xmm6{k7}{z}, QWORD BCST [edx-1024]{1to8} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq zmm6, ymm5 #AVX512-FP16
vcvtph2dq zmm6, ymm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2dq zmm6{k7}{z}, ymm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2dq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2dq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2dq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvtph2dq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd zmm6, xmm5 #AVX512-FP16
vcvtph2pd zmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vcvtph2pd zmm6{k7}{z}, xmm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2pd zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2pd zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2pd zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvtph2pd zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx zmm6, ymm5 #AVX512-FP16
vcvtph2psx zmm6, ymm5{sae} #AVX512-FP16 HAS_SAE
vcvtph2psx zmm6{k7}{z}, ymm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2psx zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2psx zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2psx zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvtph2psx zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq zmm6, xmm5 #AVX512-FP16
vcvtph2qq zmm6, xmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2qq zmm6{k7}{z}, xmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2qq zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2qq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2qq zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvtph2qq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq zmm6, ymm5 #AVX512-FP16
vcvtph2udq zmm6, ymm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2udq zmm6{k7}{z}, ymm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2udq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2udq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2udq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvtph2udq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq zmm6, xmm5 #AVX512-FP16
vcvtph2uqq zmm6, xmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uqq zmm6{k7}{z}, xmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uqq zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2uqq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2uqq zmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16 Disp8(7f)
vcvtph2uqq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw zmm6, zmm5 #AVX512-FP16
vcvtph2uw zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uw zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uw zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2uw zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2uw zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtph2uw zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w zmm6, zmm5 #AVX512-FP16
vcvtph2w zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2w zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2w zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtph2w zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtph2w zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtph2w zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx ymm6, zmm5 #AVX512-FP16
vcvtps2phx ymm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtps2phx ymm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtps2phx ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtps2phx ymm6, DWORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvtps2phx ymm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtps2phx ymm6{k7}{z}, DWORD BCST [edx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph xmm6, zmm5 #AVX512-FP16
vcvtqq2ph xmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtqq2ph xmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtqq2ph xmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtqq2ph xmm6, QWORD BCST [ecx]{1to8} #AVX512-FP16 BROADCAST_EN
vcvtqq2ph xmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vcvtqq2ph xmm6{k7}{z}, QWORD BCST [edx-1024]{1to8} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtsd2sh xmm6, xmm5, xmm4 #AVX512-FP16
vcvtsd2sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsd2sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtsd2sh xmm6{k7}, xmm5, QWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtsd2sh xmm6, xmm5, QWORD PTR [ecx] #AVX512-FP16
vcvtsd2sh xmm6, xmm5, QWORD PTR [ecx+1016] #AVX512-FP16 Disp8(7f)
vcvtsd2sh xmm6{k7}{z}, xmm5, QWORD PTR [edx-1024] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2sd xmm6, xmm5, xmm4 #AVX512-FP16
vcvtsh2sd xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vcvtsh2sd xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2sd xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtsh2sd xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vcvtsh2sd xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2sd xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2si edx, xmm6 #AVX512-FP16
vcvtsh2si edx, xmm6{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si edx, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtsh2si edx, WORD PTR [ecx] #AVX512-FP16
vcvtsh2si edx, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2si edx, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvtsh2ss xmm6, xmm5, xmm4 #AVX512-FP16
vcvtsh2ss xmm6, xmm5, xmm4{sae} #AVX512-FP16 HAS_SAE
vcvtsh2ss xmm6{k7}{z}, xmm5, xmm4{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2ss xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtsh2ss xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vcvtsh2ss xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2ss xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2usi edx, xmm6 #AVX512-FP16
vcvtsh2usi edx, xmm6{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi edx, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtsh2usi edx, WORD PTR [ecx] #AVX512-FP16
vcvtsh2usi edx, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vcvtsh2usi edx, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
vcvtsi2sh xmm6, xmm5, edx #AVX512-FP16
vcvtsi2sh xmm6, xmm5, edx{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2sh xmm6, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vcvtsi2sh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vcvtsi2sh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vcvtsi2sh xmm6, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80)
vcvtss2sh xmm6, xmm5, xmm4 #AVX512-FP16
vcvtss2sh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vcvtss2sh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtss2sh xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvtss2sh xmm6, xmm5, DWORD PTR [ecx] #AVX512-FP16
vcvtss2sh xmm6, xmm5, DWORD PTR [ecx+508] #AVX512-FP16 Disp8(7f)
vcvtss2sh xmm6{k7}{z}, xmm5, DWORD PTR [edx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq zmm6, ymm5 #AVX512-FP16
vcvttph2dq zmm6, ymm5{sae} #AVX512-FP16 HAS_SAE
vcvttph2dq zmm6{k7}{z}, ymm5{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2dq zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vcvttph2dq zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vcvttph2dq zmm6, YMMWORD PTR [ecx+4064] #AVX512-FP16 Disp8(7f)
vcvttph2dq zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq zmm6, xmm5 #AVX512-FP16
vcvttph2qq zmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
# ---------------------------------------------------------------------------
# AVX512-FP16 assembler test input (Intel syntax, 32-bit addressing modes).
# Each line exercises exactly one EVEX encoding form; the trailing #-tags
# name the feature under test:
#   MASK_ENABLING  - opmask register {k7}
#   ZEROCTL        - zero-masking {z}
#   HAS_SAE        - suppress-all-exceptions {sae}
#   RC_CTRL        - static rounding control {rn-sae}
#   BROADCAST_EN   - embedded broadcast (WORD/DWORD/QWORD BCST)
#   Disp8(xx)      - compressed 8-bit displacement boundary value
# NOTE(review): do not reformat, reorder, or "fix" these lines — the
# companion dump (.d) file matches this instruction text and order exactly,
# and unusual-looking operand spellings are themselves the cases under test.
# ---------------------------------------------------------------------------
	vcvttph2qq	zmm6{k7}{z}, xmm5{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vcvttph2qq	zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvttph2qq	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvttph2qq	zmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16 Disp8(7f)
	vcvttph2qq	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvttph2udq	zmm6, ymm5	 #AVX512-FP16
	vcvttph2udq	zmm6, ymm5{sae}	 #AVX512-FP16 HAS_SAE
	vcvttph2udq	zmm6{k7}{z}, ymm5{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vcvttph2udq	zmm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvttph2udq	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvttph2udq	zmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16 Disp8(7f)
	vcvttph2udq	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvttph2uqq	zmm6, xmm5	 #AVX512-FP16
	vcvttph2uqq	zmm6, xmm5{sae}	 #AVX512-FP16 HAS_SAE
	vcvttph2uqq	zmm6{k7}{z}, xmm5{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vcvttph2uqq	zmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvttph2uqq	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvttph2uqq	zmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16 Disp8(7f)
	vcvttph2uqq	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvttph2uw	zmm6, zmm5	 #AVX512-FP16
	vcvttph2uw	zmm6, zmm5{sae}	 #AVX512-FP16 HAS_SAE
	vcvttph2uw	zmm6{k7}{z}, zmm5{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vcvttph2uw	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvttph2uw	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvttph2uw	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vcvttph2uw	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvttph2w	zmm6, zmm5	 #AVX512-FP16
	vcvttph2w	zmm6, zmm5{sae}	 #AVX512-FP16 HAS_SAE
	vcvttph2w	zmm6{k7}{z}, zmm5{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vcvttph2w	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvttph2w	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvttph2w	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vcvttph2w	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvttsh2si	edx, xmm6	 #AVX512-FP16
	vcvttsh2si	edx, xmm6{sae}	 #AVX512-FP16 HAS_SAE
	vcvttsh2si	edx, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16
	vcvttsh2si	edx, WORD PTR [ecx]	 #AVX512-FP16
	vcvttsh2si	edx, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vcvttsh2si	edx, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80)
	vcvttsh2usi	edx, xmm6	 #AVX512-FP16
	vcvttsh2usi	edx, xmm6{sae}	 #AVX512-FP16 HAS_SAE
	vcvttsh2usi	edx, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16
	vcvttsh2usi	edx, WORD PTR [ecx]	 #AVX512-FP16
	vcvttsh2usi	edx, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vcvttsh2usi	edx, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80)
	vcvtudq2ph	ymm6, zmm5	 #AVX512-FP16
	vcvtudq2ph	ymm6, zmm5{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vcvtudq2ph	ymm6{k7}{z}, zmm5{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vcvtudq2ph	ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvtudq2ph	ymm6, DWORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvtudq2ph	ymm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vcvtudq2ph	ymm6{k7}{z}, DWORD BCST [edx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvtuqq2ph	xmm6, zmm5	 #AVX512-FP16
	vcvtuqq2ph	xmm6, zmm5{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vcvtuqq2ph	xmm6{k7}{z}, zmm5{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vcvtuqq2ph	xmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvtuqq2ph	xmm6, QWORD BCST [ecx]{1to8}	 #AVX512-FP16 BROADCAST_EN
	vcvtuqq2ph	xmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vcvtuqq2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to8}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvtusi2sh	xmm6, xmm5, edx	 #AVX512-FP16
	vcvtusi2sh	xmm6, xmm5, edx{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vcvtusi2sh	xmm6, xmm5, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16
	vcvtusi2sh	xmm6, xmm5, DWORD PTR [ecx]	 #AVX512-FP16
	vcvtusi2sh	xmm6, xmm5, DWORD PTR [ecx+508]	 #AVX512-FP16 Disp8(7f)
	vcvtusi2sh	xmm6, xmm5, DWORD PTR [edx-512]	 #AVX512-FP16 Disp8(80)
	vcvtuw2ph	zmm6, zmm5	 #AVX512-FP16
	vcvtuw2ph	zmm6, zmm5{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vcvtuw2ph	zmm6{k7}{z}, zmm5{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vcvtuw2ph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvtuw2ph	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvtuw2ph	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vcvtuw2ph	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vcvtw2ph	zmm6, zmm5	 #AVX512-FP16
	vcvtw2ph	zmm6, zmm5{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vcvtw2ph	zmm6{k7}{z}, zmm5{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vcvtw2ph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vcvtw2ph	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vcvtw2ph	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vcvtw2ph	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vdivph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vdivph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vdivph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vdivph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vdivph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vdivph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vdivph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vdivsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vdivsh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vdivsh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vdivsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vdivsh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vdivsh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vdivsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfcmaddcph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfcmaddcph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfcmaddcph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfcmaddcph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfcmaddcph	zmm6, zmm5, DWORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfcmaddcph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfcmaddcph	zmm6{k7}{z}, zmm5, DWORD BCST [edx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfcmaddcsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfcmaddcsh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfcmaddcsh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfcmaddcsh	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfcmaddcsh	xmm6, xmm5, DWORD PTR [ecx]	 #AVX512-FP16
	vfcmaddcsh	xmm6, xmm5, DWORD PTR [ecx+508]	 #AVX512-FP16 Disp8(7f)
	vfcmaddcsh	xmm6{k7}{z}, xmm5, DWORD PTR [edx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfcmulcph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfcmulcph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfcmulcph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfcmulcph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfcmulcph	zmm6, zmm5, DWORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfcmulcph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfcmulcph	zmm6{k7}{z}, zmm5, DWORD BCST [edx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfcmulcsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfcmulcsh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfcmulcsh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfcmulcsh	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfcmulcsh	xmm6, xmm5, DWORD PTR [ecx]	 #AVX512-FP16
	vfcmulcsh	xmm6, xmm5, DWORD PTR [ecx+508]	 #AVX512-FP16 Disp8(7f)
	vfcmulcsh	xmm6{k7}{z}, xmm5, DWORD PTR [edx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmadd132ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmadd132ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmadd132ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmadd132ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmadd132ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmadd132ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmadd132ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmadd132sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmadd132sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmadd132sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmadd132sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmadd132sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfmadd132sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfmadd132sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmadd213ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmadd213ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmadd213ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmadd213ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmadd213ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmadd213ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmadd213ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmadd213sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmadd213sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmadd213sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmadd213sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmadd213sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfmadd213sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfmadd213sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmadd231ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmadd231ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmadd231ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmadd231ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmadd231ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmadd231ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmadd231ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmadd231sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmadd231sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmadd231sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmadd231sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmadd231sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfmadd231sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfmadd231sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmaddcph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmaddcph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmaddcph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmaddcph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmaddcph	zmm6, zmm5, DWORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmaddcph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmaddcph	zmm6{k7}{z}, zmm5, DWORD BCST [edx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmaddcsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmaddcsh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmaddcsh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmaddcsh	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmaddcsh	xmm6, xmm5, DWORD PTR [ecx]	 #AVX512-FP16
	vfmaddcsh	xmm6, xmm5, DWORD PTR [ecx+508]	 #AVX512-FP16 Disp8(7f)
	vfmaddcsh	xmm6{k7}{z}, xmm5, DWORD PTR [edx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmaddsub132ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmaddsub132ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmaddsub132ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmaddsub132ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmaddsub132ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmaddsub132ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmaddsub132ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmaddsub213ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmaddsub213ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmaddsub213ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmaddsub213ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmaddsub213ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmaddsub213ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmaddsub213ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmaddsub231ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmaddsub231ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmaddsub231ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmaddsub231ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmaddsub231ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmaddsub231ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmaddsub231ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmsub132ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmsub132ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsub132ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsub132ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsub132ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmsub132ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmsub132ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmsub132sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmsub132sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsub132sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsub132sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsub132sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfmsub132sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfmsub132sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmsub213ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmsub213ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsub213ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsub213ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsub213ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmsub213ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmsub213ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmsub213sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmsub213sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsub213sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsub213sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsub213sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfmsub213sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfmsub213sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmsub231ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmsub231ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsub231ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsub231ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsub231ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmsub231ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmsub231ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmsub231sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmsub231sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsub231sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsub231sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsub231sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfmsub231sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfmsub231sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfmsubadd132ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmsubadd132ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsubadd132ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsubadd132ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsubadd132ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmsubadd132ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmsubadd132ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmsubadd213ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmsubadd213ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsubadd213ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsubadd213ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsubadd213ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmsubadd213ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmsubadd213ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmsubadd231ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmsubadd231ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmsubadd231ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmsubadd231ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmsubadd231ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmsubadd231ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmsubadd231ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmulcph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfmulcph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmulcph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmulcph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmulcph	zmm6, zmm5, DWORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfmulcph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfmulcph	zmm6{k7}{z}, zmm5, DWORD BCST [edx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfmulcsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfmulcsh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfmulcsh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfmulcsh	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfmulcsh	xmm6, xmm5, DWORD PTR [ecx]	 #AVX512-FP16
	vfmulcsh	xmm6, xmm5, DWORD PTR [ecx+508]	 #AVX512-FP16 Disp8(7f)
	vfmulcsh	xmm6{k7}{z}, xmm5, DWORD PTR [edx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfnmadd132ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfnmadd132ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmadd132ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmadd132ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmadd132ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfnmadd132ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfnmadd132ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfnmadd132sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfnmadd132sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmadd132sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmadd132sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmadd132sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfnmadd132sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfnmadd132sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfnmadd213ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfnmadd213ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmadd213ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmadd213ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmadd213ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfnmadd213ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfnmadd213ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfnmadd213sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfnmadd213sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmadd213sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmadd213sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmadd213sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfnmadd213sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfnmadd213sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfnmadd231ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfnmadd231ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmadd231ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmadd231ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmadd231ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfnmadd231ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfnmadd231ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfnmadd231sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfnmadd231sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmadd231sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmadd231sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmadd231sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfnmadd231sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfnmadd231sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfnmsub132ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfnmsub132ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmsub132ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmsub132ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmsub132ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfnmsub132ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfnmsub132ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfnmsub132sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfnmsub132sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmsub132sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmsub132sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmsub132sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfnmsub132sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfnmsub132sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfnmsub213ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfnmsub213ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmsub213ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmsub213ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmsub213ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfnmsub213ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfnmsub213ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfnmsub213sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfnmsub213sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmsub213sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmsub213sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmsub213sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfnmsub213sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfnmsub213sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfnmsub231ph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vfnmsub231ph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmsub231ph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmsub231ph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmsub231ph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vfnmsub231ph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vfnmsub231ph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vfnmsub231sh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vfnmsub231sh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vfnmsub231sh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vfnmsub231sh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vfnmsub231sh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vfnmsub231sh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vfnmsub231sh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vfpclassph	k5, zmm6, 123	 #AVX512-FP16
	vfpclassph	k5{k7}, zmm6, 123	 #AVX512-FP16 MASK_ENABLING
	vfpclassph	k5{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vfpclassph	k5, WORD BCST [ecx]{1to32}, 123	 #AVX512-FP16 BROADCAST_EN
	vfpclassph	k5, ZMMWORD PTR [ecx+8128], 123	 #AVX512-FP16 Disp8(7f)
	vfpclassph	k5{k7}, WORD BCST [edx-256]{1to32}, 123	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
	vfpclasssh	k5, xmm6, 123	 #AVX512-FP16
	vfpclasssh	k5{k7}, xmm6, 123	 #AVX512-FP16 MASK_ENABLING
	vfpclasssh	k5{k7}, WORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vfpclasssh	k5, WORD PTR [ecx], 123	 #AVX512-FP16
	vfpclasssh	k5, WORD PTR [ecx+254], 123	 #AVX512-FP16 Disp8(7f)
	vfpclasssh	k5{k7}, WORD PTR [edx-256], 123	 #AVX512-FP16 Disp8(80) MASK_ENABLING
	vgetexpph	zmm6, zmm5	 #AVX512-FP16
	vgetexpph	zmm6, zmm5{sae}	 #AVX512-FP16 HAS_SAE
	vgetexpph	zmm6{k7}{z}, zmm5{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vgetexpph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vgetexpph	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vgetexpph	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vgetexpph	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vgetexpsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vgetexpsh	xmm6, xmm5, xmm4{sae}	 #AVX512-FP16 HAS_SAE
	vgetexpsh	xmm6{k7}{z}, xmm5, xmm4{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vgetexpsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vgetexpsh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vgetexpsh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vgetexpsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vgetmantph	zmm6, zmm5, 123	 #AVX512-FP16
	vgetmantph	zmm6, zmm5{sae}, 123	 #AVX512-FP16 HAS_SAE
	vgetmantph	zmm6{k7}{z}, zmm5{sae}, 123	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vgetmantph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vgetmantph	zmm6, WORD BCST [ecx], 123	 #AVX512-FP16 BROADCAST_EN
	vgetmantph	zmm6, ZMMWORD PTR [ecx+8128], 123	 #AVX512-FP16 Disp8(7f)
	vgetmantph	zmm6{k7}{z}, WORD BCST [edx-256], 123	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vgetmantsh	xmm6, xmm5, xmm4, 123	 #AVX512-FP16
	vgetmantsh	xmm6, xmm5, xmm4{sae}, 123	 #AVX512-FP16 HAS_SAE
	vgetmantsh	xmm6{k7}{z}, xmm5, xmm4{sae}, 123	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vgetmantsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vgetmantsh	xmm6, xmm5, WORD PTR [ecx], 123	 #AVX512-FP16
	vgetmantsh	xmm6, xmm5, WORD PTR [ecx+254], 123	 #AVX512-FP16 Disp8(7f)
	vgetmantsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256], 123	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vmaxph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vmaxph	zmm6, zmm5, zmm4{sae}	 #AVX512-FP16 HAS_SAE
	vmaxph	zmm6{k7}{z}, zmm5, zmm4{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vmaxph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vmaxph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vmaxph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vmaxph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vmaxsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vmaxsh	xmm6, xmm5, xmm4{sae}	 #AVX512-FP16 HAS_SAE
	vmaxsh	xmm6{k7}{z}, xmm5, xmm4{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vmaxsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vmaxsh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vmaxsh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vmaxsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vminph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vminph	zmm6, zmm5, zmm4{sae}	 #AVX512-FP16 HAS_SAE
	vminph	zmm6{k7}{z}, zmm5, zmm4{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vminph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vminph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vminph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vminph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vminsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vminsh	xmm6, xmm5, xmm4{sae}	 #AVX512-FP16 HAS_SAE
	vminsh	xmm6{k7}{z}, xmm5, xmm4{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vminsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vminsh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vminsh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vminsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vmovsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vmovsh	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16 MASK_ENABLING ZEROCTL
	vmovsh	xmm6{k7}, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vmovsh	xmm6, WORD PTR [ecx]	 #AVX512-FP16
	vmovsh	xmm6, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vmovsh	xmm6{k7}{z}, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vmovsh	WORD PTR [esp+esi*8+0x10000000]{k7}, xmm6	 #AVX512-FP16 MASK_ENABLING
	vmovsh	WORD PTR [ecx], xmm6	 #AVX512-FP16
	vmovsh	WORD PTR [ecx+254], xmm6	 #AVX512-FP16 Disp8(7f)
	vmovsh	WORD PTR [edx-256]{k7}, xmm6	 #AVX512-FP16 Disp8(80) MASK_ENABLING
	vmovw	xmm6, edx	 #AVX512-FP16
	vmovw	edx, xmm6	 #AVX512-FP16
	vmovw	xmm6, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16
	vmovw	xmm6, WORD PTR [ecx]	 #AVX512-FP16
	vmovw	xmm6, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vmovw	xmm6, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80)
	vmovw	WORD PTR [esp+esi*8+0x10000000], xmm6	 #AVX512-FP16
	vmovw	WORD PTR [ecx], xmm6	 #AVX512-FP16
	vmovw	WORD PTR [ecx+254], xmm6	 #AVX512-FP16 Disp8(7f)
	vmovw	WORD PTR [edx-256], xmm6	 #AVX512-FP16 Disp8(80)
	vmulph	zmm6, zmm5, zmm4	 #AVX512-FP16
	vmulph	zmm6, zmm5, zmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vmulph	zmm6{k7}{z}, zmm5, zmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vmulph	zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vmulph	zmm6, zmm5, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vmulph	zmm6, zmm5, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vmulph	zmm6{k7}{z}, zmm5, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vmulsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vmulsh	xmm6, xmm5, xmm4{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
	vmulsh	xmm6{k7}{z}, xmm5, xmm4{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
	vmulsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vmulsh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vmulsh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vmulsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vrcpph	zmm6, zmm5	 #AVX512-FP16
	vrcpph	zmm6{k7}{z}, zmm5	 #AVX512-FP16 MASK_ENABLING ZEROCTL
	vrcpph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vrcpph	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vrcpph	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vrcpph	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vrcpsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vrcpsh	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16 MASK_ENABLING ZEROCTL
	vrcpsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vrcpsh	xmm6, xmm5, WORD PTR [ecx]	 #AVX512-FP16
	vrcpsh	xmm6, xmm5, WORD PTR [ecx+254]	 #AVX512-FP16 Disp8(7f)
	vrcpsh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vreduceph	zmm6, zmm5, 123	 #AVX512-FP16
	vreduceph	zmm6, zmm5{sae}, 123	 #AVX512-FP16 HAS_SAE
	vreduceph	zmm6{k7}{z}, zmm5{sae}, 123	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vreduceph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vreduceph	zmm6, WORD BCST [ecx], 123	 #AVX512-FP16 BROADCAST_EN
	vreduceph	zmm6, ZMMWORD PTR [ecx+8128], 123	 #AVX512-FP16 Disp8(7f)
	vreduceph	zmm6{k7}{z}, WORD BCST [edx-256], 123	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vreducesh	xmm6, xmm5, xmm4, 123	 #AVX512-FP16
	vreducesh	xmm6, xmm5, xmm4{sae}, 123	 #AVX512-FP16 HAS_SAE
	vreducesh	xmm6{k7}{z}, xmm5, xmm4{sae}, 123	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vreducesh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vreducesh	xmm6, xmm5, WORD PTR [ecx], 123	 #AVX512-FP16
	vreducesh	xmm6, xmm5, WORD PTR [ecx+254], 123	 #AVX512-FP16 Disp8(7f)
	vreducesh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256], 123	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vrndscaleph	zmm6, zmm5, 123	 #AVX512-FP16
	vrndscaleph	zmm6, zmm5{sae}, 123	 #AVX512-FP16 HAS_SAE
	vrndscaleph	zmm6{k7}{z}, zmm5{sae}, 123	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vrndscaleph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vrndscaleph	zmm6, WORD BCST [ecx], 123	 #AVX512-FP16 BROADCAST_EN
	vrndscaleph	zmm6, ZMMWORD PTR [ecx+8128], 123	 #AVX512-FP16 Disp8(7f)
	vrndscaleph	zmm6{k7}{z}, WORD BCST [edx-256], 123	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vrndscalesh	xmm6, xmm5, xmm4, 123	 #AVX512-FP16
	vrndscalesh	xmm6, xmm5, xmm4{sae}, 123	 #AVX512-FP16 HAS_SAE
	vrndscalesh	xmm6{k7}{z}, xmm5, xmm4{sae}, 123	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
	vrndscalesh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
	vrndscalesh	xmm6, xmm5, WORD PTR [ecx], 123	 #AVX512-FP16
	vrndscalesh	xmm6, xmm5, WORD PTR [ecx+254], 123	 #AVX512-FP16 Disp8(7f)
	vrndscalesh	xmm6{k7}{z}, xmm5, WORD PTR [edx-256], 123	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
	vrsqrtph	zmm6, zmm5	 #AVX512-FP16
	vrsqrtph	zmm6{k7}{z}, zmm5	 #AVX512-FP16 MASK_ENABLING ZEROCTL
	vrsqrtph	zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
	vrsqrtph	zmm6, WORD BCST [ecx]	 #AVX512-FP16 BROADCAST_EN
	vrsqrtph	zmm6, ZMMWORD PTR [ecx+8128]	 #AVX512-FP16 Disp8(7f)
	vrsqrtph	zmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
	vrsqrtsh	xmm6, xmm5, xmm4	 #AVX512-FP16
	vrsqrtsh	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16 MASK_ENABLING ZEROCTL
	vrsqrtsh	xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vrsqrtsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vrsqrtsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vrsqrtsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vscalefph zmm6, zmm5, zmm4 #AVX512-FP16
vscalefph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vscalefph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vscalefph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vscalefph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vscalefph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefsh xmm6, xmm5, xmm4 #AVX512-FP16
vscalefsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vscalefsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vscalefsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vscalefsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vscalefsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph zmm6, zmm5 #AVX512-FP16
vsqrtph zmm6, zmm5{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtph zmm6{k7}{z}, zmm5{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtph zmm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsqrtph zmm6, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vsqrtph zmm6, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vsqrtph zmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtsh xmm6, xmm5, xmm4 #AVX512-FP16
vsqrtsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsqrtsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vsqrtsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vsqrtsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsubph zmm6, zmm5, zmm4 #AVX512-FP16
vsubph zmm6, zmm5, zmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsubph zmm6{k7}{z}, zmm5, zmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubph zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsubph zmm6, zmm5, WORD BCST [ecx] #AVX512-FP16 BROADCAST_EN
vsubph zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512-FP16 Disp8(7f)
vsubph zmm6{k7}{z}, zmm5, WORD BCST [edx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubsh xmm6, xmm5, xmm4 #AVX512-FP16
vsubsh xmm6, xmm5, xmm4{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsubsh xmm6{k7}{z}, xmm5, xmm4{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubsh xmm6{k7}, xmm5, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsubsh xmm6, xmm5, WORD PTR [ecx] #AVX512-FP16
vsubsh xmm6, xmm5, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vsubsh xmm6{k7}{z}, xmm5, WORD PTR [edx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vucomish xmm6, xmm5 #AVX512-FP16
vucomish xmm6, xmm5{sae} #AVX512-FP16 HAS_SAE
vucomish xmm6, WORD PTR [esp+esi*8+0x10000000] #AVX512-FP16
vucomish xmm6, WORD PTR [ecx] #AVX512-FP16
vucomish xmm6, WORD PTR [ecx+254] #AVX512-FP16 Disp8(7f)
vucomish xmm6, WORD PTR [edx-256] #AVX512-FP16 Disp8(80)
# ==== file boundary: gas/testsuite/gas/i386/x86-64-fma-scalar.s ====
# (repo: tactcomplabs/xbgas-binutils-gdb, 4,117 bytes)
# Check 64bit AVX scalar instructions
#
# GAS testsuite input: every instruction line below is a fixed test vector
# whose assembled encoding is compared against an expected-disassembly
# file, so the instruction text itself must not be changed.
#
# Coverage: the 12 FMA scalar mnemonics vf{,n}m{add,sub}{132,213,231}s{d,s},
# each with a register source and a memory source, written first in AT&T
# syntax and then repeated in Intel syntax (Intel forms additionally test
# both an explicit "QWORD/DWORD PTR" operand and a bare "[rcx]" operand,
# which must infer the access size from the mnemonic).
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm, xmm
# AT&T operand order: src3 (reg or 64-bit mem), src2, dst — double precision.
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%rcx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%rcx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%rcx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%rcx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%rcx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%rcx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%rcx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%rcx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%rcx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%rcx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%rcx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
# Same 12 mnemonics, single-precision (32-bit memory source).
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%rcx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%rcx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%rcx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%rcx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%rcx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%rcx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%rcx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%rcx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%rcx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%rcx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%rcx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%rcx),%xmm6,%xmm2
# Switch to Intel syntax: operand order reverses to dst, src2, src3.
.intel_syntax noprefix
# Tests for op xmm/mem64, xmm, xmm
# Each mnemonic is tested with a register source, an explicitly sized
# memory source, and an unsized "[rcx]" (size inferred from the mnemonic).
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd132sd xmm2,xmm6,[rcx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd213sd xmm2,xmm6,[rcx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd231sd xmm2,xmm6,[rcx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub132sd xmm2,xmm6,[rcx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub213sd xmm2,xmm6,[rcx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub231sd xmm2,xmm6,[rcx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd132sd xmm2,xmm6,[rcx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd213sd xmm2,xmm6,[rcx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd231sd xmm2,xmm6,[rcx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub132sd xmm2,xmm6,[rcx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub213sd xmm2,xmm6,[rcx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub231sd xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm, xmm
# Single-precision Intel-syntax forms (DWORD PTR memory source).
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd132ss xmm2,xmm6,[rcx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd213ss xmm2,xmm6,[rcx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd231ss xmm2,xmm6,[rcx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub132ss xmm2,xmm6,[rcx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub213ss xmm2,xmm6,[rcx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub231ss xmm2,xmm6,[rcx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd132ss xmm2,xmm6,[rcx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd213ss xmm2,xmm6,[rcx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd231ss xmm2,xmm6,[rcx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub132ss xmm2,xmm6,[rcx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub213ss xmm2,xmm6,[rcx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub231ss xmm2,xmm6,[rcx]
# ==== file boundary: gas/testsuite/gas/i386/avx512vbmi.s ====
# (repo: tactcomplabs/xbgas-binutils-gdb, 4,634 bytes)
# Check 32bit AVX512VBMI instructions
#
# GAS testsuite input: every instruction line below is a fixed test vector
# matched against an expected-disassembly file; the instruction text must
# stay byte-identical.
#
# Coverage: vpermb / vpermi2b / vpermt2b / vpmultishiftqb with EVEX
# features — opmask {%k7}, zero-masking {z}, compressed disp8 (values at
# the +/-8128/8192 boundaries of the 64-byte-scaled range), and for
# vpmultishiftqb the {1to8} embedded broadcast. AT&T syntax first, then
# the same forms repeated in Intel syntax.
.allow_index_reg
.text
_start:
# vpermb: byte permute; reg form plus disp8*N boundary memory forms.
vpermb %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpermb (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpermb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpermb 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermb 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermb -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermb -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
# vpermi2b: two-table byte permute, indices overwritten.
vpermi2b %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpermi2b %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermi2b %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpermi2b (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpermi2b -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpermi2b 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermi2b 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermi2b -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermi2b -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
# vpermt2b: two-table byte permute, table overwritten.
vpermt2b %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpermt2b %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermt2b %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpermt2b (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpermt2b -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpermt2b 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermt2b 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermt2b -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermt2b -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
# vpmultishiftqb: adds {1to8} broadcast forms with their own
# +/-1016/1024 disp8*8 boundary cases.
vpmultishiftqb %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpmultishiftqb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpmultishiftqb (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb (%eax){1to8}, %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI
# Same test vectors repeated in Intel syntax (operand order reversed).
.intel_syntax noprefix
vpermb zmm6, zmm5, zmm4 # AVX512VBMI
vpermb zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpermb zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpermb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpermb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpermi2b zmm6, zmm5, zmm4 # AVX512VBMI
vpermi2b zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpermi2b zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpermt2b zmm6, zmm5, zmm4 # AVX512VBMI
vpermt2b zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpermt2b zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, zmm4 # AVX512VBMI
vpmultishiftqb zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpmultishiftqb zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, [eax]{1to8} # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, [edx+1016]{1to8} # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, [edx+1024]{1to8} # AVX512VBMI
vpmultishiftqb zmm6, zmm5, [edx-1024]{1to8} # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, [edx-1032]{1to8} # AVX512VBMI
# ==== file boundary: gas/testsuite/gas/i386/x86-64-avx512_fp16_vl.s ====
# (repo: tactcomplabs/xbgas-binutils-gdb, 117,571 bytes)
# Check 64bit AVX512-FP16,AVX512VL instructions
.allow_index_reg
.text
_start:
vaddph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vaddph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vaddph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcmpph $123, %ymm28, %ymm29, %k5 #AVX512-FP16,AVX512VL
vcmpph $123, %ymm28, %ymm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, %xmm28, %xmm29, %k5 #AVX512-FP16,AVX512VL
vcmpph $123, %xmm28, %xmm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, 0x10000000(%rbp, %r14, 8), %xmm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, (%r9){1to8}, %xmm29, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph $123, 2032(%rcx), %xmm29, %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph $123, -256(%rdx){1to8}, %xmm29, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpph $123, 0x10000000(%rbp, %r14, 8), %ymm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, (%r9){1to16}, %ymm29, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph $123, 4064(%rcx), %ymm29, %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph $123, -256(%rdx){1to16}, %ymm29, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcvtdq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtdq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtdq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtdq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph -512(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtdq2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph -512(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtpd2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtpd2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtpd2ph (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph -1024(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph -1024(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2dq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2dq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2pd %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2pd %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2psx %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2psx %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2qq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2qq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2udq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2udq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2uqq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2uqq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2uw %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2uw %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2w %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2w %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtps2phx %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtps2phx %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phxx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtps2phx (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phxx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx -512(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phxy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx -512(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtqq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtqq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtqq2ph (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph -1024(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph -1024(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2dq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2dq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2qq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2qq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2udq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2udq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2uqq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2uqq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
# ---------------------------------------------------------------------------
# Auto-generated AVX512-FP16 + AVX512VL assembler test vectors (AT&T syntax).
# Each line is one instruction form; the trailing "#..." annotation encodes
# the ISA extensions required plus encoding properties checked by the paired
# expected-output file: BROADCAST_EN (embedded broadcast), Disp8(..) (EVEX
# compressed displacement byte), MASK_ENABLING ({%k7}), ZEROCTL ({z}).
# Do not reorder or reformat these lines: the testsuite's expected
# disassembly is matched positionally against this input.
# NOTE(review): broadcast factors follow element width — {1to8}/{1to16} for
# word (fp16) elements, {1to4}/{1to8} for dword sources, {1to2}/{1to4} for
# qword sources — and Disp8 scaling matches (e.g. -512 = 0x80*4) — confirm
# against the paired .d file before any regeneration.
# ---------------------------------------------------------------------------
vcvttph2uqq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2uw %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2uw %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2w %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2w %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtudq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtudq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtudq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph -512(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph -512(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtuqq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtuqq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuqq2ph (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph -1024(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph -1024(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtuw2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtuw2ph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtw2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtw2ph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vdivph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vdivph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfcmaddcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfcmaddcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfcmulcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfcmulcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmadd132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmadd132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmadd213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmadd213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmadd231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmadd231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddsub132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddsub132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddsub213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddsub213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddsub231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddsub231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsub132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsub132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsub213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsub213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsub231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsub231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsubadd132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsubadd132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsubadd213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsubadd213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsubadd231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsubadd231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmulcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmulcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmadd132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmadd132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmadd213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmadd213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmadd231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmadd231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmsub132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmsub132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmsub213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmsub213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmsub231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmsub231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfpclassph $123, %xmm30, %k5 #AVX512-FP16,AVX512VL
vfpclassph $123, %xmm30, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph $123, %ymm30, %k5 #AVX512-FP16,AVX512VL
vfpclassph $123, %ymm30, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassphx $123, 0x10000000(%rbp, %r14, 8), %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph $123, (%r9){1to8}, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassphx $123, 2032(%rcx), %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph $123, -256(%rdx){1to8}, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclassph $123, (%r9){1to16}, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassphy $123, 4064(%rcx), %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph $123, -256(%rdx){1to16}, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vgetexpph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vgetexpph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vgetexpph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vgetmantph $123, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph $123, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vgetmantph $123, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph $123, 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph $123, (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph $123, 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph $123, -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph $123, (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph $123, 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph $123, -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vmaxph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vmaxph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vminph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vminph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vminph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vminph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vminph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vminph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vminph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vminph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vmulph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vmulph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmulph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmulph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vrcpph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vrcpph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vreduceph $123, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph $123, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vreduceph $123, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph $123, 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph $123, (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph $123, 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph $123, -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph $123, (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph $123, 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph $123, -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vrndscaleph $123, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph $123, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vrndscaleph $123, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph $123, 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph $123, (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph $123, 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph $123, -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph $123, (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph $123, 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph $123, -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vrsqrtph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vrsqrtph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vscalefph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vscalefph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vsqrtph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vsqrtph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vsubph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vsubph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsubph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsubph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
.intel_syntax noprefix
vaddph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vaddph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vaddph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vaddph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vaddph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcmpph k5, ymm29, ymm28, 123 #AVX512-FP16,AVX512VL
vcmpph k5{k7}, ymm29, ymm28, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5, xmm29, xmm28, 123 #AVX512-FP16,AVX512VL
vcmpph k5{k7}, xmm29, xmm28, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5, xmm29, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph k5, xmm29, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph k5{k7}, xmm29, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpph k5{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5, ymm29, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph k5, ymm29, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph k5{k7}, ymm29, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcvtdq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtdq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtdq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtdq2ph xmm30, DWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtdq2ph xmm30, DWORD BCST [r9]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtpd2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtpd2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtpd2ph xmm30, QWORD BCST [r9]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30, QWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2dq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2dq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2pd xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2pd ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2psx xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2psx ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2qq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2qq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2udq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2udq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2uqq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2uqq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2uw xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtph2uw ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2w xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtph2w ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtps2phx xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtps2phx xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtps2phx xmm30, DWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phx xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx xmm30{k7}{z}, DWORD BCST [rdx-512]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx xmm30, DWORD BCST [r9]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phx xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx xmm30{k7}{z}, DWORD BCST [rdx-512]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtqq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtqq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtqq2ph xmm30, QWORD BCST [r9]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30, QWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2dq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2dq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2qq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2qq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2udq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2udq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2uqq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2uqq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2uw xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvttph2uw ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2w xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvttph2w ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtudq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtudq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtudq2ph xmm30, DWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30, DWORD BCST [r9]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtuqq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtuqq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuqq2ph xmm30, QWORD BCST [r9]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30, QWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtuw2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtuw2ph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtw2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtw2ph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vdivph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vdivph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vdivph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vdivph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfcmaddcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfcmaddcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfcmulcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfcmulcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmadd{132,213,231}ph (packed FP16 FMA) and vfmaddcph (complex FMA).
# Real-valued FP16 ops broadcast at WORD granularity; the complex form
# broadcasts a DWORD pair.  Disp8(7f)/Disp8(80) lines pin the EVEX
# compressed-displacement boundary cases.
vfmadd132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmadd132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmadd132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmadd213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmadd213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmadd231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmadd231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddsub{132,213,231}ph: FP16 fused multiply with alternating add/subtract.
# Same register/masked/broadcast/Disp8 coverage pattern as the vfmadd group.
vfmaddsub132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddsub132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddsub132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddsub213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddsub213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddsub231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddsub231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmsub{132,213,231}ph (fused multiply-subtract) and
# vfmsubadd{132,213,231}ph (alternating subtract/add), FP16 packed forms.
vfmsub132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsub132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsub132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsub213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsub213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsub231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsub231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsubadd132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsubadd132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsubadd213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsubadd213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsubadd231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsubadd231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmulcph: complex FP16 multiply (non-conjugated); DWORD-granularity broadcast.
vfmulcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmulcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmulcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfnmadd{132,213,231}ph and vfnmsub{132,213,231}ph: negated FP16 FMA forms.
vfnmadd132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmadd132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmadd132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmadd213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmadd213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmadd231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmadd231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmsub132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmsub132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmsub213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmsub213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmsub231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmsub231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfpclassph (classify FP16 elements into a mask register; explicit {1toN}
# decorators disambiguate the broadcast element count for memory operands),
# vgetexpph (extract exponents) and vgetmantph (extract mantissas, imm8
# controls normalization/sign).
vfpclassph k5, xmm30, 123 #AVX512-FP16,AVX512VL
vfpclassph k5{k7}, xmm30, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5, ymm30, 123 #AVX512-FP16,AVX512VL
vfpclassph k5{k7}, ymm30, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5, WORD BCST [r9]{1to8}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassph k5, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph k5{k7}, WORD BCST [rdx-256]{1to8}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclassph k5, WORD BCST [r9]{1to16}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassph k5, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph k5{k7}, WORD BCST [rdx-256]{1to16}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vgetexpph xmm30, xmm29 #AVX512-FP16,AVX512VL
vgetexpph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph ymm30, ymm29 #AVX512-FP16,AVX512VL
vgetexpph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph ymm30, ymm29, 123 #AVX512-FP16,AVX512VL
vgetmantph ymm30{k7}{z}, ymm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph xmm30, xmm29, 123 #AVX512-FP16,AVX512VL
vgetmantph xmm30{k7}{z}, xmm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph xmm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph xmm30, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph xmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph ymm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph ymm30, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph ymm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vmaxph / vminph / vmulph: packed FP16 max, min and multiply.
vmaxph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vmaxph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vmaxph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vminph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vminph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vminph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vminph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vminph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vminph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vminph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vminph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vmulph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vmulph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vmulph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vmulph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vrcpph (approximate reciprocal), vreduceph (reduction transform, imm8
# selects rounding granularity) and vrndscaleph (round to scaled integral,
# imm8 selects mode/scale).
vrcpph xmm30, xmm29 #AVX512-FP16,AVX512VL
vrcpph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph ymm30, ymm29 #AVX512-FP16,AVX512VL
vrcpph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph ymm30, ymm29, 123 #AVX512-FP16,AVX512VL
vreduceph ymm30{k7}{z}, ymm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph xmm30, xmm29, 123 #AVX512-FP16,AVX512VL
vreduceph xmm30{k7}{z}, xmm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph xmm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph xmm30, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph xmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph ymm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph ymm30, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph ymm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph ymm30, ymm29, 123 #AVX512-FP16,AVX512VL
vrndscaleph ymm30{k7}{z}, ymm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph xmm30, xmm29, 123 #AVX512-FP16,AVX512VL
vrndscaleph xmm30{k7}{z}, xmm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph xmm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph xmm30, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph xmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph ymm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph ymm30, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph ymm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vrsqrtph (approximate reciprocal square root), vscalefph (scale by power
# of two), vsqrtph (square root) and vsubph (subtract), packed FP16 forms.
vrsqrtph xmm30, xmm29 #AVX512-FP16,AVX512VL
vrsqrtph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph ymm30, ymm29 #AVX512-FP16,AVX512VL
vrsqrtph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vscalefph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vscalefph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph xmm30, xmm29 #AVX512-FP16,AVX512VL
vsqrtph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph ymm30, ymm29 #AVX512-FP16,AVX512VL
vsqrtph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vsubph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vsubph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vsubph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vsubph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
|
tactcomplabs/xbgas-binutils-gdb
| 1,466
|
gas/testsuite/gas/i386/x86-64-amx.s
|
.allow_index_reg
.text
_start:
ldtilecfg (%rcx,%rdx,2)
sttilecfg (%rcx,%rdx,2)
tdpbf16ps %tmm5, %tmm4, %tmm3
tdpbssd %tmm3, %tmm2, %tmm1
tdpbsud %tmm3, %tmm2, %tmm1
tdpbusd %tmm3, %tmm2, %tmm1
tdpbuud %tmm3, %tmm2, %tmm1
tileloadd foo, %tmm5
tileloadd (%rcx), %tmm5
tileloadd (%ecx), %tmm5
tileloadd (%rcx,%rdx,1), %tmm5
tileloadd (%ecx,%edx,2), %tmm1
tileloaddt1 foo, %tmm5
tileloaddt1 (%rcx), %tmm5
tileloaddt1 (%ecx), %tmm5
tileloaddt1 (%rcx,%rdx,1), %tmm5
tileloaddt1 (%ecx,%edx,2), %tmm1
tileloaddt1 (%rcx,%riz,2), %tmm1
tilerelease
tilestored %tmm5, (%rcx)
tilestored %tmm5, (%ecx)
tilestored %tmm5, (%rcx,%rdx,1)
tilestored %tmm1, (%ecx,%edx,2)
tilezero %tmm0
tilezero %tmm5
tilezero %tmm7
.intel_syntax noprefix
ldtilecfg [rcx]
ldtilecfg [rbx]
sttilecfg [rcx]
sttilecfg [rbx]
tdpbf16ps tmm3, tmm4, tmm5
tdpbssd tmm1, tmm2, tmm3
tdpbsud tmm1, tmm2, tmm3
tdpbusd tmm1, tmm2, tmm3
tdpbuud tmm1, tmm2, tmm3
tileloadd tmm5, foo
tileloadd tmm5, [rcx]
tileloadd tmm5, [ecx]
tileloadd tmm5, [rcx+rdx]
tileloadd tmm1, [ecx+edx*2]
tileloaddt1 tmm5, foo
tileloaddt1 tmm5, [rcx]
tileloaddt1 tmm5, [ecx]
tileloaddt1 tmm5, [rcx+rdx]
tileloaddt1 tmm1, [ecx+edx*2]
tileloaddt1 tmm1, [rcx+riz*2]
tilerelease
tilestored [rcx], tmm5
tilestored [ecx], tmm5
tilestored [rcx+rdx], tmm5
tilestored [ecx+edx*2], tmm1
tilezero tmm0
tilezero tmm5
tilezero tmm7
|
tactcomplabs/xbgas-binutils-gdb
| 6,952
|
gas/testsuite/gas/i386/x86-64-avx-wig.s
|
# Check AVX WIG instructions
.allow_index_reg
.text
_start:
vaddpd %ymm4,%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddsd %xmm4,%xmm6,%xmm2
vaddss %xmm4,%xmm6,%xmm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaesdec %xmm4,%xmm6,%xmm2
vaesdeclast %xmm4,%xmm6,%xmm2
vaesenc %xmm4,%xmm6,%xmm2
vaesenclast %xmm4,%xmm6,%xmm2
vaesimc %xmm4,%xmm6
vaeskeygenassist $7,%xmm4,%xmm6
vandnpd %ymm4,%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpss $7,%xmm4,%xmm6,%xmm2
vcomisd %xmm4,%xmm6
vcomiss %xmm4,%xmm6
vcvtdq2pd %xmm4,%ymm4
vcvtdq2ps %ymm4,%ymm6
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psy %ymm4,%xmm4
vcvtps2dq %ymm4,%ymm6
vcvtps2pd %xmm4,%ymm4
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqy %ymm4,%xmm4
vcvttps2dq %ymm4,%ymm6
vdivpd %ymm4,%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivsd %xmm4,%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdpps $7,%ymm4,%ymm6,%ymm2
vextractps $7,%xmm4,%rcx
vhaddpd %ymm4,%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vlddqu (%rcx),%ymm4
vldmxcsr (%rcx)
vmaskmovdqu %xmm4,%xmm6
vmaxpd %ymm4,%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vminpd %ymm4,%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminsd %xmm4,%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vmovapd %ymm4,%ymm6
vmovaps %ymm4,%ymm6
{store} vmovapd %ymm4,%ymm6
{store} vmovaps %ymm4,%ymm6
vmovddup %ymm4,%ymm6
vmovdqa %ymm4,%ymm6
vmovdqu %ymm4,%ymm6
{store} vmovdqa %ymm4,%ymm6
{store} vmovdqu %ymm4,%ymm6
vmovhlps %xmm4,%xmm6,%xmm2
vmovhpd (%rcx),%xmm4,%xmm6
vmovhpd %xmm4,(%rcx)
vmovhps (%rcx),%xmm4,%xmm6
vmovhps %xmm4,(%rcx)
vmovlhps %xmm4,%xmm6,%xmm2
vmovlpd (%rcx),%xmm4,%xmm6
vmovlpd %xmm4,(%rcx)
vmovlps (%rcx),%xmm4,%xmm6
vmovlps %xmm4,(%rcx)
vmovmskpd %xmm4,%rcx
vmovmskps %xmm4,%rcx
vmovntdq %ymm4,(%rcx)
vmovntdqa (%rcx),%xmm4
vmovntpd %ymm4,(%rcx)
vmovntps %ymm4,(%rcx)
vmovq %xmm4,%xmm6
vmovq %xmm4,(%rcx)
vmovq %xmm4,%rcx
vmovq %rcx,%xmm4
vmovsd (%rcx),%xmm4
vmovsd %xmm4,(%rcx)
vmovshdup %ymm4,%ymm6
vmovsldup %ymm4,%ymm6
vmovss (%rcx),%xmm4
vmovss %xmm4,(%rcx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%rcx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%rcx)
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmulpd %ymm4,%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulsd %xmm4,%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vorpd %ymm4,%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vpabsb %xmm4,%xmm6
vpabsd %xmm4,%xmm6
vpabsw %xmm4,%xmm6
vpackssdw %xmm4,%xmm6,%xmm2
vpacksswb %xmm4,%xmm6,%xmm2
vpackusdw %xmm4,%xmm6,%xmm2
vpackuswb %xmm4,%xmm6,%xmm2
vpaddb %xmm4,%xmm6,%xmm2
vpaddd %xmm4,%xmm6,%xmm2
vpaddq %xmm4,%xmm6,%xmm2
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsw %xmm4,%xmm6,%xmm2
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusw %xmm4,%xmm6,%xmm2
vpaddw %xmm4,%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpand %xmm4,%xmm6,%xmm2
vpandn %xmm4,%xmm6,%xmm2
vpavgb %xmm4,%xmm6,%xmm2
vpavgw %xmm4,%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpistri $7,%xmm4,%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpextrb $0, %xmm0, %eax
vpextrb $0, %xmm0, (%rax)
vpextrw $0, %xmm0, %eax
{store} vpextrw $0, %xmm0, %eax
vpextrw $0, %xmm0, (%rax)
vphaddd %xmm4,%xmm6,%xmm2
vphaddsw %xmm4,%xmm6,%xmm2
vphaddw %xmm4,%xmm6,%xmm2
vphminposuw %xmm4,%xmm6
vphsubd %xmm4,%xmm6,%xmm2
vphsubsw %xmm4,%xmm6,%xmm2
vphsubw %xmm4,%xmm6,%xmm2
vpinsrb $0, %eax, %xmm0, %xmm0
vpinsrb $0, (%rax), %xmm0, %xmm0
vpinsrw $0, %eax, %xmm0, %xmm0
vpinsrw $0, (%rax), %xmm0, %xmm0
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxuw %xmm4,%xmm6,%xmm2
vpminsb %xmm4,%xmm6,%xmm2
vpminsd %xmm4,%xmm6,%xmm2
vpminsw %xmm4,%xmm6,%xmm2
vpminub %xmm4,%xmm6,%xmm2
vpminud %xmm4,%xmm6,%xmm2
vpminuw %xmm4,%xmm6,%xmm2
vpmovmskb %xmm4,%rcx
vpmovsxbd %xmm4,%xmm6
vpmovsxbq %xmm4,%xmm6
vpmovsxbw %xmm4,%xmm6
vpmovsxdq %xmm4,%xmm6
vpmovsxwd %xmm4,%xmm6
vpmovsxwq %xmm4,%xmm6
vpmovzxbd %xmm4,%xmm6
vpmovzxbq %xmm4,%xmm6
vpmovzxbw %xmm4,%xmm6
vpmovzxdq %xmm4,%xmm6
vpmovzxwd %xmm4,%xmm6
vpmovzxwq %xmm4,%xmm6
vpmuldq %xmm4,%xmm6,%xmm2
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhw %xmm4,%xmm6,%xmm2
vpmulld %xmm4,%xmm6,%xmm2
vpmullw %xmm4,%xmm6,%xmm2
vpmuludq %xmm4,%xmm6,%xmm2
vpor %xmm4,%xmm6,%xmm2
vpsadbw %xmm4,%xmm6,%xmm2
vpshufb %xmm4,%xmm6,%xmm2
vpshufd $7,%xmm4,%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshuflw $7,%xmm4,%xmm6
vpsignb %xmm4,%xmm6,%xmm2
vpsignd %xmm4,%xmm6,%xmm2
vpsignw %xmm4,%xmm6,%xmm2
vpslld %xmm4,%xmm6,%xmm2
vpslldq $7,%xmm4,%xmm6
vpsllq %xmm4,%xmm6,%xmm2
vpsllw %xmm4,%xmm6,%xmm2
vpsrad %xmm4,%xmm6,%xmm2
vpsraw %xmm4,%xmm6,%xmm2
vpsrld %xmm4,%xmm6,%xmm2
vpsrldq $7,%xmm4,%xmm6
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlw %xmm4,%xmm6,%xmm2
vpsubb %xmm4,%xmm6,%xmm2
vpsubd %xmm4,%xmm6,%xmm2
vpsubq %xmm4,%xmm6,%xmm2
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsw %xmm4,%xmm6,%xmm2
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusw %xmm4,%xmm6,%xmm2
vpsubw %xmm4,%xmm6,%xmm2
vptest %ymm4,%ymm6
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklwd %xmm4,%xmm6,%xmm2
vpxor %xmm4,%xmm6,%xmm2
vrcpps %ymm4,%ymm6
vrcpss %xmm4,%xmm6,%xmm2
vroundpd $7,%ymm6,%ymm2
vroundps $7,%ymm6,%ymm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vrsqrtps %ymm4,%ymm6
vrsqrtss %xmm4,%xmm6,%xmm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vsqrtpd %ymm4,%ymm6
vsqrtps %ymm4,%ymm6
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vstmxcsr (%rcx)
vsubpd %ymm4,%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubsd %xmm4,%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vucomisd %xmm4,%xmm6
vucomiss %xmm4,%xmm6
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vzeroall
vzeroupper
|
tactcomplabs/xbgas-binutils-gdb
| 1,405
|
gas/testsuite/gas/i386/dw2-compress-3.s
|
.file "dw2-compress-3.c"
.text
.Ltext0:
.comm foo,4,4
.Letext0:
.file 1 "dw2-compress-3.c"
.section .debug_info,"",@progbits
.Ldebug_info0:
.long 0x32
.value 0x4
.long .Ldebug_abbrev0
.byte 0x4
.uleb128 0x1
.long .LASF0
.byte 0x1
.long .LASF1
.long .LASF2
.long .Ldebug_line0
.uleb128 0x2
.string "foo"
.byte 0x1
.byte 0x1
.long 0x2e
.uleb128 0x5
.byte 0x3
.long foo
.uleb128 0x3
.byte 0x4
.byte 0x5
.string "int"
.byte 0
.section .debug_abbrev,"",@progbits
.Ldebug_abbrev0:
.uleb128 0x1
.uleb128 0x11
.byte 0x1
.uleb128 0x25
.uleb128 0xe
.uleb128 0x13
.uleb128 0xb
.uleb128 0x3
.uleb128 0xe
.uleb128 0x1b
.uleb128 0xe
.uleb128 0x10
.uleb128 0x17
.byte 0
.byte 0
.uleb128 0x2
.uleb128 0x34
.byte 0
.uleb128 0x3
.uleb128 0x8
.uleb128 0x3a
.uleb128 0xb
.uleb128 0x3b
.uleb128 0xb
.uleb128 0x49
.uleb128 0x13
.uleb128 0x3f
.uleb128 0x19
.uleb128 0x2
.uleb128 0x18
.byte 0
.byte 0
.uleb128 0x3
.uleb128 0x24
.byte 0
.uleb128 0xb
.uleb128 0xb
.uleb128 0x3e
.uleb128 0xb
.uleb128 0x3
.uleb128 0x8
.byte 0
.byte 0
.byte 0
.section .debug_aranges,"",@progbits
.long 0x14
.value 0x2
.long .Ldebug_info0
.byte 0x4
.byte 0
.value 0
.value 0
.long 0
.long 0
.section .debug_line,"",@progbits
.Ldebug_line0:
.section .debug_str,"MS",@progbits,1
.LASF2:
.string "."
.LASF0:
.string "GNU C 4.8.3"
.LASF1:
.string "dw2-compress-3.c"
|
tactcomplabs/xbgas-binutils-gdb
| 1,247
|
gas/testsuite/gas/i386/x86-64-vgather-check.s
|
# Check vgather instructions
.text
vgather:
vgatherdps %xmm2,(%rax,%xmm1,1),%xmm0
vgatherdps %xmm2,(%rax,%xmm1,2),%xmm2
vgatherdps %xmm2,(%rax,%xmm1,2),%xmm10
vgatherdps %xmm10,(%rax,%xmm1,2),%xmm10
vgatherdps %xmm1,(%rax,%xmm1,4),%xmm0
vgatherdps %xmm9,(%rax,%xmm1,4),%xmm0
vgatherdps %xmm9,(%rax,%xmm9,4),%xmm0
vgatherdps %xmm2,(%rax,%xmm1,8),%xmm1
vgatherdps %xmm2,(%rax,%xmm1,8),%xmm9
vgatherdps %xmm2,(%rax,%xmm9,8),%xmm9
avx512vgather:
vgatherdpd 123(%rbp,%ymm17,8), %zmm16{%k1}
vgatherdpd 123(%rbp,%ymm16,8), %zmm16{%k1}
vgatherdps 123(%rbp,%zmm17,8), %zmm16{%k1}
vgatherdps 123(%rbp,%zmm16,8), %zmm16{%k1}
vgatherqpd 123(%rbp,%zmm17,8), %zmm16{%k1}
vgatherqpd 123(%rbp,%zmm16,8), %zmm16{%k1}
vgatherqps 123(%rbp,%zmm17,8), %ymm16{%k1}
vgatherqps 123(%rbp,%zmm16,8), %ymm16{%k1}
vpgatherdd 123(%rbp,%zmm17,8), %zmm16{%k1}
vpgatherdd 123(%rbp,%zmm16,8), %zmm16{%k1}
vpgatherdq 123(%rbp,%ymm17,8), %zmm16{%k1}
vpgatherdq 123(%rbp,%ymm16,8), %zmm16{%k1}
vpgatherqd 123(%rbp,%zmm17,8), %ymm16{%k1}
vpgatherqd 123(%rbp,%zmm16,8), %ymm16{%k1}
vpgatherqq 123(%rbp,%zmm17,8), %zmm16{%k1}
vpgatherqq 123(%rbp,%zmm16,8), %zmm16{%k1}
vpgatherqd 123(%rbp,%ymm17,8), %xmm16{%k1}
vpgatherqd 123(%rbp,%ymm16,8), %xmm16{%k1}
|
tactcomplabs/xbgas-binutils-gdb
| 11,432
|
gas/testsuite/gas/i386/x86-64-avx-swap-2.s
|
# Check 64bit AVX/AVX2 instructions w/ source swapping
.text
_start:
# Tests for op ymm/mem256, ymm, ymm
vaddpd %ymm14,%ymm6,%ymm2
vaddps %ymm14,%ymm6,%ymm2
vaddsubpd %ymm14,%ymm6,%ymm2
vaddsubps %ymm14,%ymm6,%ymm2
vandnpd %ymm14,%ymm6,%ymm2
vandnps %ymm14,%ymm6,%ymm2
vandpd %ymm14,%ymm6,%ymm2
vandps %ymm14,%ymm6,%ymm2
vdivpd %ymm14,%ymm6,%ymm2
vdivps %ymm14,%ymm6,%ymm2
vhaddpd %ymm14,%ymm6,%ymm2
vhaddps %ymm14,%ymm6,%ymm2
vhsubpd %ymm14,%ymm6,%ymm2
vhsubps %ymm14,%ymm6,%ymm2
vmaxpd %ymm14,%ymm6,%ymm2
vmaxps %ymm14,%ymm6,%ymm2
vminpd %ymm14,%ymm6,%ymm2
vminps %ymm14,%ymm6,%ymm2
vmulpd %ymm14,%ymm6,%ymm2
vmulps %ymm14,%ymm6,%ymm2
vorpd %ymm14,%ymm6,%ymm2
vorps %ymm14,%ymm6,%ymm2
vpaddb %ymm14,%ymm6,%ymm2
vpaddw %ymm14,%ymm6,%ymm2
vpaddd %ymm14,%ymm6,%ymm2
vpaddq %ymm14,%ymm6,%ymm2
vpaddsb %ymm14,%ymm6,%ymm2
vpaddsw %ymm14,%ymm6,%ymm2
vpaddusb %ymm14,%ymm6,%ymm2
vpaddusw %ymm14,%ymm6,%ymm2
vpand %ymm14,%ymm6,%ymm2
vpandn %ymm14,%ymm6,%ymm2
vpavgb %ymm14,%ymm6,%ymm2
vpavgw %ymm14,%ymm6,%ymm2
vpcmpeqb %ymm14,%ymm6,%ymm2
vpcmpeqw %ymm14,%ymm6,%ymm2
vpcmpeqd %ymm14,%ymm6,%ymm2
vpcmpeqq %ymm14,%ymm6,%ymm2
vpcmpgtb %ymm14,%ymm6,%ymm2
vpcmpgtw %ymm14,%ymm6,%ymm2
vpcmpgtd %ymm14,%ymm6,%ymm2
vpcmpgtq %ymm14,%ymm6,%ymm2
vpmaddwd %ymm14,%ymm6,%ymm2
vpmaxsb %ymm14,%ymm6,%ymm2
vpmaxsw %ymm14,%ymm6,%ymm2
vpmaxsd %ymm14,%ymm6,%ymm2
vpmaxub %ymm14,%ymm6,%ymm2
vpmaxuw %ymm14,%ymm6,%ymm2
vpmaxud %ymm14,%ymm6,%ymm2
vpminsb %ymm14,%ymm6,%ymm2
vpminsw %ymm14,%ymm6,%ymm2
vpminsd %ymm14,%ymm6,%ymm2
vpminub %ymm14,%ymm6,%ymm2
vpminuw %ymm14,%ymm6,%ymm2
vpminud %ymm14,%ymm6,%ymm2
vpmulhuw %ymm14,%ymm6,%ymm2
vpmulhw %ymm14,%ymm6,%ymm2
vpmullw %ymm14,%ymm6,%ymm2
vpmulld %ymm14,%ymm6,%ymm2
vpmuludq %ymm14,%ymm6,%ymm2
vpmuldq %ymm14,%ymm6,%ymm2
vpor %ymm14,%ymm6,%ymm2
vpsadbw %ymm14,%ymm6,%ymm2
vpsubb %ymm14,%ymm6,%ymm2
vpsubw %ymm14,%ymm6,%ymm2
vpsubd %ymm14,%ymm6,%ymm2
vpsubq %ymm14,%ymm6,%ymm2
vpsubsb %ymm14,%ymm6,%ymm2
vpsubsw %ymm14,%ymm6,%ymm2
vpsubusb %ymm14,%ymm6,%ymm2
vpsubusw %ymm14,%ymm6,%ymm2
vpxor %ymm14,%ymm6,%ymm2
vsubpd %ymm14,%ymm6,%ymm2
vsubps %ymm14,%ymm6,%ymm2
vxorpd %ymm14,%ymm6,%ymm2
vxorps %ymm14,%ymm6,%ymm2
vcmpeqpd %ymm14,%ymm6,%ymm2
vcmpltpd %ymm14,%ymm6,%ymm2
vcmplepd %ymm14,%ymm6,%ymm2
vcmpunordpd %ymm14,%ymm6,%ymm2
vcmpneqpd %ymm14,%ymm6,%ymm2
vcmpnltpd %ymm14,%ymm6,%ymm2
vcmpnlepd %ymm14,%ymm6,%ymm2
vcmpordpd %ymm14,%ymm6,%ymm2
vcmpeq_uqpd %ymm14,%ymm6,%ymm2
vcmpngepd %ymm14,%ymm6,%ymm2
vcmpngtpd %ymm14,%ymm6,%ymm2
vcmpfalsepd %ymm14,%ymm6,%ymm2
vcmpneq_oqpd %ymm14,%ymm6,%ymm2
vcmpgepd %ymm14,%ymm6,%ymm2
vcmpgtpd %ymm14,%ymm6,%ymm2
vcmptruepd %ymm14,%ymm6,%ymm2
vcmpeq_ospd %ymm14,%ymm6,%ymm2
vcmplt_oqpd %ymm14,%ymm6,%ymm2
vcmple_oqpd %ymm14,%ymm6,%ymm2
vcmpunord_spd %ymm14,%ymm6,%ymm2
vcmpneq_uspd %ymm14,%ymm6,%ymm2
vcmpnlt_uqpd %ymm14,%ymm6,%ymm2
vcmpnle_uqpd %ymm14,%ymm6,%ymm2
vcmpord_spd %ymm14,%ymm6,%ymm2
vcmpeq_uspd %ymm14,%ymm6,%ymm2
vcmpnge_uqpd %ymm14,%ymm6,%ymm2
vcmpngt_uqpd %ymm14,%ymm6,%ymm2
vcmpfalse_ospd %ymm14,%ymm6,%ymm2
vcmpneq_ospd %ymm14,%ymm6,%ymm2
vcmpge_oqpd %ymm14,%ymm6,%ymm2
vcmpgt_oqpd %ymm14,%ymm6,%ymm2
vcmptrue_uspd %ymm14,%ymm6,%ymm2
vcmpeqps %ymm14,%ymm6,%ymm2
vcmpltps %ymm14,%ymm6,%ymm2
vcmpleps %ymm14,%ymm6,%ymm2
vcmpunordps %ymm14,%ymm6,%ymm2
vcmpneqps %ymm14,%ymm6,%ymm2
vcmpnltps %ymm14,%ymm6,%ymm2
vcmpnleps %ymm14,%ymm6,%ymm2
vcmpordps %ymm14,%ymm6,%ymm2
vcmpeq_uqps %ymm14,%ymm6,%ymm2
vcmpngeps %ymm14,%ymm6,%ymm2
vcmpngtps %ymm14,%ymm6,%ymm2
vcmpfalseps %ymm14,%ymm6,%ymm2
vcmpneq_oqps %ymm14,%ymm6,%ymm2
vcmpgeps %ymm14,%ymm6,%ymm2
vcmpgtps %ymm14,%ymm6,%ymm2
vcmptrueps %ymm14,%ymm6,%ymm2
vcmpeq_osps %ymm14,%ymm6,%ymm2
vcmplt_oqps %ymm14,%ymm6,%ymm2
vcmple_oqps %ymm14,%ymm6,%ymm2
vcmpunord_sps %ymm14,%ymm6,%ymm2
vcmpneq_usps %ymm14,%ymm6,%ymm2
vcmpnlt_uqps %ymm14,%ymm6,%ymm2
vcmpnle_uqps %ymm14,%ymm6,%ymm2
vcmpord_sps %ymm14,%ymm6,%ymm2
vcmpeq_usps %ymm14,%ymm6,%ymm2
vcmpnge_uqps %ymm14,%ymm6,%ymm2
vcmpngt_uqps %ymm14,%ymm6,%ymm2
vcmpfalse_osps %ymm14,%ymm6,%ymm2
vcmpneq_osps %ymm14,%ymm6,%ymm2
vcmpge_oqps %ymm14,%ymm6,%ymm2
vcmpgt_oqps %ymm14,%ymm6,%ymm2
vcmptrue_usps %ymm14,%ymm6,%ymm2
# Tests for op imm8, ymm/mem256, ymm, ymm
vcmppd $7,%ymm14,%ymm6,%ymm2
vcmpps $7,%ymm14,%ymm6,%ymm2
# Tests for op xmm/mem128, xmm, xmm
vaddpd %xmm14,%xmm6,%xmm2
vaddps %xmm14,%xmm6,%xmm2
vaddsubpd %xmm14,%xmm6,%xmm2
vaddsubps %xmm14,%xmm6,%xmm2
vandnpd %xmm14,%xmm6,%xmm2
vandnps %xmm14,%xmm6,%xmm2
vandpd %xmm14,%xmm6,%xmm2
vandps %xmm14,%xmm6,%xmm2
vdivpd %xmm14,%xmm6,%xmm2
vdivps %xmm14,%xmm6,%xmm2
vhaddpd %xmm14,%xmm6,%xmm2
vhaddps %xmm14,%xmm6,%xmm2
vhsubpd %xmm14,%xmm6,%xmm2
vhsubps %xmm14,%xmm6,%xmm2
vmaxpd %xmm14,%xmm6,%xmm2
vmaxps %xmm14,%xmm6,%xmm2
vminpd %xmm14,%xmm6,%xmm2
vminps %xmm14,%xmm6,%xmm2
vmulpd %xmm14,%xmm6,%xmm2
vmulps %xmm14,%xmm6,%xmm2
vorpd %xmm14,%xmm6,%xmm2
vorps %xmm14,%xmm6,%xmm2
vpaddb %xmm14,%xmm6,%xmm2
vpaddw %xmm14,%xmm6,%xmm2
vpaddd %xmm14,%xmm6,%xmm2
vpaddq %xmm14,%xmm6,%xmm2
vpaddsb %xmm14,%xmm6,%xmm2
vpaddsw %xmm14,%xmm6,%xmm2
vpaddusb %xmm14,%xmm6,%xmm2
vpaddusw %xmm14,%xmm6,%xmm2
vpand %xmm14,%xmm6,%xmm2
vpandn %xmm14,%xmm6,%xmm2
vpavgb %xmm14,%xmm6,%xmm2
vpavgw %xmm14,%xmm6,%xmm2
vpcmpeqb %xmm14,%xmm6,%xmm2
vpcmpeqw %xmm14,%xmm6,%xmm2
vpcmpeqd %xmm14,%xmm6,%xmm2
vpcmpeqq %xmm14,%xmm6,%xmm2
vpcmpgtb %xmm14,%xmm6,%xmm2
vpcmpgtw %xmm14,%xmm6,%xmm2
vpcmpgtd %xmm14,%xmm6,%xmm2
vpcmpgtq %xmm14,%xmm6,%xmm2
vpmaddwd %xmm14,%xmm6,%xmm2
vpmaxsb %xmm14,%xmm6,%xmm2
vpmaxsw %xmm14,%xmm6,%xmm2
vpmaxsd %xmm14,%xmm6,%xmm2
vpmaxub %xmm14,%xmm6,%xmm2
vpmaxuw %xmm14,%xmm6,%xmm2
vpmaxud %xmm14,%xmm6,%xmm2
vpminsb %xmm14,%xmm6,%xmm2
vpminsw %xmm14,%xmm6,%xmm2
vpminsd %xmm14,%xmm6,%xmm2
vpminub %xmm14,%xmm6,%xmm2
vpminuw %xmm14,%xmm6,%xmm2
vpminud %xmm14,%xmm6,%xmm2
vpmulhuw %xmm14,%xmm6,%xmm2
vpmulhw %xmm14,%xmm6,%xmm2
vpmullw %xmm14,%xmm6,%xmm2
vpmulld %xmm14,%xmm6,%xmm2
vpmuludq %xmm14,%xmm6,%xmm2
vpmuldq %xmm14,%xmm6,%xmm2
vpor %xmm14,%xmm6,%xmm2
vpsadbw %xmm14,%xmm6,%xmm2
vpsubb %xmm14,%xmm6,%xmm2
vpsubw %xmm14,%xmm6,%xmm2
vpsubd %xmm14,%xmm6,%xmm2
vpsubq %xmm14,%xmm6,%xmm2
vpsubsb %xmm14,%xmm6,%xmm2
vpsubsw %xmm14,%xmm6,%xmm2
vpsubusb %xmm14,%xmm6,%xmm2
vpsubusw %xmm14,%xmm6,%xmm2
vpxor %xmm14,%xmm6,%xmm2
vsubpd %xmm14,%xmm6,%xmm2
vsubps %xmm14,%xmm6,%xmm2
vxorpd %xmm14,%xmm6,%xmm2
vxorps %xmm14,%xmm6,%xmm2
vcmpeqpd %xmm14,%xmm6,%xmm2
vcmpltpd %xmm14,%xmm6,%xmm2
vcmplepd %xmm14,%xmm6,%xmm2
vcmpunordpd %xmm14,%xmm6,%xmm2
vcmpneqpd %xmm14,%xmm6,%xmm2
vcmpnltpd %xmm14,%xmm6,%xmm2
vcmpnlepd %xmm14,%xmm6,%xmm2
vcmpordpd %xmm14,%xmm6,%xmm2
vcmpeq_uqpd %xmm14,%xmm6,%xmm2
vcmpngepd %xmm14,%xmm6,%xmm2
vcmpngtpd %xmm14,%xmm6,%xmm2
vcmpfalsepd %xmm14,%xmm6,%xmm2
vcmpneq_oqpd %xmm14,%xmm6,%xmm2
vcmpgepd %xmm14,%xmm6,%xmm2
vcmpgtpd %xmm14,%xmm6,%xmm2
vcmptruepd %xmm14,%xmm6,%xmm2
vcmpeq_ospd %xmm14,%xmm6,%xmm2
vcmplt_oqpd %xmm14,%xmm6,%xmm2
vcmple_oqpd %xmm14,%xmm6,%xmm2
vcmpunord_spd %xmm14,%xmm6,%xmm2
vcmpneq_uspd %xmm14,%xmm6,%xmm2
vcmpnlt_uqpd %xmm14,%xmm6,%xmm2
vcmpnle_uqpd %xmm14,%xmm6,%xmm2
vcmpord_spd %xmm14,%xmm6,%xmm2
vcmpeq_uspd %xmm14,%xmm6,%xmm2
vcmpnge_uqpd %xmm14,%xmm6,%xmm2
vcmpngt_uqpd %xmm14,%xmm6,%xmm2
vcmpfalse_ospd %xmm14,%xmm6,%xmm2
vcmpneq_ospd %xmm14,%xmm6,%xmm2
vcmpge_oqpd %xmm14,%xmm6,%xmm2
vcmpgt_oqpd %xmm14,%xmm6,%xmm2
vcmptrue_uspd %xmm14,%xmm6,%xmm2
vcmpeqps %xmm14,%xmm6,%xmm2
vcmpltps %xmm14,%xmm6,%xmm2
vcmpleps %xmm14,%xmm6,%xmm2
vcmpunordps %xmm14,%xmm6,%xmm2
vcmpneqps %xmm14,%xmm6,%xmm2
vcmpnltps %xmm14,%xmm6,%xmm2
vcmpnleps %xmm14,%xmm6,%xmm2
vcmpordps %xmm14,%xmm6,%xmm2
vcmpeq_uqps %xmm14,%xmm6,%xmm2
vcmpngeps %xmm14,%xmm6,%xmm2
vcmpngtps %xmm14,%xmm6,%xmm2
vcmpfalseps %xmm14,%xmm6,%xmm2
vcmpneq_oqps %xmm14,%xmm6,%xmm2
vcmpgeps %xmm14,%xmm6,%xmm2
vcmpgtps %xmm14,%xmm6,%xmm2
vcmptrueps %xmm14,%xmm6,%xmm2
vcmpeq_osps %xmm14,%xmm6,%xmm2
vcmplt_oqps %xmm14,%xmm6,%xmm2
vcmple_oqps %xmm14,%xmm6,%xmm2
vcmpunord_sps %xmm14,%xmm6,%xmm2
vcmpneq_usps %xmm14,%xmm6,%xmm2
vcmpnlt_uqps %xmm14,%xmm6,%xmm2
vcmpnle_uqps %xmm14,%xmm6,%xmm2
vcmpord_sps %xmm14,%xmm6,%xmm2
vcmpeq_usps %xmm14,%xmm6,%xmm2
vcmpnge_uqps %xmm14,%xmm6,%xmm2
vcmpngt_uqps %xmm14,%xmm6,%xmm2
vcmpfalse_osps %xmm14,%xmm6,%xmm2
vcmpneq_osps %xmm14,%xmm6,%xmm2
vcmpge_oqps %xmm14,%xmm6,%xmm2
vcmpgt_oqps %xmm14,%xmm6,%xmm2
vcmptrue_usps %xmm14,%xmm6,%xmm2
# Tests for op imm8, xmm/mem128, xmm, xmm
vcmppd $7,%xmm14,%xmm6,%xmm2
vcmpps $7,%xmm14,%xmm6,%xmm2
# Tests for op xmm/mem64, xmm
vcomisd %xmm14,%xmm6
vucomisd %xmm14,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm14,%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm14,%xmm6,%xmm2
vdivsd %xmm14,%xmm6,%xmm2
vmaxsd %xmm14,%xmm6,%xmm2
vminsd %xmm14,%xmm6,%xmm2
vmulsd %xmm14,%xmm6,%xmm2
vsqrtsd %xmm14,%xmm6,%xmm2
vsubsd %xmm14,%xmm6,%xmm2
vcmpeqsd %xmm14,%xmm6,%xmm2
vcmpltsd %xmm14,%xmm6,%xmm2
vcmplesd %xmm14,%xmm6,%xmm2
vcmpunordsd %xmm14,%xmm6,%xmm2
vcmpneqsd %xmm14,%xmm6,%xmm2
vcmpnltsd %xmm14,%xmm6,%xmm2
vcmpnlesd %xmm14,%xmm6,%xmm2
vcmpordsd %xmm14,%xmm6,%xmm2
vcmpeq_uqsd %xmm14,%xmm6,%xmm2
vcmpngesd %xmm14,%xmm6,%xmm2
vcmpngtsd %xmm14,%xmm6,%xmm2
vcmpfalsesd %xmm14,%xmm6,%xmm2
vcmpneq_oqsd %xmm14,%xmm6,%xmm2
vcmpgesd %xmm14,%xmm6,%xmm2
vcmpgtsd %xmm14,%xmm6,%xmm2
vcmptruesd %xmm14,%xmm6,%xmm2
vcmpeq_ossd %xmm14,%xmm6,%xmm2
vcmplt_oqsd %xmm14,%xmm6,%xmm2
vcmple_oqsd %xmm14,%xmm6,%xmm2
vcmpunord_ssd %xmm14,%xmm6,%xmm2
vcmpneq_ussd %xmm14,%xmm6,%xmm2
vcmpnlt_uqsd %xmm14,%xmm6,%xmm2
vcmpnle_uqsd %xmm14,%xmm6,%xmm2
vcmpord_ssd %xmm14,%xmm6,%xmm2
vcmpeq_ussd %xmm14,%xmm6,%xmm2
vcmpnge_uqsd %xmm14,%xmm6,%xmm2
vcmpngt_uqsd %xmm14,%xmm6,%xmm2
vcmpfalse_ossd %xmm14,%xmm6,%xmm2
vcmpneq_ossd %xmm14,%xmm6,%xmm2
vcmpge_oqsd %xmm14,%xmm6,%xmm2
vcmpgt_oqsd %xmm14,%xmm6,%xmm2
vcmptrue_ussd %xmm14,%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm14,%xmm6,%xmm2
vdivss %xmm14,%xmm6,%xmm2
vmaxss %xmm14,%xmm6,%xmm2
vminss %xmm14,%xmm6,%xmm2
vmulss %xmm14,%xmm6,%xmm2
vrcpss %xmm14,%xmm6,%xmm2
vrsqrtss %xmm14,%xmm6,%xmm2
vsqrtss %xmm14,%xmm6,%xmm2
vsubss %xmm14,%xmm6,%xmm2
vcmpeqss %xmm14,%xmm6,%xmm2
vcmpltss %xmm14,%xmm6,%xmm2
vcmpless %xmm14,%xmm6,%xmm2
vcmpunordss %xmm14,%xmm6,%xmm2
vcmpneqss %xmm14,%xmm6,%xmm2
vcmpnltss %xmm14,%xmm6,%xmm2
vcmpnless %xmm14,%xmm6,%xmm2
vcmpordss %xmm14,%xmm6,%xmm2
vcmpeq_uqss %xmm14,%xmm6,%xmm2
vcmpngess %xmm14,%xmm6,%xmm2
vcmpngtss %xmm14,%xmm6,%xmm2
vcmpfalsess %xmm14,%xmm6,%xmm2
vcmpneq_oqss %xmm14,%xmm6,%xmm2
vcmpgess %xmm14,%xmm6,%xmm2
vcmpgtss %xmm14,%xmm6,%xmm2
vcmptruess %xmm14,%xmm6,%xmm2
vcmpeq_osss %xmm14,%xmm6,%xmm2
vcmplt_oqss %xmm14,%xmm6,%xmm2
vcmple_oqss %xmm14,%xmm6,%xmm2
vcmpunord_sss %xmm14,%xmm6,%xmm2
vcmpneq_usss %xmm14,%xmm6,%xmm2
vcmpnlt_uqss %xmm14,%xmm6,%xmm2
vcmpnle_uqss %xmm14,%xmm6,%xmm2
vcmpord_sss %xmm14,%xmm6,%xmm2
vcmpeq_usss %xmm14,%xmm6,%xmm2
vcmpnge_uqss %xmm14,%xmm6,%xmm2
vcmpngt_uqss %xmm14,%xmm6,%xmm2
vcmpfalse_osss %xmm14,%xmm6,%xmm2
vcmpneq_osss %xmm14,%xmm6,%xmm2
vcmpge_oqss %xmm14,%xmm6,%xmm2
vcmpgt_oqss %xmm14,%xmm6,%xmm2
vcmptrue_usss %xmm14,%xmm6,%xmm2
# Tests for op xmm/mem32, xmm
vcomiss %xmm14,%xmm6
vucomiss %xmm14,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm14,%xmm6,%xmm2
|
tactcomplabs/xbgas-binutils-gdb
| 12,268
|
gas/testsuite/gas/i386/avx512cd_vl.s
|
# Check 32bit AVX512{CD,VL} instructions
.allow_index_reg
.text
_start:
vpconflictd %xmm5, %xmm6{%k7} # AVX512{CD,VL}
vpconflictd %xmm5, %xmm6{%k7}{z} # AVX512{CD,VL}
vpconflictd (%ecx), %xmm6{%k7} # AVX512{CD,VL}
vpconflictd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{CD,VL}
vpconflictd (%eax){1to4}, %xmm6{%k7} # AVX512{CD,VL}
vpconflictd 2032(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd 2048(%edx), %xmm6{%k7} # AVX512{CD,VL}
vpconflictd -2048(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd -2064(%edx), %xmm6{%k7} # AVX512{CD,VL}
vpconflictd 508(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd 512(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL}
vpconflictd -512(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd -516(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL}
vpconflictd %ymm5, %ymm6{%k7} # AVX512{CD,VL}
vpconflictd %ymm5, %ymm6{%k7}{z} # AVX512{CD,VL}
vpconflictd (%ecx), %ymm6{%k7} # AVX512{CD,VL}
vpconflictd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{CD,VL}
vpconflictd (%eax){1to8}, %ymm6{%k7} # AVX512{CD,VL}
vpconflictd 4064(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd 4096(%edx), %ymm6{%k7} # AVX512{CD,VL}
vpconflictd -4096(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd -4128(%edx), %ymm6{%k7} # AVX512{CD,VL}
vpconflictd 508(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd 512(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL}
vpconflictd -512(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictd -516(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL}
vpconflictq %xmm5, %xmm6{%k7} # AVX512{CD,VL}
vpconflictq %xmm5, %xmm6{%k7}{z} # AVX512{CD,VL}
vpconflictq (%ecx), %xmm6{%k7} # AVX512{CD,VL}
vpconflictq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{CD,VL}
vpconflictq (%eax){1to2}, %xmm6{%k7} # AVX512{CD,VL}
vpconflictq 2032(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq 2048(%edx), %xmm6{%k7} # AVX512{CD,VL}
vpconflictq -2048(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq -2064(%edx), %xmm6{%k7} # AVX512{CD,VL}
vpconflictq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL}
vpconflictq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL}
vpconflictq %ymm5, %ymm6{%k7} # AVX512{CD,VL}
vpconflictq %ymm5, %ymm6{%k7}{z} # AVX512{CD,VL}
vpconflictq (%ecx), %ymm6{%k7} # AVX512{CD,VL}
vpconflictq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{CD,VL}
vpconflictq (%eax){1to4}, %ymm6{%k7} # AVX512{CD,VL}
vpconflictq 4064(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq 4096(%edx), %ymm6{%k7} # AVX512{CD,VL}
vpconflictq -4096(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq -4128(%edx), %ymm6{%k7} # AVX512{CD,VL}
vpconflictq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL}
vpconflictq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vpconflictq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL}
vplzcntd %xmm5, %xmm6{%k7} # AVX512{CD,VL}
vplzcntd %xmm5, %xmm6{%k7}{z} # AVX512{CD,VL}
vplzcntd (%ecx), %xmm6{%k7} # AVX512{CD,VL}
vplzcntd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{CD,VL}
vplzcntd (%eax){1to4}, %xmm6{%k7} # AVX512{CD,VL}
vplzcntd 2032(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd 2048(%edx), %xmm6{%k7} # AVX512{CD,VL}
vplzcntd -2048(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd -2064(%edx), %xmm6{%k7} # AVX512{CD,VL}
vplzcntd 508(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd 512(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL}
vplzcntd -512(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd -516(%edx){1to4}, %xmm6{%k7} # AVX512{CD,VL}
vplzcntd %ymm5, %ymm6{%k7} # AVX512{CD,VL}
vplzcntd %ymm5, %ymm6{%k7}{z} # AVX512{CD,VL}
vplzcntd (%ecx), %ymm6{%k7} # AVX512{CD,VL}
vplzcntd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{CD,VL}
vplzcntd (%eax){1to8}, %ymm6{%k7} # AVX512{CD,VL}
vplzcntd 4064(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd 4096(%edx), %ymm6{%k7} # AVX512{CD,VL}
vplzcntd -4096(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd -4128(%edx), %ymm6{%k7} # AVX512{CD,VL}
vplzcntd 508(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd 512(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL}
vplzcntd -512(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntd -516(%edx){1to8}, %ymm6{%k7} # AVX512{CD,VL}
vplzcntq %xmm5, %xmm6{%k7} # AVX512{CD,VL}
vplzcntq %xmm5, %xmm6{%k7}{z} # AVX512{CD,VL}
vplzcntq (%ecx), %xmm6{%k7} # AVX512{CD,VL}
vplzcntq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{CD,VL}
vplzcntq (%eax){1to2}, %xmm6{%k7} # AVX512{CD,VL}
vplzcntq 2032(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq 2048(%edx), %xmm6{%k7} # AVX512{CD,VL}
vplzcntq -2048(%edx), %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq -2064(%edx), %xmm6{%k7} # AVX512{CD,VL}
vplzcntq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL}
vplzcntq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{CD,VL}
vplzcntq %ymm5, %ymm6{%k7} # AVX512{CD,VL}
vplzcntq %ymm5, %ymm6{%k7}{z} # AVX512{CD,VL}
vplzcntq (%ecx), %ymm6{%k7} # AVX512{CD,VL}
vplzcntq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{CD,VL}
vplzcntq (%eax){1to4}, %ymm6{%k7} # AVX512{CD,VL}
vplzcntq 4064(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq 4096(%edx), %ymm6{%k7} # AVX512{CD,VL}
vplzcntq -4096(%edx), %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq -4128(%edx), %ymm6{%k7} # AVX512{CD,VL}
vplzcntq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL}
vplzcntq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL} Disp8
vplzcntq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{CD,VL}
vpbroadcastmw2d %k6, %xmm6 # AVX512{CD,VL}
vpbroadcastmw2d %k6, %ymm6 # AVX512{CD,VL}
vpbroadcastmb2q %k6, %xmm6 # AVX512{CD,VL}
vpbroadcastmb2q %k6, %ymm6 # AVX512{CD,VL}
.intel_syntax noprefix
vpconflictd xmm6{k7}, xmm5 # AVX512{CD,VL}
vpconflictd xmm6{k7}{z}, xmm5 # AVX512{CD,VL}
vpconflictd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{CD,VL}
vpconflictd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vpconflictd xmm6{k7}, [eax]{1to4} # AVX512{CD,VL}
vpconflictd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{CD,VL} Disp8
vpconflictd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{CD,VL}
vpconflictd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{CD,VL} Disp8
vpconflictd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{CD,VL}
vpconflictd xmm6{k7}, [edx+508]{1to4} # AVX512{CD,VL} Disp8
vpconflictd xmm6{k7}, [edx+512]{1to4} # AVX512{CD,VL}
vpconflictd xmm6{k7}, [edx-512]{1to4} # AVX512{CD,VL} Disp8
vpconflictd xmm6{k7}, [edx-516]{1to4} # AVX512{CD,VL}
vpconflictd ymm6{k7}, ymm5 # AVX512{CD,VL}
vpconflictd ymm6{k7}{z}, ymm5 # AVX512{CD,VL}
vpconflictd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{CD,VL}
vpconflictd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vpconflictd ymm6{k7}, [eax]{1to8} # AVX512{CD,VL}
vpconflictd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{CD,VL} Disp8
vpconflictd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{CD,VL}
vpconflictd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{CD,VL} Disp8
vpconflictd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{CD,VL}
vpconflictd ymm6{k7}, [edx+508]{1to8} # AVX512{CD,VL} Disp8
vpconflictd ymm6{k7}, [edx+512]{1to8} # AVX512{CD,VL}
vpconflictd ymm6{k7}, [edx-512]{1to8} # AVX512{CD,VL} Disp8
vpconflictd ymm6{k7}, [edx-516]{1to8} # AVX512{CD,VL}
vpconflictq xmm6{k7}, xmm5 # AVX512{CD,VL}
vpconflictq xmm6{k7}{z}, xmm5 # AVX512{CD,VL}
vpconflictq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{CD,VL}
vpconflictq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vpconflictq xmm6{k7}, [eax]{1to2} # AVX512{CD,VL}
vpconflictq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{CD,VL} Disp8
vpconflictq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{CD,VL}
vpconflictq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{CD,VL} Disp8
vpconflictq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{CD,VL}
vpconflictq xmm6{k7}, [edx+1016]{1to2} # AVX512{CD,VL} Disp8
vpconflictq xmm6{k7}, [edx+1024]{1to2} # AVX512{CD,VL}
vpconflictq xmm6{k7}, [edx-1024]{1to2} # AVX512{CD,VL} Disp8
vpconflictq xmm6{k7}, [edx-1032]{1to2} # AVX512{CD,VL}
vpconflictq ymm6{k7}, ymm5 # AVX512{CD,VL}
vpconflictq ymm6{k7}{z}, ymm5 # AVX512{CD,VL}
vpconflictq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{CD,VL}
vpconflictq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vpconflictq ymm6{k7}, [eax]{1to4} # AVX512{CD,VL}
vpconflictq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{CD,VL} Disp8
vpconflictq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{CD,VL}
vpconflictq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{CD,VL} Disp8
vpconflictq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{CD,VL}
vpconflictq ymm6{k7}, [edx+1016]{1to4} # AVX512{CD,VL} Disp8
vpconflictq ymm6{k7}, [edx+1024]{1to4} # AVX512{CD,VL}
vpconflictq ymm6{k7}, [edx-1024]{1to4} # AVX512{CD,VL} Disp8
vpconflictq ymm6{k7}, [edx-1032]{1to4} # AVX512{CD,VL}
vplzcntd xmm6{k7}, xmm5 # AVX512{CD,VL}
vplzcntd xmm6{k7}{z}, xmm5 # AVX512{CD,VL}
vplzcntd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{CD,VL}
vplzcntd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vplzcntd xmm6{k7}, [eax]{1to4} # AVX512{CD,VL}
vplzcntd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{CD,VL} Disp8
vplzcntd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{CD,VL}
vplzcntd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{CD,VL} Disp8
vplzcntd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{CD,VL}
vplzcntd xmm6{k7}, [edx+508]{1to4} # AVX512{CD,VL} Disp8
vplzcntd xmm6{k7}, [edx+512]{1to4} # AVX512{CD,VL}
vplzcntd xmm6{k7}, [edx-512]{1to4} # AVX512{CD,VL} Disp8
vplzcntd xmm6{k7}, [edx-516]{1to4} # AVX512{CD,VL}
vplzcntd ymm6{k7}, ymm5 # AVX512{CD,VL}
vplzcntd ymm6{k7}{z}, ymm5 # AVX512{CD,VL}
vplzcntd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{CD,VL}
vplzcntd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vplzcntd ymm6{k7}, [eax]{1to8} # AVX512{CD,VL}
vplzcntd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{CD,VL} Disp8
vplzcntd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{CD,VL}
vplzcntd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{CD,VL} Disp8
vplzcntd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{CD,VL}
vplzcntd ymm6{k7}, [edx+508]{1to8} # AVX512{CD,VL} Disp8
vplzcntd ymm6{k7}, [edx+512]{1to8} # AVX512{CD,VL}
vplzcntd ymm6{k7}, [edx-512]{1to8} # AVX512{CD,VL} Disp8
vplzcntd ymm6{k7}, [edx-516]{1to8} # AVX512{CD,VL}
vplzcntq xmm6{k7}, xmm5 # AVX512{CD,VL}
vplzcntq xmm6{k7}{z}, xmm5 # AVX512{CD,VL}
vplzcntq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{CD,VL}
vplzcntq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vplzcntq xmm6{k7}, [eax]{1to2} # AVX512{CD,VL}
vplzcntq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{CD,VL} Disp8
vplzcntq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{CD,VL}
vplzcntq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{CD,VL} Disp8
vplzcntq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{CD,VL}
vplzcntq xmm6{k7}, [edx+1016]{1to2} # AVX512{CD,VL} Disp8
vplzcntq xmm6{k7}, [edx+1024]{1to2} # AVX512{CD,VL}
vplzcntq xmm6{k7}, [edx-1024]{1to2} # AVX512{CD,VL} Disp8
vplzcntq xmm6{k7}, [edx-1032]{1to2} # AVX512{CD,VL}
vplzcntq ymm6{k7}, ymm5 # AVX512{CD,VL}
vplzcntq ymm6{k7}{z}, ymm5 # AVX512{CD,VL}
vplzcntq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{CD,VL}
vplzcntq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{CD,VL}
vplzcntq ymm6{k7}, [eax]{1to4} # AVX512{CD,VL}
vplzcntq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{CD,VL} Disp8
vplzcntq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{CD,VL}
vplzcntq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{CD,VL} Disp8
vplzcntq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{CD,VL}
vplzcntq ymm6{k7}, [edx+1016]{1to4} # AVX512{CD,VL} Disp8
vplzcntq ymm6{k7}, [edx+1024]{1to4} # AVX512{CD,VL}
vplzcntq ymm6{k7}, [edx-1024]{1to4} # AVX512{CD,VL} Disp8
vplzcntq ymm6{k7}, [edx-1032]{1to4} # AVX512{CD,VL}
vpbroadcastmw2d xmm6, k6 # AVX512{CD,VL}
vpbroadcastmw2d ymm6, k6 # AVX512{CD,VL}
vpbroadcastmb2q xmm6, k6 # AVX512{CD,VL}
vpbroadcastmb2q ymm6, k6 # AVX512{CD,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 1,378
|
gas/testsuite/gas/i386/nops.s
|
# Exercise assembler/disassembler handling of hand-encoded NOP byte
# sequences: the recommended multi-byte NOPs (0F 1F /0) and the
# reserved hint-NOP opcode range (0F 19, 0F 1A-0F 1F), followed by
# the mnemonic nop forms.  The bytes must assemble unchanged.
.text
# Recommended multi-byte NOPs, 3 through 10 bytes long, built from
# 0F 1F /0 with progressively larger ModRM/SIB/displacement forms
# and 66h operand-size / 2Eh segment prefixes.
.byte 0x0f, 0x1f, 0x0
.byte 0x0f, 0x1f, 0x40, 0x0
.byte 0x0f, 0x1f, 0x44, 0x0, 0x0
.byte 0x66, 0x0f, 0x1f, 0x44, 0x0, 0x0
.byte 0x0f, 0x1f, 0x80, 0x0, 0x0, 0x0, 0x0
.byte 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
.byte 0x66, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
.byte 0x66, 0x2e, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
# reg,reg
# Hint-NOP opcodes 0F 19 through 0F 1F with a register operand
# (ModRM = 0xff, mod = 11).
.byte 0x0f, 0x19, 0xff
.byte 0x0f, 0x1a, 0xff
.byte 0x0f, 0x1b, 0xff
.byte 0x0f, 0x1c, 0xff
.byte 0x0f, 0x1d, 0xff
.byte 0x0f, 0x1e, 0xff
.byte 0x0f, 0x1f, 0xff
# with base and imm8
# Memory form: ModRM = 0x5a selects a base register with an 8-bit
# displacement (0x22).
.byte 0x0f, 0x19, 0x5A, 0x22
.byte 0x0f, 0x1c, 0x5A, 0x22
.byte 0x0f, 0x1d, 0x5A, 0x22
.byte 0x0f, 0x1e, 0x5A, 0x22
.byte 0x0f, 0x1f, 0x5A, 0x22
# with sib and imm32
# Memory form: ModRM = 0x9c requires a SIB byte (0x1d) and a 32-bit
# displacement (0x44332211, little-endian).
.byte 0x0f, 0x19, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1c, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1d, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1e, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1f, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
# SIB forms without displacement: SIB = 0x60 (no index) and
# SIB = 0x59 (scaled index), checking index/base decoding.
.byte 0x0f, 0x19, 0x04, 0x60
.byte 0x0f, 0x1c, 0x0c, 0x60
.byte 0x0f, 0x1d, 0x04, 0x60
.byte 0x0f, 0x1e, 0x04, 0x60
.byte 0x0f, 0x1f, 0x04, 0x60
.byte 0x0f, 0x19, 0x04, 0x59
.byte 0x0f, 0x1c, 0x0c, 0x59
.byte 0x0f, 0x1d, 0x04, 0x59
.byte 0x0f, 0x1e, 0x04, 0x59
.byte 0x0f, 0x1f, 0x04, 0x59
# Mnemonic forms of the multi-byte NOP, register and memory,
# 16-bit and 32-bit operand sizes.
nop %eax
nop %ax
nopl (%eax)
nopw (%eax)
nopl %eax
nopw %ax
|
tactcomplabs/xbgas-binutils-gdb
| 67,144
|
gas/testsuite/gas/i386/avx512dq.s
|
# Check 32bit AVX512DQ instructions
.allow_index_reg
.text
_start:
vbroadcastf32x8 (%ecx), %zmm6 # AVX512DQ
vbroadcastf32x8 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcastf32x8 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcastf32x8 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcastf32x8 4064(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x8 4096(%edx), %zmm6 # AVX512DQ
vbroadcastf32x8 -4096(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x8 -4128(%edx), %zmm6 # AVX512DQ
vbroadcastf64x2 (%ecx), %zmm6 # AVX512DQ
vbroadcastf64x2 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcastf64x2 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcastf64x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcastf64x2 2032(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf64x2 2048(%edx), %zmm6 # AVX512DQ
vbroadcastf64x2 -2048(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf64x2 -2064(%edx), %zmm6 # AVX512DQ
vbroadcasti32x8 (%ecx), %zmm6 # AVX512DQ
vbroadcasti32x8 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcasti32x8 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcasti32x8 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcasti32x8 4064(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x8 4096(%edx), %zmm6 # AVX512DQ
vbroadcasti32x8 -4096(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x8 -4128(%edx), %zmm6 # AVX512DQ
vbroadcasti64x2 (%ecx), %zmm6 # AVX512DQ
vbroadcasti64x2 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcasti64x2 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcasti64x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcasti64x2 2032(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti64x2 2048(%edx), %zmm6 # AVX512DQ
vbroadcasti64x2 -2048(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti64x2 -2064(%edx), %zmm6 # AVX512DQ
vbroadcastf32x2 %xmm7, %zmm6 # AVX512DQ
vbroadcastf32x2 %xmm7, %zmm6{%k7} # AVX512DQ
vbroadcastf32x2 %xmm7, %zmm6{%k7}{z} # AVX512DQ
vbroadcastf32x2 (%ecx), %zmm6 # AVX512DQ
vbroadcastf32x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcastf32x2 1016(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x2 1024(%edx), %zmm6 # AVX512DQ
vbroadcastf32x2 -1024(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x2 -1032(%edx), %zmm6 # AVX512DQ
vcvtpd2qq %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq %zmm5, %zmm6{%k7} # AVX512DQ
vcvtpd2qq %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtpd2qq {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq (%ecx), %zmm6 # AVX512DQ
vcvtpd2qq -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtpd2qq (%eax){1to8}, %zmm6 # AVX512DQ
vcvtpd2qq 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2qq 8192(%edx), %zmm6 # AVX512DQ
vcvtpd2qq -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2qq -8256(%edx), %zmm6 # AVX512DQ
vcvtpd2qq 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2qq 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtpd2qq -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2qq -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtpd2uqq %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq %zmm5, %zmm6{%k7} # AVX512DQ
vcvtpd2uqq %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtpd2uqq {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq (%ecx), %zmm6 # AVX512DQ
vcvtpd2uqq -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtpd2uqq (%eax){1to8}, %zmm6 # AVX512DQ
vcvtpd2uqq 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2uqq 8192(%edx), %zmm6 # AVX512DQ
vcvtpd2uqq -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2uqq -8256(%edx), %zmm6 # AVX512DQ
vcvtpd2uqq 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2uqq 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtpd2uqq -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2uqq -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtps2qq %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq %ymm5, %zmm6{%k7}{z} # AVX512DQ
vcvtps2qq {rn-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq {ru-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq {rd-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq {rz-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtps2qq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512DQ
vcvtps2qq (%eax){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2qq 4064(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq 4096(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2qq -4096(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq -4128(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2qq 508(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq 512(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2qq -512(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq -516(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2uqq %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq %ymm5, %zmm6{%k7}{z} # AVX512DQ
vcvtps2uqq {rn-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq {ru-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq {rd-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq {rz-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtps2uqq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512DQ
vcvtps2uqq (%eax){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2uqq 4064(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq 4096(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2uqq -4096(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq -4128(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2uqq 508(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq 512(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2uqq -512(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq -516(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtqq2pd %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd %zmm5, %zmm6{%k7} # AVX512DQ
vcvtqq2pd %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtqq2pd {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd (%ecx), %zmm6 # AVX512DQ
vcvtqq2pd -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtqq2pd (%eax){1to8}, %zmm6 # AVX512DQ
vcvtqq2pd 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtqq2pd 8192(%edx), %zmm6 # AVX512DQ
vcvtqq2pd -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtqq2pd -8256(%edx), %zmm6 # AVX512DQ
vcvtqq2pd 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtqq2pd 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtqq2pd -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtqq2pd -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtqq2ps %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps %zmm5, %ymm6{%k7}{z} # AVX512DQ
vcvtqq2ps {rn-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps {ru-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps {rd-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps {rz-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps (%ecx), %ymm6{%k7} # AVX512DQ
vcvtqq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512DQ
vcvtqq2ps (%eax){1to8}, %ymm6{%k7} # AVX512DQ
vcvtqq2ps 8128(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps 8192(%edx), %ymm6{%k7} # AVX512DQ
vcvtqq2ps -8192(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps -8256(%edx), %ymm6{%k7} # AVX512DQ
vcvtqq2ps 1016(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps 1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vcvtqq2ps -1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps -1032(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vcvtuqq2pd %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd %zmm5, %zmm6{%k7} # AVX512DQ
vcvtuqq2pd %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtuqq2pd {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd (%ecx), %zmm6 # AVX512DQ
vcvtuqq2pd -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtuqq2pd (%eax){1to8}, %zmm6 # AVX512DQ
vcvtuqq2pd 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtuqq2pd 8192(%edx), %zmm6 # AVX512DQ
vcvtuqq2pd -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtuqq2pd -8256(%edx), %zmm6 # AVX512DQ
vcvtuqq2pd 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtuqq2pd 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtuqq2pd -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtuqq2pd -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtuqq2ps %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps %zmm5, %ymm6{%k7}{z} # AVX512DQ
vcvtuqq2ps {rn-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps {ru-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps {rd-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps {rz-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps (%ecx), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps (%eax){1to8}, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps 8128(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps 8192(%edx), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps -8192(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps -8256(%edx), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps 1016(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps 1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps -1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps -1032(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vextractf64x2 $0xab, %zmm5, %xmm6{%k7} # AVX512DQ
vextractf64x2 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512DQ
vextractf64x2 $123, %zmm5, %xmm6{%k7} # AVX512DQ
vextractf32x8 $0xab, %zmm5, %ymm6{%k7} # AVX512DQ
vextractf32x8 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512DQ
vextractf32x8 $123, %zmm5, %ymm6{%k7} # AVX512DQ
vextracti64x2 $0xab, %zmm5, %xmm6{%k7} # AVX512DQ
vextracti64x2 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512DQ
vextracti64x2 $123, %zmm5, %xmm6{%k7} # AVX512DQ
vextracti32x8 $0xab, %zmm5, %ymm6{%k7} # AVX512DQ
vextracti32x8 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512DQ
vextracti32x8 $123, %zmm5, %ymm6{%k7} # AVX512DQ
vfpclasspd $0xab, %zmm6, %k5 # AVX512DQ
vfpclasspd $0xab, %zmm6, %k5{%k7} # AVX512DQ
vfpclasspd $123, %zmm6, %k5 # AVX512DQ
vfpclasspdz $123, (%ecx), %k5 # AVX512DQ
vfpclasspdz $123, -123456(%esp,%esi,8), %k5 # AVX512DQ
vfpclasspd $123, (%eax){1to8}, %k5 # AVX512DQ
vfpclasspdz $123, 8128(%edx), %k5 # AVX512DQ Disp8
vfpclasspdz $123, 8192(%edx), %k5 # AVX512DQ
vfpclasspdz $123, -8192(%edx), %k5 # AVX512DQ Disp8
vfpclasspdz $123, -8256(%edx), %k5 # AVX512DQ
vfpclasspdz $123, 1016(%edx){1to8}, %k5 # AVX512DQ Disp8
vfpclasspdz $123, 1024(%edx){1to8}, %k5 # AVX512DQ
vfpclasspdz $123, -1024(%edx){1to8}, %k5 # AVX512DQ Disp8
vfpclasspdz $123, -1032(%edx){1to8}, %k5 # AVX512DQ
vfpclassps $0xab, %zmm6, %k5 # AVX512DQ
vfpclassps $0xab, %zmm6, %k5{%k7} # AVX512DQ
vfpclassps $123, %zmm6, %k5 # AVX512DQ
vfpclasspsz $123, (%ecx), %k5 # AVX512DQ
vfpclasspsz $123, -123456(%esp,%esi,8), %k5 # AVX512DQ
vfpclassps $123, (%eax){1to16}, %k5 # AVX512DQ
vfpclasspsz $123, 8128(%edx), %k5 # AVX512DQ Disp8
vfpclasspsz $123, 8192(%edx), %k5 # AVX512DQ
vfpclasspsz $123, -8192(%edx), %k5 # AVX512DQ Disp8
vfpclasspsz $123, -8256(%edx), %k5 # AVX512DQ
vfpclasspsz $123, 508(%edx){1to16}, %k5 # AVX512DQ Disp8
vfpclasspsz $123, 512(%edx){1to16}, %k5 # AVX512DQ
vfpclasspsz $123, -512(%edx){1to16}, %k5 # AVX512DQ Disp8
vfpclasspsz $123, -516(%edx){1to16}, %k5 # AVX512DQ
vfpclasssd $0xab, %xmm6, %k5{%k7} # AVX512DQ
vfpclasssd $123, %xmm6, %k5{%k7} # AVX512DQ
vfpclasssd $123, (%ecx), %k5{%k7} # AVX512DQ
vfpclasssd $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512DQ
vfpclasssd $123, 1016(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclasssd $123, 1024(%edx), %k5{%k7} # AVX512DQ
vfpclasssd $123, -1024(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclasssd $123, -1032(%edx), %k5{%k7} # AVX512DQ
vfpclassss $0xab, %xmm6, %k5{%k7} # AVX512DQ
vfpclassss $123, %xmm6, %k5{%k7} # AVX512DQ
vfpclassss $123, (%ecx), %k5{%k7} # AVX512DQ
vfpclassss $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512DQ
vfpclassss $123, 508(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclassss $123, 512(%edx), %k5{%k7} # AVX512DQ
vfpclassss $123, -512(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclassss $123, -516(%edx), %k5{%k7} # AVX512DQ
vinsertf64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinsertf64x2 $123, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, 2032(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf64x2 $123, 2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, -2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf64x2 $123, -2064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinsertf32x8 $123, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, 4064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf32x8 $123, 4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, -4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf32x8 $123, -4128(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinserti64x2 $123, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, 2032(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti64x2 $123, 2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, -2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti64x2 $123, -2064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinserti32x8 $123, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, 4064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti32x8 $123, 4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, -4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti32x8 $123, -4128(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vbroadcasti32x2 %xmm7, %zmm6 # AVX512DQ
vbroadcasti32x2 %xmm7, %zmm6{%k7} # AVX512DQ
vbroadcasti32x2 %xmm7, %zmm6{%k7}{z} # AVX512DQ
vbroadcasti32x2 (%ecx), %zmm6 # AVX512DQ
vbroadcasti32x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcasti32x2 1016(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x2 1024(%edx), %zmm6 # AVX512DQ
vbroadcasti32x2 -1024(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x2 -1032(%edx), %zmm6 # AVX512DQ
vpmullq %zmm4, %zmm5, %zmm6 # AVX512DQ
vpmullq %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vpmullq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vpmullq (%ecx), %zmm5, %zmm6 # AVX512DQ
vpmullq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vpmullq (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vpmullq 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vpmullq -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vpmullq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vpmullq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangepd $0xab, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vrangepd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vrangepd $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, (%ecx), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangeps $0xab, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vrangeps $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vrangeps $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, (%ecx), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vrangesd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512DQ
vrangesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangesd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangesd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512DQ
vrangess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangess $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangess $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vandpd %zmm4, %zmm5, %zmm6 # AVX512DQ
vandpd %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandpd (%ecx), %zmm5, %zmm6 # AVX512DQ
vandpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandpd (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vandpd 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandpd 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandpd -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandpd -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vandpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vandpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vandpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vandps %zmm4, %zmm5, %zmm6 # AVX512DQ
vandps %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandps (%ecx), %zmm5, %zmm6 # AVX512DQ
vandps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandps (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vandps 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandps 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandps -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandps -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vandps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vandps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vandps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vandnpd %zmm4, %zmm5, %zmm6 # AVX512DQ
vandnpd %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandnpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandnpd (%ecx), %zmm5, %zmm6 # AVX512DQ
vandnpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandnpd (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vandnpd 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandnpd 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandnpd -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandnpd -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandnpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vandnpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vandnpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vandnpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vandnps %zmm4, %zmm5, %zmm6 # AVX512DQ
vandnps %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandnps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandnps (%ecx), %zmm5, %zmm6 # AVX512DQ
vandnps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandnps (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vandnps 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandnps 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandnps -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandnps -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandnps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vandnps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vandnps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vandnps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vorpd %zmm4, %zmm5, %zmm6 # AVX512DQ
vorpd %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vorpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vorpd (%ecx), %zmm5, %zmm6 # AVX512DQ
vorpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vorpd (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vorpd 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vorpd 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vorpd -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vorpd -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vorpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vorpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vorpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vorpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vorps %zmm4, %zmm5, %zmm6 # AVX512DQ
vorps %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vorps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vorps (%ecx), %zmm5, %zmm6 # AVX512DQ
vorps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vorps (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vorps 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vorps 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vorps -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vorps -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vorps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vorps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vorps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vorps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vxorpd %zmm4, %zmm5, %zmm6 # AVX512DQ
vxorpd %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vxorpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vxorpd (%ecx), %zmm5, %zmm6 # AVX512DQ
vxorpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vxorpd (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vxorpd 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vxorpd 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vxorpd -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vxorpd -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vxorpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vxorpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vxorpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vxorpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vxorps %zmm4, %zmm5, %zmm6 # AVX512DQ
vxorps %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vxorps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vxorps (%ecx), %zmm5, %zmm6 # AVX512DQ
vxorps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vxorps (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vxorps 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vxorps 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vxorps -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vxorps -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vxorps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vxorps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vxorps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vxorps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vreducepd $0xab, %zmm5, %zmm6 # AVX512DQ
vreducepd $0xab, %zmm5, %zmm6{%k7} # AVX512DQ
vreducepd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vreducepd $0xab, {sae}, %zmm5, %zmm6 # AVX512DQ
vreducepd $123, %zmm5, %zmm6 # AVX512DQ
vreducepd $123, {sae}, %zmm5, %zmm6 # AVX512DQ
vreducepd $123, (%ecx), %zmm6 # AVX512DQ
vreducepd $123, -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vreducepd $123, (%eax){1to8}, %zmm6 # AVX512DQ
vreducepd $123, 8128(%edx), %zmm6 # AVX512DQ Disp8
vreducepd $123, 8192(%edx), %zmm6 # AVX512DQ
vreducepd $123, -8192(%edx), %zmm6 # AVX512DQ Disp8
vreducepd $123, -8256(%edx), %zmm6 # AVX512DQ
vreducepd $123, 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vreducepd $123, 1024(%edx){1to8}, %zmm6 # AVX512DQ
vreducepd $123, -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vreducepd $123, -1032(%edx){1to8}, %zmm6 # AVX512DQ
vreduceps $0xab, %zmm5, %zmm6 # AVX512DQ
vreduceps $0xab, %zmm5, %zmm6{%k7} # AVX512DQ
vreduceps $0xab, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vreduceps $0xab, {sae}, %zmm5, %zmm6 # AVX512DQ
vreduceps $123, %zmm5, %zmm6 # AVX512DQ
vreduceps $123, {sae}, %zmm5, %zmm6 # AVX512DQ
vreduceps $123, (%ecx), %zmm6 # AVX512DQ
vreduceps $123, -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vreduceps $123, (%eax){1to16}, %zmm6 # AVX512DQ
vreduceps $123, 8128(%edx), %zmm6 # AVX512DQ Disp8
vreduceps $123, 8192(%edx), %zmm6 # AVX512DQ
vreduceps $123, -8192(%edx), %zmm6 # AVX512DQ Disp8
vreduceps $123, -8256(%edx), %zmm6 # AVX512DQ
vreduceps $123, 508(%edx){1to16}, %zmm6 # AVX512DQ Disp8
vreduceps $123, 512(%edx){1to16}, %zmm6 # AVX512DQ
vreduceps $123, -512(%edx){1to16}, %zmm6 # AVX512DQ Disp8
vreduceps $123, -516(%edx){1to16}, %zmm6 # AVX512DQ
vreducesd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512DQ
vreducesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vreducesd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vreducesd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vreducesd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512DQ
vreducess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vreducess $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vreducess $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vreducess $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
# AVX512DQ test vectors, AT&T syntax (operand order: src, dst).
# NOTE(review): generated testsuite fixture — instruction text must stay
# byte-identical to match the paired expected-disassembly (.d) file.
# Byte-granularity opmask-register operations (k0-k7).
	kandb	%k7, %k6, %k5	 # AVX512DQ
	kandnb	%k7, %k6, %k5	 # AVX512DQ
	korb	%k7, %k6, %k5	 # AVX512DQ
	kxnorb	%k7, %k6, %k5	 # AVX512DQ
	kxorb	%k7, %k6, %k5	 # AVX512DQ
	knotb	%k6, %k5	 # AVX512DQ
	kortestb	%k6, %k5	 # AVX512DQ
	ktestw	%k6, %k5	 # AVX512DQ
	ktestb	%k6, %k5	 # AVX512DQ
	kshiftrb	$0xab, %k6, %k5	 # AVX512DQ
	kshiftrb	$123, %k6, %k5	 # AVX512DQ
	kshiftlb	$0xab, %k6, %k5	 # AVX512DQ
	kshiftlb	$123, %k6, %k5	 # AVX512DQ
	kmovb	%k6, %k5	 # AVX512DQ
	kmovb	(%ecx), %k5	 # AVX512DQ
	kmovb	-123456(%esp,%esi,8), %k5	 # AVX512DQ
	kmovb	%k5, (%ecx)	 # AVX512DQ
	kmovb	%k5, -123456(%esp,%esi,8)	 # AVX512DQ
	kmovb	%eax, %k5	 # AVX512DQ
	kmovb	%ebp, %k5	 # AVX512DQ
	kmovb	%k5, %eax	 # AVX512DQ
	kmovb	%k5, %ebp	 # AVX512DQ
	kaddw	%k7, %k6, %k5	 # AVX512DQ
	kaddb	%k7, %k6, %k5	 # AVX512DQ
# 128-bit/256-bit extract-to-memory forms; Disp8 lines sit exactly on the
# compressed-displacement boundary (disp8*N scaling), their neighbours just past it.
	vextractf64x2	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextractf64x2	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextractf64x2	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextractf64x2	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextractf64x2	$123, %zmm6, 2032(%edx)	 # AVX512DQ Disp8
	vextractf64x2	$123, %zmm6, 2048(%edx)	 # AVX512DQ
	vextractf64x2	$123, %zmm6, -2048(%edx)	 # AVX512DQ Disp8
	vextractf64x2	$123, %zmm6, -2064(%edx)	 # AVX512DQ
	vextractf32x8	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextractf32x8	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextractf32x8	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextractf32x8	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextractf32x8	$123, %zmm6, 4064(%edx)	 # AVX512DQ Disp8
	vextractf32x8	$123, %zmm6, 4096(%edx)	 # AVX512DQ
	vextractf32x8	$123, %zmm6, -4096(%edx)	 # AVX512DQ Disp8
	vextractf32x8	$123, %zmm6, -4128(%edx)	 # AVX512DQ
	vextracti64x2	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextracti64x2	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextracti64x2	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextracti64x2	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextracti64x2	$123, %zmm6, 2032(%edx)	 # AVX512DQ Disp8
	vextracti64x2	$123, %zmm6, 2048(%edx)	 # AVX512DQ
	vextracti64x2	$123, %zmm6, -2048(%edx)	 # AVX512DQ Disp8
	vextracti64x2	$123, %zmm6, -2064(%edx)	 # AVX512DQ
	vextracti32x8	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextracti32x8	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextracti32x8	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextracti32x8	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextracti32x8	$123, %zmm6, 4064(%edx)	 # AVX512DQ Disp8
	vextracti32x8	$123, %zmm6, 4096(%edx)	 # AVX512DQ
	vextracti32x8	$123, %zmm6, -4096(%edx)	 # AVX512DQ Disp8
	vextracti32x8	$123, %zmm6, -4128(%edx)	 # AVX512DQ
# Truncating double/single -> signed/unsigned qword conversions, with
# {sae}, masking, {1to8} broadcast and compressed-Disp8 coverage.
	vcvttpd2qq	%zmm5, %zmm6	 # AVX512DQ
	vcvttpd2qq	%zmm5, %zmm6{%k7}	 # AVX512DQ
	vcvttpd2qq	%zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttpd2qq	{sae}, %zmm5, %zmm6	 # AVX512DQ
	vcvttpd2qq	(%ecx), %zmm6	 # AVX512DQ
	vcvttpd2qq	-123456(%esp,%esi,8), %zmm6	 # AVX512DQ
	vcvttpd2qq	(%eax){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2qq	8128(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	8192(%edx), %zmm6	 # AVX512DQ
	vcvttpd2qq	-8192(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	-8256(%edx), %zmm6	 # AVX512DQ
	vcvttpd2qq	1016(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	1024(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2qq	-1024(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	-1032(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2uqq	%zmm5, %zmm6	 # AVX512DQ
	vcvttpd2uqq	%zmm5, %zmm6{%k7}	 # AVX512DQ
	vcvttpd2uqq	%zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttpd2uqq	{sae}, %zmm5, %zmm6	 # AVX512DQ
	vcvttpd2uqq	(%ecx), %zmm6	 # AVX512DQ
	vcvttpd2uqq	-123456(%esp,%esi,8), %zmm6	 # AVX512DQ
	vcvttpd2uqq	(%eax){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2uqq	8128(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	8192(%edx), %zmm6	 # AVX512DQ
	vcvttpd2uqq	-8192(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	-8256(%edx), %zmm6	 # AVX512DQ
	vcvttpd2uqq	1016(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	1024(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2uqq	-1024(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	-1032(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttps2qq	%ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	%ymm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttps2qq	{sae}, %ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	(%ecx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	(%eax){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	4064(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	4096(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	-4096(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	-4128(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	508(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	-512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	-516(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	%ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	%ymm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttps2uqq	{sae}, %ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	(%ecx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	(%eax){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	4064(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	4096(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	-4096(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	-4128(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	508(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	-512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	-516(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
# Vector <-> opmask move forms.
	vpmovd2m	%zmm6, %k5	 # AVX512DQ
	vpmovq2m	%zmm6, %k5	 # AVX512DQ
	vpmovm2d	%k5, %zmm6	 # AVX512DQ
	vpmovm2q	%k5, %zmm6	 # AVX512DQ
# Same AVX512DQ coverage repeated in Intel syntax (operand order: dst, src).
# NOTE(review): generated testsuite fixture — instruction text must stay
# byte-identical to match the paired expected-disassembly (.d) file.
.intel_syntax noprefix
# 128-bit/256-bit broadcast-from-memory forms with masking and Disp8 boundaries.
	vbroadcastf32x8	zmm6, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x8	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x8	zmm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x8	zmm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf64x2	zmm6{k7}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf64x2	zmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx+2032]	 # AVX512DQ Disp8
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx+2048]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx-2048]	 # AVX512DQ Disp8
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx-2064]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x8	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x8	zmm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti64x2	zmm6{k7}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti64x2	zmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx+2032]	 # AVX512DQ Disp8
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx+2048]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx-2048]	 # AVX512DQ Disp8
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx-2064]	 # AVX512DQ
	vbroadcastf32x2	zmm6, xmm7	 # AVX512DQ
	vbroadcastf32x2	zmm6{k7}, xmm7	 # AVX512DQ
	vbroadcastf32x2	zmm6{k7}{z}, xmm7	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [edx+1016]	 # AVX512DQ Disp8
	vbroadcastf32x2	zmm6, QWORD PTR [edx+1024]	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [edx-1024]	 # AVX512DQ Disp8
	vbroadcastf32x2	zmm6, QWORD PTR [edx-1032]	 # AVX512DQ
# Conversions with embedded-rounding ({rn,ru,rd,rz}-sae) and "bcst" broadcast forms.
	vcvtpd2qq	zmm6, zmm5	 # AVX512DQ
	vcvtpd2qq	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtpd2qq	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtpd2qq	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtpd2qq	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtpd2qq	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5	 # AVX512DQ
	vcvtpd2uqq	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtpd2uqq	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtpd2uqq	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtpd2uqq	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtpd2uqq	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5	 # AVX512DQ
	vcvtps2qq	zmm6{k7}{z}, ymm5	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{rn-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{ru-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{rd-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{rz-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, dword bcst [eax]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, dword bcst [edx+508]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, dword bcst [edx+512]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, dword bcst [edx-512]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, dword bcst [edx-516]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, DWORD BCST [edx+508]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, ymm5	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}{z}, ymm5	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{rn-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{ru-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{rd-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{rz-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, dword bcst [eax]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, dword bcst [edx+508]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, dword bcst [edx+512]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, dword bcst [edx-512]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, dword bcst [edx-516]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, DWORD BCST [edx+508]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, zmm5	 # AVX512DQ
	vcvtqq2pd	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtqq2pd	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtqq2pd	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtqq2pd	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtqq2pd	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{rn-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{ru-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{rd-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{rz-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, qword bcst [eax]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, qword bcst [edx+1024]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, qword bcst [edx-1032]	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5	 # AVX512DQ
	vcvtuqq2pd	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtuqq2pd	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtuqq2pd	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtuqq2pd	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtuqq2pd	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{rn-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{ru-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{rd-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{rz-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, qword bcst [eax]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx+1024]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx-1032]	 # AVX512DQ
# Extract to register with masking/zeroing.
	vextractf64x2	xmm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextractf64x2	xmm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextractf64x2	xmm6{k7}, zmm5, 123	 # AVX512DQ
	vextractf32x8	ymm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextractf32x8	ymm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextractf32x8	ymm6{k7}, zmm5, 123	 # AVX512DQ
	vextracti64x2	xmm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextracti64x2	xmm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextracti64x2	xmm6{k7}, zmm5, 123	 # AVX512DQ
	vextracti32x8	ymm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextracti32x8	ymm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextracti32x8	ymm6{k7}, zmm5, 123	 # AVX512DQ
# FP-class tests writing an opmask result.
	vfpclasspd	k5, zmm6, 0xab	 # AVX512DQ
	vfpclasspd	k5{k7}, zmm6, 0xab	 # AVX512DQ
	vfpclasspd	k5, zmm6, 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclasspd	k5, [eax]{1to8}, 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vfpclasspd	k5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vfpclasspd	k5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vfpclasspd	k5, QWORD BCST [edx+1016]{1to8}, 123	 # AVX512DQ Disp8
	vfpclasspd	k5, QWORD BCST [edx+1024]{1to8}, 123	 # AVX512DQ
	vfpclasspd	k5, QWORD BCST [edx-1024]{1to8}, 123	 # AVX512DQ Disp8
	vfpclasspd	k5, QWORD BCST [edx-1032]{1to8}, 123	 # AVX512DQ
	vfpclassps	k5, zmm6, 0xab	 # AVX512DQ
	vfpclassps	k5{k7}, zmm6, 0xab	 # AVX512DQ
	vfpclassps	k5, zmm6, 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclassps	k5, [eax]{1to16}, 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vfpclassps	k5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vfpclassps	k5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vfpclassps	k5, DWORD BCST [edx+508]{1to16}, 123	 # AVX512DQ Disp8
	vfpclassps	k5, DWORD BCST [edx+512]{1to16}, 123	 # AVX512DQ
	vfpclassps	k5, DWORD BCST [edx-512]{1to16}, 123	 # AVX512DQ Disp8
	vfpclassps	k5, DWORD BCST [edx-516]{1to16}, 123	 # AVX512DQ
	vfpclasssd	k5{k7}, xmm6, 0xab	 # AVX512DQ
	vfpclasssd	k5{k7}, xmm6, 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [ecx], 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [edx+1016], 123	 # AVX512DQ Disp8
	vfpclasssd	k5{k7}, QWORD PTR [edx+1024], 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [edx-1024], 123	 # AVX512DQ Disp8
	vfpclasssd	k5{k7}, QWORD PTR [edx-1032], 123	 # AVX512DQ
	vfpclassss	k5{k7}, xmm6, 0xab	 # AVX512DQ
	vfpclassss	k5{k7}, xmm6, 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [ecx], 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [edx+508], 123	 # AVX512DQ Disp8
	vfpclassss	k5{k7}, DWORD PTR [edx+512], 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [edx-512], 123	 # AVX512DQ Disp8
	vfpclassss	k5{k7}, DWORD PTR [edx-516], 123	 # AVX512DQ
# 128-bit/256-bit insert forms, register and memory sources.
	vinsertf64x2	zmm6{k7}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinsertf64x2	zmm6{k7}{z}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, xmm4, 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123	 # AVX512DQ Disp8
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123	 # AVX512DQ Disp8
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinsertf32x8	zmm6{k7}{z}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, ymm4, 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123	 # AVX512DQ Disp8
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123	 # AVX512DQ Disp8
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinserti64x2	zmm6{k7}{z}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, xmm4, 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123	 # AVX512DQ Disp8
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123	 # AVX512DQ Disp8
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinserti32x8	zmm6{k7}{z}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, ymm4, 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123	 # AVX512DQ Disp8
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123	 # AVX512DQ Disp8
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123	 # AVX512DQ
	vbroadcasti32x2	zmm6, xmm7	 # AVX512DQ
	vbroadcasti32x2	zmm6{k7}, xmm7	 # AVX512DQ
	vbroadcasti32x2	zmm6{k7}{z}, xmm7	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [edx+1016]	 # AVX512DQ Disp8
	vbroadcasti32x2	zmm6, QWORD PTR [edx+1024]	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [edx-1024]	 # AVX512DQ Disp8
	vbroadcasti32x2	zmm6, QWORD PTR [edx-1032]	 # AVX512DQ
	vpmullq	zmm6, zmm5, zmm4	 # AVX512DQ
	vpmullq	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vpmullq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vpmullq	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vpmullq	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vpmullq	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
# Range/reduce families, packed and scalar, including {sae} forms.
	vrangepd	zmm6, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangepd	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangepd	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4{sae}, 0xab	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4, 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4{sae}, 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, qword bcst [eax], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangeps	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangeps	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4{sae}, 0xab	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4, 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4{sae}, 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, dword bcst [eax], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangesd	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4, 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [ecx], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123	 # AVX512DQ Disp8
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123	 # AVX512DQ Disp8
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangess	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4, 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [ecx], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx+508], 123	 # AVX512DQ Disp8
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx+512], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx-512], 123	 # AVX512DQ Disp8
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx-516], 123	 # AVX512DQ
# Packed FP logic ops (AVX512DQ adds EVEX-encoded vand/vandn/vor/vxor).
	vandpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vandpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vandpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vandps	zmm6, zmm5, zmm4	 # AVX512DQ
	vandps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vandps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vandnpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vandnpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandnpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandnpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandnpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vandnpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vandnps	zmm6, zmm5, zmm4	 # AVX512DQ
	vandnps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandnps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandnps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandnps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vandnps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vorpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vorpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vorpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vorpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vorpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vorpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vorps	zmm6, zmm5, zmm4	 # AVX512DQ
	vorps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vorps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vorps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vorps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vorps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vxorpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vxorpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vxorpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vxorpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vxorpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vxorpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vxorps	zmm6, zmm5, zmm4	 # AVX512DQ
	vxorps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vxorps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vxorps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vxorps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vxorps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vxorps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vxorps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vreducepd	zmm6, zmm5, 0xab	 # AVX512DQ
	vreducepd	zmm6{k7}, zmm5, 0xab	 # AVX512DQ
	vreducepd	zmm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vreducepd	zmm6, zmm5{sae}, 0xab	 # AVX512DQ
	vreducepd	zmm6, zmm5, 123	 # AVX512DQ
	vreducepd	zmm6, zmm5{sae}, 123	 # AVX512DQ
	vreducepd	zmm6, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vreducepd	zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vreducepd	zmm6, qword bcst [eax], 123	 # AVX512DQ
	vreducepd	zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vreducepd	zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vreducepd	zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vreducepd	zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vreducepd	zmm6, qword bcst [edx+1016], 123	 # AVX512DQ Disp8
	vreducepd	zmm6, qword bcst [edx+1024], 123	 # AVX512DQ
	vreducepd	zmm6, qword bcst [edx-1024], 123	 # AVX512DQ Disp8
	vreducepd	zmm6, qword bcst [edx-1032], 123	 # AVX512DQ
	vreduceps	zmm6, zmm5, 0xab	 # AVX512DQ
	vreduceps	zmm6{k7}, zmm5, 0xab	 # AVX512DQ
	vreduceps	zmm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vreduceps	zmm6, zmm5{sae}, 0xab	 # AVX512DQ
	vreduceps	zmm6, zmm5, 123	 # AVX512DQ
	vreduceps	zmm6, zmm5{sae}, 123	 # AVX512DQ
	vreduceps	zmm6, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vreduceps	zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vreduceps	zmm6, dword bcst [eax], 123	 # AVX512DQ
	vreduceps	zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vreduceps	zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vreduceps	zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vreduceps	zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vreduceps	zmm6, dword bcst [edx+508], 123	 # AVX512DQ Disp8
	vreduceps	zmm6, dword bcst [edx+512], 123	 # AVX512DQ
	vreduceps	zmm6, dword bcst [edx-512], 123	 # AVX512DQ Disp8
	vreduceps	zmm6, dword bcst [edx-516], 123	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512DQ
	vreducesd	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, xmm4, 123	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, QWORD PTR [ecx], 123	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123	 # AVX512DQ Disp8
	vreducesd	xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123	 # AVX512DQ
	vreducesd	xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123	 # AVX512DQ Disp8
	vreducesd	xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512DQ
	vreducess	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, xmm4, 123	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, DWORD PTR [ecx], 123	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, DWORD PTR [edx+508], 123	 # AVX512DQ Disp8
	vreducess	xmm6{k7}, xmm5, DWORD PTR [edx+512], 123	 # AVX512DQ
	vreducess	xmm6{k7}, xmm5, DWORD PTR [edx-512], 123	 # AVX512DQ Disp8
	vreducess	xmm6{k7}, xmm5, DWORD PTR [edx-516], 123	 # AVX512DQ
# Opmask operations, Intel syntax.
	kandb	k5, k6, k7	 # AVX512DQ
	kandnb	k5, k6, k7	 # AVX512DQ
	korb	k5, k6, k7	 # AVX512DQ
	kxnorb	k5, k6, k7	 # AVX512DQ
	kxorb	k5, k6, k7	 # AVX512DQ
	knotb	k5, k6	 # AVX512DQ
	kortestb	k5, k6	 # AVX512DQ
	ktestw	k5, k6	 # AVX512DQ
	ktestb	k5, k6	 # AVX512DQ
	kshiftrb	k5, k6, 0xab	 # AVX512DQ
	kshiftrb	k5, k6, 123	 # AVX512DQ
	kshiftlb	k5, k6, 0xab	 # AVX512DQ
	kshiftlb	k5, k6, 123	 # AVX512DQ
	kmovb	k5, k6	 # AVX512DQ
	kmovb	k5, BYTE PTR [ecx]	 # AVX512DQ
	kmovb	k5, BYTE PTR [esp+esi*8-123456]	 # AVX512DQ
	kmovb	BYTE PTR [ecx], k5	 # AVX512DQ
	kmovb	BYTE PTR [esp+esi*8-123456], k5	 # AVX512DQ
	kmovb	k5, eax	 # AVX512DQ
	kmovb	k5, ebp	 # AVX512DQ
	kmovb	eax, k5	 # AVX512DQ
	kmovb	ebp, k5	 # AVX512DQ
	kaddw	k5, k6, k7	 # AVX512DQ
	kaddb	k5, k6, k7	 # AVX512DQ
# Extract-to-memory forms, Intel syntax.
	vextractf64x2	XMMWORD PTR [ecx], zmm6, 0xab	 # AVX512DQ
	vextractf64x2	XMMWORD PTR [ecx]{k7}, zmm6, 0xab	 # AVX512DQ
	vextractf64x2	XMMWORD PTR [ecx], zmm6, 123	 # AVX512DQ
	vextractf64x2	XMMWORD PTR [esp+esi*8-123456], zmm6, 123	 # AVX512DQ
	vextractf64x2	XMMWORD PTR [edx+2032], zmm6, 123	 # AVX512DQ Disp8
	vextractf64x2	XMMWORD PTR [edx+2048], zmm6, 123	 # AVX512DQ
	vextractf64x2	XMMWORD PTR [edx-2048], zmm6, 123	 # AVX512DQ Disp8
	vextractf64x2	XMMWORD PTR [edx-2064], zmm6, 123	 # AVX512DQ
	vextractf32x8	YMMWORD PTR [ecx], zmm6, 0xab	 # AVX512DQ
	vextractf32x8	YMMWORD PTR [ecx]{k7}, zmm6, 0xab	 # AVX512DQ
	vextractf32x8	YMMWORD PTR [ecx], zmm6, 123	 # AVX512DQ
	vextractf32x8	YMMWORD PTR [esp+esi*8-123456], zmm6, 123	 # AVX512DQ
	vextractf32x8	YMMWORD PTR [edx+4064], zmm6, 123	 # AVX512DQ Disp8
	vextractf32x8	YMMWORD PTR [edx+4096], zmm6, 123	 # AVX512DQ
	vextractf32x8	YMMWORD PTR [edx-4096], zmm6, 123	 # AVX512DQ Disp8
	vextractf32x8	YMMWORD PTR [edx-4128], zmm6, 123	 # AVX512DQ
	vextracti64x2	XMMWORD PTR [ecx], zmm6, 0xab	 # AVX512DQ
	vextracti64x2	XMMWORD PTR [ecx]{k7}, zmm6, 0xab	 # AVX512DQ
	vextracti64x2	XMMWORD PTR [ecx], zmm6, 123	 # AVX512DQ
	vextracti64x2	XMMWORD PTR [esp+esi*8-123456], zmm6, 123	 # AVX512DQ
	vextracti64x2	XMMWORD PTR [edx+2032], zmm6, 123	 # AVX512DQ Disp8
	vextracti64x2	XMMWORD PTR [edx+2048], zmm6, 123	 # AVX512DQ
	vextracti64x2	XMMWORD PTR [edx-2048], zmm6, 123	 # AVX512DQ Disp8
	vextracti64x2	XMMWORD PTR [edx-2064], zmm6, 123	 # AVX512DQ
	vextracti32x8	YMMWORD PTR [ecx], zmm6, 0xab	 # AVX512DQ
	vextracti32x8	YMMWORD PTR [ecx]{k7}, zmm6, 0xab	 # AVX512DQ
	vextracti32x8	YMMWORD PTR [ecx], zmm6, 123	 # AVX512DQ
	vextracti32x8	YMMWORD PTR [esp+esi*8-123456], zmm6, 123	 # AVX512DQ
	vextracti32x8	YMMWORD PTR [edx+4064], zmm6, 123	 # AVX512DQ Disp8
	vextracti32x8	YMMWORD PTR [edx+4096], zmm6, 123	 # AVX512DQ
	vextracti32x8	YMMWORD PTR [edx-4096], zmm6, 123	 # AVX512DQ Disp8
	vextracti32x8	YMMWORD PTR [edx-4128], zmm6, 123	 # AVX512DQ
vcvttpd2qq zmm6, zmm5 # AVX512DQ
vcvttpd2qq zmm6{k7}, zmm5 # AVX512DQ
vcvttpd2qq zmm6{k7}{z}, zmm5 # AVX512DQ
vcvttpd2qq zmm6, zmm5{sae} # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [ecx] # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttpd2qq zmm6, qword bcst [eax] # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [edx+8128] # AVX512DQ Disp8
vcvttpd2qq zmm6, ZMMWORD PTR [edx+8192] # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [edx-8192] # AVX512DQ Disp8
vcvttpd2qq zmm6, ZMMWORD PTR [edx-8256] # AVX512DQ
vcvttpd2qq zmm6, qword bcst [edx+1016] # AVX512DQ Disp8
vcvttpd2qq zmm6, qword bcst [edx+1024] # AVX512DQ
vcvttpd2qq zmm6, qword bcst [edx-1024] # AVX512DQ Disp8
vcvttpd2qq zmm6, qword bcst [edx-1032] # AVX512DQ
vcvttpd2uqq zmm6, zmm5 # AVX512DQ
vcvttpd2uqq zmm6{k7}, zmm5 # AVX512DQ
vcvttpd2uqq zmm6{k7}{z}, zmm5 # AVX512DQ
vcvttpd2uqq zmm6, zmm5{sae} # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [ecx] # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttpd2uqq zmm6, qword bcst [eax] # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [edx+8128] # AVX512DQ Disp8
vcvttpd2uqq zmm6, ZMMWORD PTR [edx+8192] # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [edx-8192] # AVX512DQ Disp8
vcvttpd2uqq zmm6, ZMMWORD PTR [edx-8256] # AVX512DQ
vcvttpd2uqq zmm6, qword bcst [edx+1016] # AVX512DQ Disp8
vcvttpd2uqq zmm6, qword bcst [edx+1024] # AVX512DQ
vcvttpd2uqq zmm6, qword bcst [edx-1024] # AVX512DQ Disp8
vcvttpd2uqq zmm6, qword bcst [edx-1032] # AVX512DQ
vcvttps2qq zmm6{k7}, ymm5 # AVX512DQ
vcvttps2qq zmm6{k7}{z}, ymm5 # AVX512DQ
vcvttps2qq zmm6{k7}, ymm5{sae} # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [ecx] # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttps2qq zmm6{k7}, dword bcst [eax] # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512DQ
vcvttps2qq zmm6{k7}, dword bcst [edx+508] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, dword bcst [edx+512] # AVX512DQ
vcvttps2qq zmm6{k7}, dword bcst [edx-512] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, dword bcst [edx-516] # AVX512DQ
vcvttps2qq zmm6{k7}, DWORD BCST [edx+508] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, ymm5 # AVX512DQ
vcvttps2uqq zmm6{k7}{z}, ymm5 # AVX512DQ
vcvttps2uqq zmm6{k7}, ymm5{sae} # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [ecx] # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttps2uqq zmm6{k7}, dword bcst [eax] # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512DQ
vcvttps2uqq zmm6{k7}, dword bcst [edx+508] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, dword bcst [edx+512] # AVX512DQ
vcvttps2uqq zmm6{k7}, dword bcst [edx-512] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, dword bcst [edx-516] # AVX512DQ
vcvttps2uqq zmm6{k7}, DWORD BCST [edx+508] # AVX512DQ Disp8
vpmovd2m k5, zmm6 # AVX512DQ
vpmovq2m k5, zmm6 # AVX512DQ
vpmovm2d zmm6, k5 # AVX512DQ
vpmovm2q zmm6, k5 # AVX512DQ
|
tactcomplabs/xbgas-binutils-gdb
| 2,362
|
gas/testsuite/gas/i386/x86-64-avx512_bf16.s
|
# Check 64bit AVX512_BF16 instructions
# NOTE(review): gas testsuite fixture. Each line pins one encoding form of
# vcvtne2ps2bf16 / vcvtneps2bf16 / vdpbf16ps (register, {%k7} masking,
# {1toN} broadcast, Disp8*N-compressed displacement, {z} zeroing).  The
# paired .d file matches the disassembly, so the instruction text must
# stay byte-identical; only comments may change.
.allow_index_reg
.text
_start:
vcvtne2ps2bf16 %zmm28, %zmm29, %zmm30 #AVX512_BF16
vcvtne2ps2bf16 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512_BF16 MASK_ENABLING
vcvtne2ps2bf16 (%r9){1to16}, %zmm29, %zmm30 #AVX512_BF16 BROADCAST_EN
vcvtne2ps2bf16 8128(%rcx), %zmm29, %zmm30 #AVX512_BF16 Disp8
vcvtne2ps2bf16 -8192(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 %zmm29, %ymm30 #AVX512_BF16
vcvtneps2bf16 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512_BF16 MASK_ENABLING
vcvtneps2bf16 (%r9){1to16}, %ymm30 #AVX512_BF16 BROADCAST_EN
vcvtneps2bf16 8128(%rcx), %ymm30 #AVX512_BF16 Disp8
vcvtneps2bf16 -8192(%rdx){1to16}, %ymm30{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps %zmm28, %zmm29, %zmm30 #AVX512_BF16
vdpbf16ps 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512_BF16 MASK_ENABLING
vdpbf16ps (%r9){1to16}, %zmm29, %zmm30 #AVX512_BF16 BROADCAST_EN
vdpbf16ps 8128(%rcx), %zmm29, %zmm30 #AVX512_BF16 Disp8
vdpbf16ps -8192(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# Same checks repeated in Intel syntax.
.intel_syntax noprefix
vcvtne2ps2bf16 zmm30, zmm29, zmm28 #AVX512_BF16
vcvtne2ps2bf16 zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vcvtne2ps2bf16 zmm30, zmm29, DWORD BCST [r9] #AVX512_BF16 BROADCAST_EN
vcvtne2ps2bf16 zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512_BF16 Disp8
vcvtne2ps2bf16 zmm30{k7}{z}, zmm29, DWORD BCST [rdx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 ymm30, zmm29 #AVX512_BF16
vcvtneps2bf16 ymm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vcvtneps2bf16 ymm30, DWORD BCST [r9] #AVX512_BF16 BROADCAST_EN
vcvtneps2bf16 ymm30, ZMMWORD PTR [rcx+8128] #AVX512_BF16 Disp8
vcvtneps2bf16 ymm30{k7}{z}, DWORD BCST [rdx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps zmm30, zmm29, zmm28 #AVX512_BF16
vdpbf16ps zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vdpbf16ps zmm30, zmm29, DWORD BCST [r9] #AVX512_BF16 BROADCAST_EN
vdpbf16ps zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512_BF16 Disp8
vdpbf16ps zmm30{k7}{z}, zmm29, DWORD BCST [rdx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
|
tactcomplabs/xbgas-binutils-gdb
| 4,509
|
gas/testsuite/gas/i386/avx512f-opts.s
|
# Check 32bit AVX512F instructions
# NOTE(review): gas testsuite fixture for the optional ".s" mnemonic suffix,
# which asks gas to emit the alternative (swapped ModRM operand) encoding of
# a move; each ".s" line is paired with the default-encoding line so the .d
# file can compare both forms, with and without {%k7} masking / {z} zeroing.
# Instruction text must stay byte-identical; only comments may change.
.allow_index_reg
.text
_start:
vmovapd.s %zmm5, %zmm6 # AVX512F
vmovapd %zmm5, %zmm6 # AVX512F
vmovapd.s %zmm5, %zmm6{%k7} # AVX512F
vmovapd %zmm5, %zmm6{%k7} # AVX512F
vmovapd.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovapd %zmm5, %zmm6{%k7}{z} # AVX512F
vmovaps.s %zmm5, %zmm6 # AVX512F
vmovaps %zmm5, %zmm6 # AVX512F
vmovaps.s %zmm5, %zmm6{%k7} # AVX512F
vmovaps %zmm5, %zmm6{%k7} # AVX512F
vmovaps.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovaps %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqa32.s %zmm5, %zmm6 # AVX512F
vmovdqa32 %zmm5, %zmm6 # AVX512F
vmovdqa32.s %zmm5, %zmm6{%k7} # AVX512F
vmovdqa32 %zmm5, %zmm6{%k7} # AVX512F
vmovdqa32.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqa32 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqa64.s %zmm5, %zmm6 # AVX512F
vmovdqa64 %zmm5, %zmm6 # AVX512F
vmovdqa64.s %zmm5, %zmm6{%k7} # AVX512F
vmovdqa64 %zmm5, %zmm6{%k7} # AVX512F
vmovdqa64.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqa64 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqu32.s %zmm5, %zmm6 # AVX512F
vmovdqu32 %zmm5, %zmm6 # AVX512F
vmovdqu32.s %zmm5, %zmm6{%k7} # AVX512F
vmovdqu32 %zmm5, %zmm6{%k7} # AVX512F
vmovdqu32.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqu32 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqu64.s %zmm5, %zmm6 # AVX512F
vmovdqu64 %zmm5, %zmm6 # AVX512F
vmovdqu64.s %zmm5, %zmm6{%k7} # AVX512F
vmovdqu64 %zmm5, %zmm6{%k7} # AVX512F
vmovdqu64.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovdqu64 %zmm5, %zmm6{%k7}{z} # AVX512F
vmovsd.s %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmovsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmovsd.s %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmovsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmovss.s %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmovss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vmovss.s %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmovss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vmovupd.s %zmm5, %zmm6 # AVX512F
vmovupd %zmm5, %zmm6 # AVX512F
vmovupd.s %zmm5, %zmm6{%k7} # AVX512F
vmovupd %zmm5, %zmm6{%k7} # AVX512F
vmovupd.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovupd %zmm5, %zmm6{%k7}{z} # AVX512F
vmovups.s %zmm5, %zmm6 # AVX512F
vmovups %zmm5, %zmm6 # AVX512F
vmovups.s %zmm5, %zmm6{%k7} # AVX512F
vmovups %zmm5, %zmm6{%k7} # AVX512F
vmovups.s %zmm5, %zmm6{%k7}{z} # AVX512F
vmovups %zmm5, %zmm6{%k7}{z} # AVX512F
{evex} vmovq.s %xmm5,%xmm6
{evex} vmovq %xmm5,%xmm6
# Same checks repeated in Intel syntax.
.intel_syntax noprefix
vmovapd.s zmm6, zmm5 # AVX512F
vmovapd zmm6, zmm5 # AVX512F
vmovapd.s zmm6{k7}, zmm5 # AVX512F
vmovapd zmm6{k7}, zmm5 # AVX512F
vmovapd.s zmm6{k7}{z}, zmm5 # AVX512F
vmovapd zmm6{k7}{z}, zmm5 # AVX512F
vmovaps.s zmm6, zmm5 # AVX512F
vmovaps zmm6, zmm5 # AVX512F
vmovaps.s zmm6{k7}, zmm5 # AVX512F
vmovaps zmm6{k7}, zmm5 # AVX512F
vmovaps.s zmm6{k7}{z}, zmm5 # AVX512F
vmovaps zmm6{k7}{z}, zmm5 # AVX512F
vmovdqa32.s zmm6, zmm5 # AVX512F
vmovdqa32 zmm6, zmm5 # AVX512F
vmovdqa32.s zmm6{k7}, zmm5 # AVX512F
vmovdqa32 zmm6{k7}, zmm5 # AVX512F
vmovdqa32.s zmm6{k7}{z}, zmm5 # AVX512F
vmovdqa32 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqa64.s zmm6, zmm5 # AVX512F
vmovdqa64 zmm6, zmm5 # AVX512F
vmovdqa64.s zmm6{k7}, zmm5 # AVX512F
vmovdqa64 zmm6{k7}, zmm5 # AVX512F
vmovdqa64.s zmm6{k7}{z}, zmm5 # AVX512F
vmovdqa64 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqu32.s zmm6, zmm5 # AVX512F
vmovdqu32 zmm6, zmm5 # AVX512F
vmovdqu32.s zmm6{k7}, zmm5 # AVX512F
vmovdqu32 zmm6{k7}, zmm5 # AVX512F
vmovdqu32.s zmm6{k7}{z}, zmm5 # AVX512F
vmovdqu32 zmm6{k7}{z}, zmm5 # AVX512F
vmovdqu64.s zmm6, zmm5 # AVX512F
vmovdqu64 zmm6, zmm5 # AVX512F
vmovdqu64.s zmm6{k7}, zmm5 # AVX512F
vmovdqu64 zmm6{k7}, zmm5 # AVX512F
vmovdqu64.s zmm6{k7}{z}, zmm5 # AVX512F
vmovdqu64 zmm6{k7}{z}, zmm5 # AVX512F
vmovsd.s xmm6{k7}, xmm5, xmm4 # AVX512F
vmovsd xmm6{k7}, xmm5, xmm4 # AVX512F
vmovsd.s xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmovsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmovss.s xmm6{k7}, xmm5, xmm4 # AVX512F
vmovss xmm6{k7}, xmm5, xmm4 # AVX512F
vmovss.s xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmovss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vmovupd.s zmm6, zmm5 # AVX512F
vmovupd zmm6, zmm5 # AVX512F
vmovupd.s zmm6{k7}, zmm5 # AVX512F
vmovupd zmm6{k7}, zmm5 # AVX512F
vmovupd.s zmm6{k7}{z}, zmm5 # AVX512F
vmovupd zmm6{k7}{z}, zmm5 # AVX512F
vmovups.s zmm6, zmm5 # AVX512F
vmovups zmm6, zmm5 # AVX512F
vmovups.s zmm6{k7}, zmm5 # AVX512F
vmovups zmm6{k7}, zmm5 # AVX512F
vmovups.s zmm6{k7}{z}, zmm5 # AVX512F
vmovups zmm6{k7}{z}, zmm5 # AVX512F
|
tactcomplabs/xbgas-binutils-gdb
| 6,130
|
gas/testsuite/gas/i386/avx512vl_vaes.s
|
# Check 32bit AVX512VL,VAES instructions
# NOTE(review): gas testsuite fixture. For each of vaesdec / vaesdeclast /
# vaesenc / vaesenclast it covers xmm and ymm forms with register, SIB-memory
# and Disp8-compressed operands, first with the default encoding and then
# with an explicit {evex} prefix.  The paired .d file checks the encodings,
# so instruction text must stay byte-identical; only comments may change.
.allow_index_reg
.text
_start:
vaesdec %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
vaesdec -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
vaesdec 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
vaesdec %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
vaesdec -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
vaesdec 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
vaesdeclast %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
vaesdeclast -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
vaesdeclast 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
vaesdeclast %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
vaesdeclast -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
vaesdeclast 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
vaesenc %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
vaesenc -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
vaesenc 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
vaesenc %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
vaesenc -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
vaesenc 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
vaesenclast %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
vaesenclast -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
vaesenclast 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
vaesenclast %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
vaesenclast -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
vaesenclast 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
# Same forms with the EVEX encoding explicitly requested.
{evex} vaesdec %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesdec -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesdec 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
{evex} vaesdec %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesdec -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesdec 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
{evex} vaesdeclast %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesdeclast -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesdeclast 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
{evex} vaesdeclast %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesdeclast -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesdeclast 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
{evex} vaesenc %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesenc -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesenc 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
{evex} vaesenc %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesenc -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesenc 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
{evex} vaesenclast %xmm4, %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesenclast -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512VL,VAES
{evex} vaesenclast 2032(%edx), %xmm5, %xmm6 # AVX512VL,VAES Disp8
{evex} vaesenclast %ymm4, %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesenclast -123456(%esp,%esi,8), %ymm5, %ymm6 # AVX512VL,VAES
{evex} vaesenclast 4064(%edx), %ymm5, %ymm6 # AVX512VL,VAES Disp8
# Same checks repeated in Intel syntax.
.intel_syntax noprefix
vaesdec xmm6, xmm5, xmm4 # AVX512VL,VAES
vaesdec xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesdec xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
vaesdec ymm6, ymm5, ymm4 # AVX512VL,VAES
vaesdec ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesdec ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
vaesdeclast xmm6, xmm5, xmm4 # AVX512VL,VAES
vaesdeclast xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesdeclast xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
vaesdeclast ymm6, ymm5, ymm4 # AVX512VL,VAES
vaesdeclast ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesdeclast ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
vaesenc xmm6, xmm5, xmm4 # AVX512VL,VAES
vaesenc xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesenc xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
vaesenc ymm6, ymm5, ymm4 # AVX512VL,VAES
vaesenc ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesenc ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
vaesenclast xmm6, xmm5, xmm4 # AVX512VL,VAES
vaesenclast xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesenclast xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
vaesenclast ymm6, ymm5, ymm4 # AVX512VL,VAES
vaesenclast ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
vaesenclast ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
{evex} vaesdec xmm6, xmm5, xmm4 # AVX512VL,VAES
{evex} vaesdec xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesdec xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
{evex} vaesdec ymm6, ymm5, ymm4 # AVX512VL,VAES
{evex} vaesdec ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesdec ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
{evex} vaesdeclast xmm6, xmm5, xmm4 # AVX512VL,VAES
{evex} vaesdeclast xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesdeclast xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
{evex} vaesdeclast ymm6, ymm5, ymm4 # AVX512VL,VAES
{evex} vaesdeclast ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesdeclast ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
{evex} vaesenc xmm6, xmm5, xmm4 # AVX512VL,VAES
{evex} vaesenc xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesenc xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
{evex} vaesenc ymm6, ymm5, ymm4 # AVX512VL,VAES
{evex} vaesenc ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesenc ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
{evex} vaesenclast xmm6, xmm5, xmm4 # AVX512VL,VAES
{evex} vaesenclast xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesenclast xmm6, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,VAES Disp8
{evex} vaesenclast ymm6, ymm5, ymm4 # AVX512VL,VAES
{evex} vaesenclast ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,VAES
{evex} vaesenclast ymm6, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,VAES Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 10,356
|
gas/testsuite/gas/i386/x86-64-avx512vbmi_vl.s
|
# Check 64bit AVX512{VBMI,VL} instructions
# NOTE(review): gas testsuite fixture for vpermb / vpermi2b / vpermt2b /
# vpmultishiftqb in xmm/ymm (VL) forms: register, masked {%k7}, zeroing {z},
# plain memory, and displacements that straddle the Disp8*N compression
# limits (e.g. 2032 vs 2048 for 16-byte operands).  vpmultishiftqb also
# covers {1to2}/{1to4} embedded broadcast.  The paired .d file checks the
# encodings, so instruction text must stay byte-identical.
.allow_index_reg
.text
_start:
vpermb %xmm28, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermb %xmm28, %xmm29, %xmm30{%k7} # AVX512{VBMI,VL}
vpermb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{VBMI,VL}
vpermb (%rcx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermb 2032(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpermb 2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermb -2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpermb -2064(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermb %ymm28, %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermb %ymm28, %ymm29, %ymm30{%k7} # AVX512{VBMI,VL}
vpermb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{VBMI,VL}
vpermb (%rcx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermb 4064(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpermb 4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermb -4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpermb -4128(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermi2b %xmm28, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermi2b %xmm28, %xmm29, %xmm30{%k7} # AVX512{VBMI,VL}
vpermi2b %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{VBMI,VL}
vpermi2b (%rcx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermi2b 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermi2b 2032(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpermi2b 2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermi2b -2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpermi2b -2064(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermi2b %ymm28, %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermi2b %ymm28, %ymm29, %ymm30{%k7} # AVX512{VBMI,VL}
vpermi2b %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{VBMI,VL}
vpermi2b (%rcx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermi2b 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermi2b 4064(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpermi2b 4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermi2b -4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpermi2b -4128(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermt2b %xmm28, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermt2b %xmm28, %xmm29, %xmm30{%k7} # AVX512{VBMI,VL}
vpermt2b %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{VBMI,VL}
vpermt2b (%rcx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermt2b 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermt2b 2032(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpermt2b 2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermt2b -2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpermt2b -2064(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpermt2b %ymm28, %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermt2b %ymm28, %ymm29, %ymm30{%k7} # AVX512{VBMI,VL}
vpermt2b %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{VBMI,VL}
vpermt2b (%rcx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermt2b 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermt2b 4064(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpermt2b 4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpermt2b -4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpermt2b -4128(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb %xmm28, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb %xmm28, %xmm29, %xmm30{%k7} # AVX512{VBMI,VL}
vpmultishiftqb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{VBMI,VL}
vpmultishiftqb (%rcx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb (%rcx){1to2}, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb 2032(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb 2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb -2048(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb -2064(%rdx), %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{VBMI,VL}
vpmultishiftqb %ymm28, %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb %ymm28, %ymm29, %ymm30{%k7} # AVX512{VBMI,VL}
vpmultishiftqb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{VBMI,VL}
vpmultishiftqb (%rcx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb (%rcx){1to4}, %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb 4064(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb 4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb -4096(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb -4128(%rdx), %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{VBMI,VL}
vpmultishiftqb -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{VBMI,VL} Disp8
vpmultishiftqb -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{VBMI,VL}
# Same checks repeated in Intel syntax.
.intel_syntax noprefix
vpermb xmm30, xmm29, xmm28 # AVX512{VBMI,VL}
vpermb xmm30{k7}, xmm29, xmm28 # AVX512{VBMI,VL}
vpermb xmm30{k7}{z}, xmm29, xmm28 # AVX512{VBMI,VL}
vpermb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpermb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpermb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{VBMI,VL} Disp8
vpermb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{VBMI,VL}
vpermb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{VBMI,VL} Disp8
vpermb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{VBMI,VL}
vpermb ymm30, ymm29, ymm28 # AVX512{VBMI,VL}
vpermb ymm30{k7}, ymm29, ymm28 # AVX512{VBMI,VL}
vpermb ymm30{k7}{z}, ymm29, ymm28 # AVX512{VBMI,VL}
vpermb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpermb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpermb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{VBMI,VL} Disp8
vpermb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{VBMI,VL}
vpermb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{VBMI,VL} Disp8
vpermb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{VBMI,VL}
vpermi2b xmm30, xmm29, xmm28 # AVX512{VBMI,VL}
vpermi2b xmm30{k7}, xmm29, xmm28 # AVX512{VBMI,VL}
vpermi2b xmm30{k7}{z}, xmm29, xmm28 # AVX512{VBMI,VL}
vpermi2b xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpermi2b xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpermi2b xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{VBMI,VL} Disp8
vpermi2b xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{VBMI,VL}
vpermi2b xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{VBMI,VL} Disp8
vpermi2b xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{VBMI,VL}
vpermi2b ymm30, ymm29, ymm28 # AVX512{VBMI,VL}
vpermi2b ymm30{k7}, ymm29, ymm28 # AVX512{VBMI,VL}
vpermi2b ymm30{k7}{z}, ymm29, ymm28 # AVX512{VBMI,VL}
vpermi2b ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpermi2b ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpermi2b ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{VBMI,VL} Disp8
vpermi2b ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{VBMI,VL}
vpermi2b ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{VBMI,VL} Disp8
vpermi2b ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{VBMI,VL}
vpermt2b xmm30, xmm29, xmm28 # AVX512{VBMI,VL}
vpermt2b xmm30{k7}, xmm29, xmm28 # AVX512{VBMI,VL}
vpermt2b xmm30{k7}{z}, xmm29, xmm28 # AVX512{VBMI,VL}
vpermt2b xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpermt2b xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpermt2b xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{VBMI,VL} Disp8
vpermt2b xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{VBMI,VL}
vpermt2b xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{VBMI,VL} Disp8
vpermt2b xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{VBMI,VL}
vpermt2b ymm30, ymm29, ymm28 # AVX512{VBMI,VL}
vpermt2b ymm30{k7}, ymm29, ymm28 # AVX512{VBMI,VL}
vpermt2b ymm30{k7}{z}, ymm29, ymm28 # AVX512{VBMI,VL}
vpermt2b ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpermt2b ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpermt2b ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{VBMI,VL} Disp8
vpermt2b ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{VBMI,VL}
vpermt2b ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{VBMI,VL} Disp8
vpermt2b ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, xmm28 # AVX512{VBMI,VL}
vpmultishiftqb xmm30{k7}, xmm29, xmm28 # AVX512{VBMI,VL}
vpmultishiftqb xmm30{k7}{z}, xmm29, xmm28 # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, [rcx]{1to2} # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, [rdx+1016]{1to2} # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm30, xmm29, [rdx+1024]{1to2} # AVX512{VBMI,VL}
vpmultishiftqb xmm30, xmm29, [rdx-1024]{1to2} # AVX512{VBMI,VL} Disp8
vpmultishiftqb xmm30, xmm29, [rdx-1032]{1to2} # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, ymm28 # AVX512{VBMI,VL}
vpmultishiftqb ymm30{k7}, ymm29, ymm28 # AVX512{VBMI,VL}
vpmultishiftqb ymm30{k7}{z}, ymm29, ymm28 # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, [rcx]{1to4} # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, [rdx+1016]{1to4} # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm30, ymm29, [rdx+1024]{1to4} # AVX512{VBMI,VL}
vpmultishiftqb ymm30, ymm29, [rdx-1024]{1to4} # AVX512{VBMI,VL} Disp8
vpmultishiftqb ymm30, ymm29, [rdx-1032]{1to4} # AVX512{VBMI,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 3,493
|
gas/testsuite/gas/i386/katmai.s
|
#PIII SIMD instructions
# NOTE(review): gas testsuite fixture covering the Pentium III (Katmai)
# SSE and MMX-extension instruction set in 32-bit AT&T syntax: arithmetic,
# compares (both $imm and mnemonic cmp* forms), conversions, moves,
# shuffles, prefetches and non-temporal stores.  Instruction text must
# stay byte-identical to match the expected-disassembly file.
.text
foo:
addps (%ecx),%xmm0
addps %xmm2,%xmm1
addss (%ebx),%xmm2
addss %xmm4,%xmm3
andnps 0x0(%ebp),%xmm4
andnps %xmm6,%xmm5
andps (%edi),%xmm6
andps %xmm0,%xmm7
cmpps $0x2,%xmm1,%xmm0
cmpps $0x3,(%edx),%xmm1
cmpss $0x4,%xmm2,%xmm2
cmpss $0x5,(%esp,1),%xmm3
cmpps $0x6,%xmm5,%xmm4
cmpps $0x7,(%esi),%xmm5
cmpss $0x0,%xmm7,%xmm6
cmpss $0x1,(%eax),%xmm7
cmpeqps %xmm1,%xmm0
cmpeqps (%edx),%xmm1
cmpeqss %xmm2,%xmm2
cmpeqss (%esp,1),%xmm3
cmpltps %xmm5,%xmm4
cmpltps (%esi),%xmm5
cmpltss %xmm7,%xmm6
cmpltss (%eax),%xmm7
cmpleps (%ecx),%xmm0
cmpleps %xmm2,%xmm1
cmpless (%ebx),%xmm2
cmpless %xmm4,%xmm3
cmpunordps 0x0(%ebp),%xmm4
cmpunordps %xmm6,%xmm5
cmpunordss (%edi),%xmm6
cmpunordss %xmm0,%xmm7
cmpneqps %xmm1,%xmm0
cmpneqps (%edx),%xmm1
cmpneqss %xmm2,%xmm2
cmpneqss (%esp,1),%xmm3
cmpnltps %xmm5,%xmm4
cmpnltps (%esi),%xmm5
cmpnltss %xmm7,%xmm6
cmpnltss (%eax),%xmm7
cmpnleps (%ecx),%xmm0
cmpnleps %xmm2,%xmm1
cmpnless (%ebx),%xmm2
cmpnless %xmm4,%xmm3
cmpordps 0x0(%ebp),%xmm4
cmpordps %xmm6,%xmm5
cmpordss (%edi),%xmm6
cmpordss %xmm0,%xmm7
comiss %xmm1,%xmm0
comiss (%edx),%xmm1
cvtpi2ps %mm3,%xmm2
cvtpi2ps (%esp,1),%xmm3
cvtsi2ss %ebp,%xmm4
cvtsi2ss (%esi),%xmm5
cvtps2pi %xmm7,%mm6
cvtps2pi (%eax),%mm7
cvtss2si (%ecx),%eax
cvtss2si %xmm2,%ecx
cvttps2pi (%ebx),%mm2
cvttps2pi %xmm4,%mm3
cvttss2si 0x0(%ebp),%esp
cvttss2si %xmm6,%ebp
divps %xmm1,%xmm0
divps (%edx),%xmm1
divss %xmm3,%xmm2
divss (%esp,1),%xmm3
ldmxcsr 0x0(%ebp)
stmxcsr (%esi)
sfence
maxps %xmm1,%xmm0
maxps (%edx),%xmm1
maxss %xmm3,%xmm2
maxss (%esp,1),%xmm3
minps %xmm5,%xmm4
minps (%esi),%xmm5
minss %xmm7,%xmm6
minss (%eax),%xmm7
movaps %xmm1,%xmm0
movaps %xmm2,(%ecx)
movaps (%edx),%xmm2
movlhps %xmm4,%xmm3
movhps %xmm5,(%esp,1)
movhps (%esi),%xmm5
movhlps %xmm7,%xmm6
movlps %xmm0,(%edi)
movlps (%eax),%xmm0
movmskps %xmm2,%ecx
movups %xmm3,%xmm2
movups %xmm4,(%edx)
movups 0x0(%ebp),%xmm4
movss %xmm6,%xmm5
movss %xmm7,(%esi)
movss (%eax),%xmm7
mulps %xmm1,%xmm0
mulps (%edx),%xmm1
mulss %xmm2,%xmm2
mulss (%esp,1),%xmm3
orps %xmm5,%xmm4
orps (%esi),%xmm5
rcpps %xmm7,%xmm6
rcpps (%eax),%xmm7
rcpss (%ecx),%xmm0
rcpss %xmm2,%xmm1
rsqrtps (%ebx),%xmm2
rsqrtps %xmm4,%xmm3
rsqrtss 0x0(%ebp),%xmm4
rsqrtss %xmm6,%xmm5
shufps $0x2,(%edi),%xmm6
shufps $0x3,%xmm0,%xmm7
sqrtps %xmm1,%xmm0
sqrtps (%edx),%xmm1
sqrtss %xmm2,%xmm2
sqrtss (%esp,1),%xmm3
subps %xmm5,%xmm4
subps (%esi),%xmm5
subss %xmm7,%xmm6
subss (%eax),%xmm7
ucomiss (%ecx),%xmm0
ucomiss %xmm2,%xmm1
unpckhps (%ebx),%xmm2
unpckhps %xmm4,%xmm3
unpcklps 0x0(%ebp),%xmm4
unpcklps %xmm6,%xmm5
xorps (%edi),%xmm6
xorps %xmm0,%xmm7
# MMX extensions introduced alongside SSE on the PIII.
pavgb %mm1,%mm0
pavgb (%edx),%mm1
pavgw %mm3,%mm2
pavgw (%esp,1),%mm3
pextrw $0x0,%mm1,%eax
pinsrw $0x1,(%ecx),%mm1
pinsrw $0x2,%edx,%mm2
pmaxsw %mm1,%mm0
pmaxsw (%edx),%mm1
pmaxub %mm2,%mm2
pmaxub (%esp,1),%mm3
pminsw %mm5,%mm4
pminsw (%esi),%mm5
pminub %mm7,%mm6
pminub (%eax),%mm7
pmovmskb %mm5,%eax
pmulhuw %mm5,%mm4
pmulhuw (%esi),%mm5
psadbw %mm7,%mm6
psadbw (%eax),%mm7
pshufw $0x1,%mm2,%mm3
pshufw $0x4,0x0(%ebp),%mm6
maskmovq %mm7,%mm0
movntps %xmm6,(%ebx)
movntq %mm2,(%eax)
prefetchnta (%esi)
prefetcht0 (%eax,%ebx,4)
prefetcht1 (%edx)
prefetcht2 (%ecx)
# A bad sfence modrm byte
.byte 0x65,0x0F,0xAE,0xff
# Pad out to good alignment
.p2align 4,0
|
tactcomplabs/xbgas-binutils-gdb
| 1,924
|
gas/testsuite/gas/i386/avx512vl_vpclmulqdq-wig.s
|
# Check 32bit AVX512VL,VPCLMULQDQ WIG instructions
# NOTE(review): gas testsuite fixture for vpclmulqdq in xmm/ymm (VL) forms —
# register, SIB-memory and Disp8-compressed operands — first with the default
# encoding and then with an explicit {evex} prefix, in both AT&T and Intel
# syntax.  Instruction text must stay byte-identical to match the .d file.
.allow_index_reg
.text
_start:
vpclmulqdq $0xab, %xmm4, %xmm1, %xmm1 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, -123456(%esp,%esi,8), %xmm1, %xmm1 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%edx), %xmm1, %xmm1 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm2, %ymm5, %ymm3 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, -123456(%esp,%esi,8), %ymm5, %ymm3 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%edx), %ymm5, %ymm3 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %xmm4, %xmm1, %xmm1 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, -123456(%esp,%esi,8), %xmm1, %xmm1 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%edx), %xmm1, %xmm1 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm2, %ymm5, %ymm3 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, -123456(%esp,%esi,8), %ymm5, %ymm3 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%edx), %ymm5, %ymm3 # AVX512VL,VPCLMULQDQ Disp8
# Same checks repeated in Intel syntax.
.intel_syntax noprefix
vpclmulqdq xmm6, xmm4, xmm1, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm6, xmm4, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm6, xmm4, XMMWORD PTR [edx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm2, ymm4, ymm4, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm2, ymm4, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm2, ymm4, YMMWORD PTR [edx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm6, xmm4, xmm1, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm6, xmm4, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm6, xmm4, XMMWORD PTR [edx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm2, ymm4, ymm4, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm2, ymm4, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm2, ymm4, YMMWORD PTR [edx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 1,386
|
gas/testsuite/gas/i386/vp2intersect.s
|
# Check AVX512_VP2INTERSECT new instructions.
.text
vp2intersectd %zmm1, %zmm2, %k3
vp2intersectd 64(%eax), %zmm2, %k3
vp2intersectd 8(%eax){1to16}, %zmm2, %k3
vp2intersectd %ymm1, %ymm2, %k3
vp2intersectd 32(%eax), %ymm2, %k3
vp2intersectd 8(%eax){1to8}, %ymm2, %k3
vp2intersectd %xmm1, %xmm2, %k3
vp2intersectd 16(%eax), %xmm2, %k3
vp2intersectd 8(%eax){1to4}, %xmm2, %k3
vp2intersectq %zmm1, %zmm2, %k3
vp2intersectq 64(%eax), %zmm2, %k3
vp2intersectq 8(%eax){1to8}, %zmm2, %k3
vp2intersectq %ymm1, %ymm2, %k3
vp2intersectq 32(%eax), %ymm2, %k3
vp2intersectq 8(%eax){1to4}, %ymm2, %k3
vp2intersectq %xmm1, %xmm2, %k3
vp2intersectq 16(%eax), %xmm2, %k3
vp2intersectq 8(%eax){1to2}, %xmm2, %k3
.intel_syntax noprefix
vp2intersectd k3, zmm2, zmm1
vp2intersectd k3, zmm2, 64[eax]
vp2intersectd k3, zmm2, dword bcst 8[eax]
vp2intersectd k3, ymm2, ymm1
vp2intersectd k3, ymm2, 32[eax]
vp2intersectd k3, ymm2, dword bcst 8[eax]
vp2intersectd k3, xmm2, xmm1
vp2intersectd k3, xmm2, 16[eax]
vp2intersectd k3, xmm2, dword bcst 8[eax]
vp2intersectq k3, zmm2, zmm1
vp2intersectq k3, zmm2, 64[eax]
vp2intersectq k3, zmm2, qword bcst 8[eax]
vp2intersectq k3, ymm2, ymm1
vp2intersectq k3, ymm2, 32[eax]
vp2intersectq k3, ymm2, qword bcst 8[eax]
vp2intersectq k3, xmm2, xmm1
vp2intersectq k3, xmm2, 16[eax]
vp2intersectq k3, xmm2, qword bcst 8[eax]
|
tactcomplabs/xbgas-binutils-gdb
| 2,847
|
gas/testsuite/gas/sparc/splet.s
|
.text
.global start
! Starting point
start:
! test all ASRs
rd %asr0, %l0
rd %asr1, %l0
rd %asr15, %l0
rd %asr17, %l0
rd %asr18, %l0
rd %asr19, %l0 ! should stop the processor
rd %asr20, %l0
rd %asr21, %l0
rd %asr22, %l0
wr %l0, 0, %asr0
wr %l0, 0, %asr1
wr %l0, 0, %asr15
wr %l0, 0, %asr17
wr %l0, 0, %asr18
wr %l0, 0, %asr19
wr %l0, 0, %asr20
wr %l0, 0, %asr21
wr %l0, 0, %asr22
! test UMUL with no overflow inside Y
test_umul:
umul %g1, %g2, %g3
! test UMUL with an overflow inside Y
umul %g1, %g2, %g3 ! %g3 must be equal to 0
! test SMUL with negative result
test_smul:
smul %g1, %g2, %g3
! test SMUL with positive result
smul %g1, %g2, %g3
! test STBAR: there are two possible syntaxes
test_stbar:
stbar ! is a valid V8 syntax, at least a synthetic
! instruction
rd %asr15, %g0 ! other solution
! test UNIMP
unimp 1
! test FLUSH
flush %l1 ! is the official V8 syntax
! test SCAN: find first 0
test_scan:
scan %l1, 0xffffffff, %l3
! test scan: find first 1
scan %l1, 0, %l3
! test scan: find first bit != bit-0
scan %l1, %l1, %l3
! test SHUFFLE
test_shuffle:
shuffle %l0, 0x1, %l1
shuffle %l0, 0x2, %l1
shuffle %l0, 0x4, %l1
shuffle %l0, 0x8, %l1
shuffle %l0, 0x10, %l1
shuffle %l0, 0x18, %l1
! test UMAC
test_umac:
umac %l1, %l2, %l0
umac %l1, 2, %l0
umac 2, %l1, %l0
! test UMACD
test_umacd:
umacd %l2, %l4, %l0
umacd %l2, 3, %l0
umacd 3, %l2, %l0
! test SMAC
test_smac:
smac %l1, %l2, %l0
smac %l1, -42, %l0
smac -42, %l1, %l0
! test SMACD
test_smacd:
smacd %l2, %l4, %l0
smacd %l2, 123, %l0
smacd 123, %l2, %l0
! test UMULD
test_umuld:
umuld %o2, %o4, %o0
umuld %o2, 0x234, %o0
umuld 0x567, %o2, %o0
! test SMULD
test_smuld:
smuld %i2, %i4, %i0
smuld %i2, -4096, %i0
smuld 4095, %i4, %i0
! Coprocessor instructions
test_coprocessor:
! %ccsr is register # 0
! %ccfr is register # 1
! %ccpr is register # 3
! %cccrcr is register # 2
! test CPUSH: just syntax
cpush %l0, %l1
cpush %l0, 1
cpusha %l0, %l1
cpusha %l0, 1
! test CPULL: just syntax
cpull %l0
! test CPRDCXT: just syntax
crdcxt %ccsr, %l0
crdcxt %ccfr, %l0
crdcxt %ccpr, %l0
crdcxt %cccrcr, %l0
! test CPWRCXT: just syntax
cwrcxt %l0, %ccsr
cwrcxt %l0, %ccfr
cwrcxt %l0, %ccpr
cwrcxt %l0, %cccrcr
! test CBccc: just syntax
cbn stop
nop
cbn,a stop
nop
cbe stop
nop
cbe,a stop
nop
cbf stop
nop
cbf,a stop
nop
cbef stop
nop
cbef,a stop
nop
cbr stop
nop
cbr,a stop
nop
cber stop
nop
cber,a stop
nop
cbfr stop
nop
cbfr,a stop
nop
cbefr stop
nop
cbefr,a stop
nop
cba stop
nop
cba,a stop
nop
cbne stop
nop
cbne,a stop
nop
cbnf stop
nop
cbnf,a stop
nop
cbnef stop
nop
cbnef,a stop
nop
cbnr stop
nop
cbnr,a stop
nop
cbner stop
nop
cbner,a stop
nop
cbnfr stop
nop
cbnfr,a stop
nop
cbnefr stop
nop
cbnefr,a stop
nop
|
tactcomplabs/xbgas-binutils-gdb
| 1,196
|
gas/testsuite/gas/sparc/xcrypto.s
|
# Test OSA2015 CRYPTO instructions
.text
xmpmul 0
xmpmul 1
xmpmul 2
xmpmul 3
xmpmul 4
xmpmul 5
xmpmul 6
xmpmul 7
xmpmul 8
xmpmul 9
xmpmul 10
xmpmul 11
xmpmul 12
xmpmul 13
xmpmul 14
xmpmul 15
xmpmul 16
xmpmul 17
xmpmul 18
xmpmul 19
xmpmul 20
xmpmul 21
xmpmul 22
xmpmul 23
xmpmul 24
xmpmul 25
xmpmul 26
xmpmul 27
xmpmul 28
xmpmul 29
xmpmul 30
xmpmul 31
xmontmul 0
xmontmul 1
xmontmul 2
xmontmul 3
xmontmul 4
xmontmul 5
xmontmul 6
xmontmul 7
xmontmul 8
xmontmul 9
xmontmul 10
xmontmul 11
xmontmul 12
xmontmul 13
xmontmul 14
xmontmul 15
xmontmul 16
xmontmul 17
xmontmul 18
xmontmul 19
xmontmul 20
xmontmul 21
xmontmul 22
xmontmul 23
xmontmul 24
xmontmul 25
xmontmul 26
xmontmul 27
xmontmul 28
xmontmul 29
xmontmul 30
xmontmul 31
xmontsqr 0
xmontsqr 1
xmontsqr 2
xmontsqr 3
xmontsqr 4
xmontsqr 5
xmontsqr 6
xmontsqr 7
xmontsqr 8
xmontsqr 9
xmontsqr 10
xmontsqr 11
xmontsqr 12
xmontsqr 13
xmontsqr 14
xmontsqr 15
xmontsqr 16
xmontsqr 17
xmontsqr 18
xmontsqr 19
xmontsqr 20
xmontsqr 21
xmontsqr 22
xmontsqr 23
xmontsqr 24
xmontsqr 25
xmontsqr 26
xmontsqr 27
xmontsqr 28
xmontsqr 29
xmontsqr 30
xmontsqr 31
|
tactcomplabs/xbgas-binutils-gdb
| 1,347
|
gas/testsuite/gas/sparc/set64.s
|
# sparc64 set insn handling (includes set, setuw, setsw, setx)
foo:
set foo,%g2
set 0x76543210,%g3
set 0,%g4
set 65535,%g5
setx foo,%g1,%g2
setx -1,%g1,%g3
setx 0,%g1,%g3
setx 1,%g1,%g3
setx 4095,%g1,%g3
setx 4096,%g1,%g3
setx -4096,%g1,%g3
setx -4097,%g1,%g3
setx 65535,%g1,%g3
setx -65536,%g1,%g3
setx 2147483647,%g1,%g4
setx 2147483648,%g1,%g4
setx -2147483648,%g1,%g4
setx -2147483649,%g1,%g4
setx 4294967295,%g1,%g4
setx 4294967296,%g1,%g4
! GAS doesn't handle large base10 numbers yet.
! setx 9223372036854775807,%g1,%g5
! setx 9223372036854775808,%g1,%g5
! setx -9223372036854775808,%g1,%g5
! setx -9223372036854775809,%g1,%g5
setx 0x7fffffffffffffff,%g1,%g5
setx 0x8000000000000000,%g1,%g5 ! test only hh22 needed
setx 0xffffffff00000000,%g1,%g5 ! test only hm10 needed
setx 0xffffffff80000000,%g1,%g5 ! test sign-ext of lower 32
setx 0xffff0000ffff0000,%g1,%g5 ! test hh22,hi22
setx 0xffff000000000001,%g1,%g5 ! test hh22,lo10
setx 0x00000001ffff0001,%g1,%g5 ! test hm10,hi22,lo10
setx 0x00000001ffff0000,%g1,%g5 ! test hm10,hi22
setx 0x0000000100000001,%g1,%g5 ! test hm10,lo10
setuw foo,%g2
setuw 0x76543210,%g3
setuw 0,%g4
setuw 65535,%g5
setsw foo,%g2
setsw 0x76543210,%g3
setsw 0,%g4
setsw 65535,%g5
setsw 0xffffffff,%g1
setsw 0x7fffffff,%g2
setsw 0xffff0000,%g3
setsw -1,%g4
|
tactcomplabs/xbgas-binutils-gdb
| 1,875
|
gas/testsuite/gas/sparc/crypto.s
|
# Test CRYPTO instructions
.text
md5
sha1
sha256
sha512
crc32c %f2, %f4, %f6
aes_kexpand0 %f4, %f6, %f8
aes_kexpand1 %f6, %f8, 0x7, %f10
aes_kexpand1 %f6, %f8, 6, %f10
aes_kexpand2 %f8, %f10, %f12
aes_eround01 %f10, %f12, %f14, %f16
aes_eround23 %f12, %f14, %f16, %f18
aes_dround01 %f14, %f16, %f18, %f20
aes_dround23 %f16, %f18, %f20, %f22
aes_eround01_l %f18, %f20, %f22, %f24
aes_eround23_l %f20, %f22, %f24, %f26
aes_dround01_l %f22, %f24, %f26, %f28
aes_dround23_l %f24, %f26, %f28, %f30
des_ip %f32, %f34
des_iip %f34, %f36
des_kexpand %f36, 7, %f38
des_round %f38, %f40, %f42, %f44
kasumi_fi_fi %f42, %f44, %f46
kasumi_fl_xor %f44, %f46, %f48, %f50
kasumi_fi_xor %f46, %f48, %f50, %f52
camellia_fl %f50, %f52, %f54
camellia_fli %f52, %f54, %f56
camellia_f %f54, %f56, %f58, %f60
mpmul 0
mpmul 1
mpmul 2
mpmul 3
mpmul 4
mpmul 5
mpmul 6
mpmul 7
mpmul 8
mpmul 9
mpmul 10
mpmul 11
mpmul 12
mpmul 13
mpmul 14
mpmul 15
mpmul 16
mpmul 17
mpmul 18
mpmul 19
mpmul 20
mpmul 21
mpmul 22
mpmul 23
mpmul 24
mpmul 25
mpmul 26
mpmul 27
mpmul 28
mpmul 29
mpmul 30
mpmul 31
montmul 0
montmul 1
montmul 2
montmul 3
montmul 4
montmul 5
montmul 6
montmul 7
montmul 8
montmul 9
montmul 10
montmul 11
montmul 12
montmul 13
montmul 14
montmul 15
montmul 16
montmul 17
montmul 18
montmul 19
montmul 20
montmul 21
montmul 22
montmul 23
montmul 24
montmul 25
montmul 26
montmul 27
montmul 28
montmul 29
montmul 30
montmul 31
montsqr 0
montsqr 1
montsqr 2
montsqr 3
montsqr 4
montsqr 5
montsqr 6
montsqr 7
montsqr 8
montsqr 9
montsqr 10
montsqr 11
montsqr 12
montsqr 13
montsqr 14
montsqr 15
montsqr 16
montsqr 17
montsqr 18
montsqr 19
montsqr 20
montsqr 21
montsqr 22
montsqr 23
montsqr 24
montsqr 25
montsqr 26
montsqr 27
montsqr 28
montsqr 29
montsqr 30
montsqr 31
|
tactcomplabs/xbgas-binutils-gdb
| 1,231
|
gas/testsuite/gas/sparc/reloc64.s
|
# sparc64 special relocs
foo:
sethi %uhi(0x1234567800000000),%g1
or %g1,%ulo(0x1234567800000000),%g1
nop
sethi %uhi(foo),%g1
or %g1,%ulo(foo),%g1
nop
sethi %uhi(foo+0x1234567800000000),%g1
or %g1,%ulo(foo+0x1234567800000000),%g1
nop
sethi %hh(0xfedcba9876543210),%g1
or %g1,%hm(0xfedcba9876543210),%g1
sethi %lm(0xfedcba9876543210),%g2
or %g1,%lo(0xfedcba9876543210),%g2
nop
sethi %hh(foo),%g1
or %g1,%hm(foo),%g1
sethi %lm(foo),%g2
or %g1,%lo(foo),%g2
nop
sethi %hh(foo+0xfedcba9876543210),%g1
or %g1,%hm(foo+0xfedcba9876543210),%g1
sethi %lm(foo+0xfedcba9876543210),%g2
or %g1,%lo(foo+0xfedcba9876543210),%g2
nop
sethi %h44(0xa9876543210),%g1
or %g1,%m44(0xa9876543210),%g1
or %g1,%l44(0xa9876543210),%g1
nop
sethi %h44(foo),%g1
or %g1,%m44(foo),%g1
or %g1,%l44(foo),%g1
nop
sethi %h44(foo+0xa9876543210),%g1
or %g1,%m44(foo+0xa9876543210),%g1
or %g1,%l44(foo+0xa9876543210),%g1
nop
sethi %hix(0xffffffff76543210),%g1
xor %g1,%lox(0xffffffff76543210),%g1
nop
sethi %hix(foo),%g1
xor %g1,%lox(foo),%g1
nop
sethi %hix(foo+0xffffffff76543210),%g1
xor %g1,%lox(foo+0xffffffff76543210),%g1
nop
sethi %h34(foo+0xa9876543210),%g1
sllx %g1, 2, %g1
or %g1,%l34(foo+0xa9876543210),%g1
nop
|
tactcomplabs/xbgas-binutils-gdb
| 1,832
|
gas/testsuite/gas/sparc/wrpr.s
|
# Test wrpr
.text
wrpr %g1,%g2,%tpc
wrpr %g1,%tpc
wrpr %g1,666,%tpc
wrpr 666,%g1,%tpc
wrpr 666,%tpc
wrpr %g1,%g2,%tnpc
wrpr %g1,%tnpc
wrpr %g1,666,%tnpc
wrpr 666,%g1,%tnpc
wrpr 666,%tnpc
wrpr %g1,%g2,%tstate
wrpr %g1,%tstate
wrpr %g1,666,%tstate
wrpr 666,%g1,%tstate
wrpr 666,%tstate
wrpr %g1,%g2,%tt
wrpr %g1,%tt
wrpr %g1,666,%tt
wrpr 666,%g1,%tt
wrpr 666,%tt
wrpr %g1,%g2,%tick
wrpr %g1,%tick
wrpr %g1,666,%tick
wrpr 666,%g1,%tick
wrpr 666,%tick
wrpr %g1,%g2,%tba
wrpr %g1,%tba
wrpr %g1,666,%tba
wrpr 666,%g1,%tba
wrpr 666,%tba
wrpr %g1,%g2,%pstate
wrpr %g1,%pstate
wrpr %g1,666,%pstate
wrpr 666,%g1,%pstate
wrpr 666,%pstate
wrpr %g1,%g2,%tl
wrpr %g1,%tl
wrpr %g1,666,%tl
wrpr 666,%g1,%tl
wrpr 666,%tl
wrpr %g1,%g2,%pil
wrpr %g1,%pil
wrpr %g1,666,%pil
wrpr 666,%g1,%pil
wrpr 666,%pil
wrpr %g1,%g2,%cwp
wrpr %g1,%cwp
wrpr %g1,666,%cwp
wrpr 666,%g1,%cwp
wrpr 666,%cwp
wrpr %g1,%g2,%cansave
wrpr %g1,%cansave
wrpr %g1,666,%cansave
wrpr 666,%g1,%cansave
wrpr 666,%cansave
wrpr %g1,%g2,%canrestore
wrpr %g1,%canrestore
wrpr %g1,666,%canrestore
wrpr 666,%g1,%canrestore
wrpr 666,%canrestore
wrpr %g1,%g2,%cleanwin
wrpr %g1,%cleanwin
wrpr %g1,666,%cleanwin
wrpr 666,%g1,%cleanwin
wrpr 666,%cleanwin
wrpr %g1,%g2,%otherwin
wrpr %g1,%otherwin
wrpr %g1,666,%otherwin
wrpr 666,%g1,%otherwin
wrpr 666,%otherwin
wrpr %g1,%g2,%wstate
wrpr %g1,%wstate
wrpr %g1,666,%wstate
wrpr 666,%g1,%wstate
wrpr 666,%wstate
wrpr %g1,%g2,%fq
wrpr %g1,%fq
wrpr %g1,666,%fq
wrpr 666,%g1,%fq
wrpr 666,%fq
wrpr %g1,%g2,%gl
wrpr %g1,%gl
wrpr %g1,666,%gl
wrpr 666,%g1,%gl
wrpr 666,%gl
wrpr %g1,%g2,%pmcdper
wrpr %g1,%pmcdper
wrpr %g1,666,%pmcdper
wrpr 666,%g1,%pmcdper
wrpr 666,%pmcdper
wrpr %g1,%g2,%ver
wrpr %g1,%ver
wrpr %g1,666,%ver
wrpr 666,%g1,%ver
wrpr 666,%ver
|
tactcomplabs/xbgas-binutils-gdb
| 1,767
|
gas/testsuite/gas/sparc/ldm-stm.s
|
# Test ldm/stm/ldma/stma
.text
ldmsh [%g0+%g1], %g2
ldmsh [%g1], %g3
ldmsh [%g1+102], %g3
ldmsh [102+%g1], %g3
ldmsh [102], %g3
ldmuh [%g0+%g1], %g2
ldmuh [%g1], %g3
ldmuh [%g1+102], %g3
ldmuh [102+%g1], %g3
ldmuh [102], %g3
ldmsw [%g0+%g1], %g2
ldmsw [%g1], %g3
ldmsw [%g1+102], %g3
ldmsw [102+%g1], %g3
ldmsw [102], %g3
ldmuw [%g0+%g1], %g2
ldmuw [%g1], %g3
ldmuw [%g1+102], %g3
ldmuw [102+%g1], %g3
ldmuw [102], %g3
ldmx [%g0+%g1], %g2
ldmx [%g1], %g3
ldmx [%g1+102], %g3
ldmx [102+%g1], %g3
ldmx [102], %g3
ldmux [%g0+%g1], %g2
ldmux [%g1], %g3
ldmux [%g1+102], %g3
ldmux [102+%g1], %g3
ldmux [102], %g3
ldmsha [%g1+%g2] %asi, %g3
ldmsha [%g1] %asi, %g2
ldmuha [%g1+%g2] %asi, %g3
ldmuha [%g1] %asi, %g2
ldmswa [%g1+%g2] %asi, %g3
ldmswa [%g1] %asi, %g2
ldmuwa [%g1+%g2] %asi, %g3
ldmuwa [%g1] %asi, %g2
ldmxa [%g1+%g2] %asi, %g3
ldmxa [%g1] %asi, %g2
stmh %g2, [%g0+%g1]
stmh %g3, [%g1]
stmh %g3, [%g1+102]
stmh %g3, [102+%g1]
stmh %g3, [102]
stmw %g2, [%g0+%g1]
stmw %g3, [%g1]
stmw %g3, [%g1+102]
stmw %g3, [102+%g1]
stmw %g3, [102]
stmx %g2, [%g0+%g1]
stmx %g3, [%g1]
stmx %g3, [%g1+102]
stmx %g3, [102+%g1]
stmx %g3, [102]
stmha %g2, [%g0+%g1] %asi
stmha %g3, [%g1] %asi
stmwa %g2, [%g0+%g1] %asi
stmwa %g3, [%g1] %asi
stmxa %g2, [%g0+%g1] %asi
stmxa %g3, [%g1] %asi
|
tactcomplabs/xbgas-binutils-gdb
| 1,965
|
gas/testsuite/gas/sparc/hpcvis3.s
|
# Test HPC/VIS3 instructions
.text
nop
nop
ldx [%g3], %efsr
nop
1: nop
fnadds %f1, %f2, %f3
fnaddd %f2, %f4, %f6
fnmuls %f3, %f5, %f7
fnmuld %f6, %f8, %f10
fhadds %f7, %f9, %f11
fhaddd %f8, %f10, %f12
fhsubs %f9, %f11, %f13
fhsubd %f10, %f12, %f14
fnhadds %f11, %f13, %f15
fnhaddd %f12, %f14, %f16
fnsmuld %f13, %f15, %f16
fmadds %f15, %f17, %f19, %f21
fmaddd %f14, %f16, %f18, %f20
fmsubs %f17, %f19, %f21, %f23
fmsubd %f16, %f18, %f20, %f22
fnmsubs %f19, %f21, %f23, %f25
fnmsubd %f18, %f20, %f22, %f24
fnmadds %f21, %f23, %f25, %f27
fnmaddd %f20, %f22, %f24, %f26
fumadds %f23, %f25, %f27, %f29
fumaddd %f22, %f24, %f26, %f28
fumsubs %f25, %f27, %f29, %f31
fumsubd %f24, %f26, %f28, %f30
fnumsubs %f1, %f3, %f5, %f7
fnumsubd %f2, %f4, %f6, %f8
fnumadds %f3, %f5, %f7, %f9
fnumaddd %f4, %f6, %f8, %f10
addxc %g5, %g6, %g7
addxccc %o1, %o2, %o3
nop
umulxhi %o5, %o6, %o7
lzcnt %i1, %i2
cmask8 %i3
cmask16 %i4
cmask32 %i5
fsll16 %f32, %f34, %f36
fsrl16 %f34, %f36, %f38
fsll32 %f36, %f38, %f40
fsrl32 %f38, %f40, %f42
fslas16 %f40, %f42, %f44
fsra16 %f42, %f44, %f46
fslas32 %f44, %f46, %f48
fsra32 %f46, %f48, %f50
pdistn %f48, %f50, %g1
fmean16 %f50, %f52, %f54
fpadd64 %f52, %f54, %f56
fchksm16 %f54, %f56, %f58
fpsub64 %f56, %f58, %f60
fpadds16 %f58, %f60, %f62
fpadds16s %f2, %f4, %f6
fpadds32 %f4, %f6, %f8
fpadds32s %f6, %f8, %f10
fpsubs16 %f8, %f10, %f12
fpsubs16s %f10, %f12, %f14
fpsubs32 %f12, %f14, %f16
fpsubs32s %f14, %f16, %f18
movdtox %f20, %g1
movstouw %f21, %g2
movstosw %f23, %g3
movxtod %g4, %f22
movwtos %g5, %f23
xmulx %o1, %o2, %o3
xmulxhi %o4, %o5, %o6
fucmple8 %f16, %f18, %g1
fucmpne8 %f18, %f20, %g2
fucmpgt8 %f20, %f22, %g3
fucmpeq8 %f22, %f24, %g4
flcmps %fcc0, %f1, %f3
flcmps %fcc1, %f3, %f5
flcmps %fcc2, %f5, %f7
flcmps %fcc3, %f7, %f9
flcmpd %fcc0, %f12, %f14
flcmpd %fcc1, %f14, %f16
flcmpd %fcc2, %f16, %f18
flcmpd %fcc3, %f18, %f20
lzd %i1, %i2
|
tactcomplabs/xbgas-binutils-gdb
| 1,284
|
gas/testsuite/gas/sparc/wrhpr.s
|
# Test wrpr
.text
wrhpr %g1, %g2, %hpstate
wrhpr %g1, %hpstate
wrhpr %g1,666, %hpstate
wrhpr 666, %g1, %hpstate
wrhpr 666, %hpstate
wrhpr %g1, %g2, %htstate
wrhpr %g1, %htstate
wrhpr %g1,666, %htstate
wrhpr 666, %g1, %htstate
wrhpr 666, %htstate
wrhpr %g1, %g2, %hintp
wrhpr %g1, %hintp
wrhpr %g1,666, %hintp
wrhpr 666, %g1, %hintp
wrhpr 666, %hintp
wrhpr %g1, %g2, %htba
wrhpr %g1, %htba
wrhpr %g1,666, %htba
wrhpr 666, %g1, %htba
wrhpr 666, %htba
wrhpr %g1, %g2, %hmcdper
wrhpr %g1, %hmcdper
wrhpr %g1,666, %hmcdper
wrhpr 666, %g1, %hmcdper
wrhpr 666, %hmcdper
wrhpr %g1, %g2, %hmcddfr
wrhpr %g1, %hmcddfr
wrhpr %g1,666, %hmcddfr
wrhpr 666, %g1, %hmcddfr
wrhpr 666, %hmcddfr
wrhpr %g1, %g2, %hva_mask_nz
wrhpr %g1, %hva_mask_nz
wrhpr %g1,666, %hva_mask_nz
wrhpr 666, %g1, %hva_mask_nz
wrhpr 666, %hva_mask_nz
wrhpr %g1, %g2, %hstick_offset
wrhpr %g1, %hstick_offset
wrhpr %g1,666, %hstick_offset
wrhpr 666, %g1, %hstick_offset
wrhpr 666, %hstick_offset
wrhpr %g1, %g2, %hstick_enable
wrhpr %g1, %hstick_enable
wrhpr %g1,666, %hstick_enable
wrhpr 666, %g1, %hstick_enable
wrhpr 666, %hstick_enable
wrhpr %g1, %g2, %hstick_cmpr
wrhpr %g1, %hstick_cmpr
wrhpr %g1,666, %hstick_cmpr
wrhpr 666, %g1, %hstick_cmpr
wrhpr 666, %hstick_cmpr
|
tactcomplabs/xbgas-binutils-gdb
| 1,434
|
gas/testsuite/gas/sparc/cbcond.s
|
# Test CBCOND instructions
.text
cwbe %o1, %o2,1f
cwbe %o1, 2, 1f
cxbe %o2, %o3, 1f
cxbe %o2, 3, 1f
cwble %o3, %o4, 1f
cwble %o3, 4, 1f
cxble %o4, %o5, 1f
cxble %o4, 5, 1f
cwbl %o5, %l0, 1f
cwbl %o5, 6, 1f
cxbl %l0, %l1, 1f
cxbl %l0, 7, 1f
cwbleu %l1, %l2, 1f
cwbleu %l1, 8, 1f
cxbleu %l2, %l3, 1f
cxbleu %l2, 9, 1f
cwbcs %l3, %l4, 1f
cwbcs %l3, 10, 1f
cxbcs %l4, %l5, 1f
cxbcs %l4, 11, 1f
cwbneg %l5, %l6, 1f
cwbneg %l5, 12, 1f
cxbneg %l6, %l7, 1f
cxbneg %l6, 13, 1f
cwbvs %l7, %i0, 1f
cwbvs %l7, 14, 1f
cxbvs %i0, %i1, 1f
cxbvs %i0, 15, 1f
cwbne %i1, %i2, 1f
cwbne %i1, 16, 1f
cxbne %i2, %i3, 1f
cxbne %i2, 17, 1f
cwbg %i3, %i4, 1f
cwbg %i3, 18, 1f
cxbg %i4, %i5, 1f
cxbg %i4, 19, 1f
cwbge %i5, %o0, 1f
cwbge %i5, 20, 1f
cxbge %o0, %o1, 1f
cxbge %o0, 21, 1f
cwbgu %o1, %o2, 1f
cwbgu %o1, 22, 1f
cxbgu %o2, %o3, 1f
cxbgu %o2, 22, 1f
cwbcc %o3, %o4, 1f
cwbcc %o3, 23, 1f
cxbcc %o4, %o5, 1f
cxbcc %o4, 24, 1f
cwbpos %o5, %l0, 1f
cwbpos %o5, 25, 1f
cxbpos %l0, %l1, 1f
cxbpos %l0, 25, 1f
cwbvc %l1, %l2, 1f
cwbvc %l1, 26, 1f
cxbvc %l2, %l3, 1f
cxbvc %l2, 27, 1f
cwbz %l3, %l4, 1f
cwbz %l3, 28, 1f
cxbz %l4, %l5, 1f
cxbz %l4, 29, 1f
cwblu %l5, %l6, 1f
cwblu %l5, 28, 1f
cxblu %l6, %l7, 1f
cxblu %l6, 29, 1f
cwbnz %l7, %o0, 1f
cwbnz %l7, 30, 1f
cxbnz %o0, %o1, 1f
cxbnz %o0, 31, 1f
cwbgeu %o1, %o2, 1f
cwbgeu %o1, 1, 1f
cxbgeu %o2, %o3, 1f
cxbgeu %o2, 2, 1f
1: nop
|
tactcomplabs/xbgas-binutils-gdb
| 10,803
|
gas/testsuite/gas/pe/peseh-x64-4.s
|
.file "t4.C"
.section .text$_ZN5VBase1fEv,"x"
.linkonce discard
.align 2
.globl _ZN5VBase1fEv
.def _ZN5VBase1fEv; .scl 2; .type 32; .endef
.seh_proc _ZN5VBase1fEv
_ZN5VBase1fEv:
.LFB0:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
.seh_endprologue
movq %rcx, 16(%rbp)
nop
popq %rbp
ret
.seh_endproc
.section .text$_ZN10StreamBaseD2Ev,"x"
.linkonce discard
.align 2
.globl _ZN10StreamBaseD2Ev
.def _ZN10StreamBaseD2Ev; .scl 2; .type 32; .endef
.seh_proc _ZN10StreamBaseD2Ev
_ZN10StreamBaseD2Ev:
.LFB2:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
movq 16(%rbp), %rax
leaq 16+_ZTV10StreamBase(%rip), %rdx
movq %rdx, (%rax)
movl $0, %eax
andl $1, %eax
testb %al, %al
je .L2
movq 16(%rbp), %rcx
call _ZdlPv
.L2:
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.section .text$_ZN10StreamBaseD1Ev,"x"
.linkonce discard
.align 2
.globl _ZN10StreamBaseD1Ev
.def _ZN10StreamBaseD1Ev; .scl 2; .type 32; .endef
.seh_proc _ZN10StreamBaseD1Ev
_ZN10StreamBaseD1Ev:
.LFB3:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
movq 16(%rbp), %rax
leaq 16+_ZTV10StreamBase(%rip), %rdx
movq %rdx, (%rax)
movl $2, %eax
andl $1, %eax
testb %al, %al
je .L5
movq 16(%rbp), %rcx
call _ZdlPv
.L5:
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.section .text$_ZN10StreamBaseD0Ev,"x"
.linkonce discard
.align 2
.globl _ZN10StreamBaseD0Ev
.def _ZN10StreamBaseD0Ev; .scl 2; .type 32; .endef
.seh_proc _ZN10StreamBaseD0Ev
_ZN10StreamBaseD0Ev:
.LFB4:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
movq 16(%rbp), %rcx
call _ZN10StreamBaseD1Ev
movq 16(%rbp), %rcx
call _ZdlPv
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.section .text$_ZN6StreamD2Ev,"x"
.linkonce discard
.align 2
.globl _ZN6StreamD2Ev
.def _ZN6StreamD2Ev; .scl 2; .type 32; .endef
.seh_proc _ZN6StreamD2Ev
_ZN6StreamD2Ev:
.LFB6:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
movq %rdx, 24(%rbp)
movq 24(%rbp), %rax
movq (%rax), %rdx
movq 16(%rbp), %rax
movq %rdx, (%rax)
movq 16(%rbp), %rax
movq (%rax), %rax
subq $24, %rax
movq (%rax), %rax
addq 16(%rbp), %rax
movq 24(%rbp), %rdx
addq $8, %rdx
movq (%rdx), %rdx
movq %rdx, (%rax)
movq 16(%rbp), %rcx
call _ZN10StreamBaseD2Ev
movl $0, %eax
andl $2, %eax
testl %eax, %eax
je .L11
movq 16(%rbp), %rax
addq $8, %rax
movq %rax, %rcx
call _ZN5VBaseD2Ev
.L11:
movl $0, %eax
andl $1, %eax
testb %al, %al
je .L10
movq 16(%rbp), %rcx
call _ZdlPv
.L10:
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.section .text$_ZN6StreamD1Ev,"x"
.linkonce discard
.align 2
.globl _ZN6StreamD1Ev
.def _ZN6StreamD1Ev; .scl 2; .type 32; .endef
.seh_proc _ZN6StreamD1Ev
_ZN6StreamD1Ev:
.LFB7:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
leaq 24+_ZTV6Stream(%rip), %rdx
movq 16(%rbp), %rax
movq %rdx, (%rax)
movl $8, %eax
addq 16(%rbp), %rax
leaq 64+_ZTV6Stream(%rip), %rdx
movq %rdx, (%rax)
movq 16(%rbp), %rcx
call _ZN10StreamBaseD2Ev
movl $2, %eax
andl $2, %eax
testl %eax, %eax
je .L14
movq 16(%rbp), %rax
addq $8, %rax
movq %rax, %rcx
call _ZN5VBaseD2Ev
.L14:
movl $2, %eax
andl $1, %eax
testb %al, %al
je .L13
movq 16(%rbp), %rcx
call _ZdlPv
.L13:
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.section .text$_ZN6StreamD0Ev,"x"
.linkonce discard
.align 2
.globl _ZN6StreamD0Ev
.def _ZN6StreamD0Ev; .scl 2; .type 32; .endef
.seh_proc _ZN6StreamD0Ev
_ZN6StreamD0Ev:
.LFB8:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
movq 16(%rbp), %rcx
call _ZN6StreamD1Ev
movq 16(%rbp), %rcx
call _ZdlPv
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.globl r
.bss
.align 4
r:
.space 4
.section .text$_ZN13DerivedStreamD1Ev,"x"
.linkonce discard
.align 2
.globl _ZN13DerivedStreamD1Ev
.def _ZN13DerivedStreamD1Ev; .scl 2; .type 32; .endef
.seh_proc _ZN13DerivedStreamD1Ev
_ZN13DerivedStreamD1Ev:
.LFB12:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
pushq %rbx
.seh_pushreg %rbx
subq $40, %rsp
.seh_stackalloc 40
.seh_endprologue
movq %rcx, 16(%rbp)
leaq 24+_ZTV13DerivedStream(%rip), %rdx
movq 16(%rbp), %rax
movq %rdx, (%rax)
movl $8, %eax
addq 16(%rbp), %rax
leaq 64+_ZTV13DerivedStream(%rip), %rdx
movq %rdx, (%rax)
leaq _ZTT13DerivedStream(%rip), %rax
addq $8, %rax
movq %rax, %rdx
movq 16(%rbp), %rcx
.LEHB0:
call _ZN6StreamD2Ev
.LEHE0:
movl $2, %eax
andl $2, %eax
testl %eax, %eax
je .L19
movq 16(%rbp), %rax
addq $8, %rax
movq %rax, %rcx
.LEHB1:
call _ZN5VBaseD2Ev
.LEHE1:
.L19:
movl $2, %eax
andl $1, %eax
testb %al, %al
je .L18
movq 16(%rbp), %rcx
call _ZdlPv
jmp .L18
.L23:
movq %rax, %rbx
movl $2, %eax
andl $2, %eax
testl %eax, %eax
je .L22
movq 16(%rbp), %rax
addq $8, %rax
movq %rax, %rcx
call _ZN5VBaseD2Ev
.L22:
movq %rbx, %rax
movq %rax, %rcx
.LEHB2:
call _Unwind_Resume
nop
.LEHE2:
.L18:
nop
addq $40, %rsp
popq %rbx
popq %rbp
ret
.def __gxx_personality_v0; .scl 2; .type 32; .endef
.seh_handler _GCC_specific_handler, @unwind, @except
.seh_handlerdata
.rva __gxx_personality_v0
.section .text$_ZN13DerivedStreamD1Ev,"x"
.linkonce discard
.seh_handlerdata
.LLSDA12:
.byte 0xff
.byte 0xff
.byte 0x1
.uleb128 .LLSDACSE12-.LLSDACSB12
.LLSDACSB12:
.uleb128 .LEHB0-.LFB12
.uleb128 .LEHE0-.LEHB0
.uleb128 .L23-.LFB12
.uleb128 0
.uleb128 .LEHB1-.LFB12
.uleb128 .LEHE1-.LEHB1
.uleb128 0
.uleb128 0
.uleb128 .LEHB2-.LFB12
.uleb128 .LEHE2-.LEHB2
.uleb128 0
.uleb128 0
.LLSDACSE12:
.section .text$_ZN13DerivedStreamD1Ev,"x"
.linkonce discard
.seh_endproc
.section .text$_ZN13DerivedStreamD0Ev,"x"
.linkonce discard
.align 2
.globl _ZN13DerivedStreamD0Ev
.def _ZN13DerivedStreamD0Ev; .scl 2; .type 32; .endef
.seh_proc _ZN13DerivedStreamD0Ev
_ZN13DerivedStreamD0Ev:
.LFB13:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $32, %rsp
.seh_stackalloc 32
.seh_endprologue
movq %rcx, 16(%rbp)
movq 16(%rbp), %rcx
call _ZN13DerivedStreamD1Ev
movq 16(%rbp), %rcx
call _ZdlPv
nop
addq $32, %rsp
popq %rbp
ret
.seh_endproc
.text
.globl _Z7ctor2_xv
.def _Z7ctor2_xv; .scl 2; .type 32; .endef
.seh_proc _Z7ctor2_xv
_Z7ctor2_xv:
.LFB9:
pushq %rbp
.seh_pushreg %rbp
movq %rsp, %rbp
.seh_setframe %rbp, 0
subq $48, %rsp
.seh_stackalloc 48
.seh_endprologue
leaq -16(%rbp), %rax
movq %rax, %rcx
.LEHB3:
call _ZN13DerivedStreamC1Ev
leaq -16(%rbp), %rax
movq %rax, %rcx
call _ZN13DerivedStreamD1Ev
.LEHE3:
.L29:
movl r(%rip), %eax
testl %eax, %eax
je .L27
.LEHB4:
call abort
nop
.L27:
movl $0, %ecx
call exit
nop
.L30:
movq %rax, %rcx
call __cxa_begin_catch
call __cxa_end_catch
.LEHE4:
jmp .L29
.seh_handler _GCC_specific_handler, @unwind, @except
.seh_handlerdata
.rva __gxx_personality_v0
.text
.seh_handlerdata
.align 4
.LLSDA9:
.byte 0xff
.byte 0x9b
.uleb128 .LLSDATT9-.LLSDATTD9
.LLSDATTD9:
.byte 0x1
.uleb128 .LLSDACSE9-.LLSDACSB9
.LLSDACSB9:
.uleb128 .LEHB3-.LFB9
.uleb128 .LEHE3-.LEHB3
.uleb128 .L30-.LFB9
.uleb128 0x1
.uleb128 .LEHB4-.LFB9
.uleb128 .LEHE4-.LEHB4
.uleb128 0
.uleb128 0
.LLSDACSE9:
.byte 0x1
.byte 0
.align 4
.long 0
.LLSDATT9:
.text
.seh_endproc
.globl _ZTV13DerivedStream
.section .data$_ZTV13DerivedStream,"w"
.linkonce same_size
.align 32
_ZTV13DerivedStream:
.quad 8
.quad 0
.quad _ZTI13DerivedStream
.quad _ZN13DerivedStreamD1Ev
.quad _ZN13DerivedStreamD0Ev
.quad 0
.quad -8
.quad _ZTI13DerivedStream
.quad _ZN5VBase1fEv
.globl _ZTT13DerivedStream
.section .data$_ZTT13DerivedStream,"w"
.linkonce same_size
.align 32
_ZTT13DerivedStream:
.quad _ZTV13DerivedStream+24
.quad _ZTC13DerivedStream0_6Stream+24
.quad _ZTC13DerivedStream0_6Stream+64
.quad _ZTV13DerivedStream+64
.globl _ZTC13DerivedStream0_6Stream
.section .data$_ZTC13DerivedStream0_6Stream,"w"
.linkonce same_size
.align 32
_ZTC13DerivedStream0_6Stream:
.quad 8
.quad 0
.quad _ZTI6Stream
.quad _ZN6StreamD1Ev
.quad _ZN6StreamD0Ev
.quad 0
.quad -8
.quad _ZTI6Stream
.quad _ZN5VBase1fEv
.globl _ZTV6Stream
.section .data$_ZTV6Stream,"w"
.linkonce same_size
.align 32
_ZTV6Stream:
.quad 8
.quad 0
.quad _ZTI6Stream
.quad _ZN6StreamD1Ev
.quad _ZN6StreamD0Ev
.quad 0
.quad -8
.quad _ZTI6Stream
.quad _ZN5VBase1fEv
.globl _ZTT6Stream
.section .data$_ZTT6Stream,"w"
.linkonce same_size
.align 16
_ZTT6Stream:
.quad _ZTV6Stream+24
.quad _ZTV6Stream+64
.globl _ZTV10StreamBase
.section .data$_ZTV10StreamBase,"w"
.linkonce same_size
.align 32
_ZTV10StreamBase:
.quad 0
.quad _ZTI10StreamBase
.quad _ZN10StreamBaseD1Ev
.quad _ZN10StreamBaseD0Ev
.globl _ZTS13DerivedStream
.section .rdata$_ZTS13DerivedStream,"dr"
.linkonce same_size
.align 16
_ZTS13DerivedStream:
.ascii "13DerivedStream\0"
.globl _ZTI13DerivedStream
.section .data$_ZTI13DerivedStream,"w"
.linkonce same_size
.align 16
_ZTI13DerivedStream:
.quad _ZTVN10__cxxabiv120__si_class_type_infoE+16
.quad _ZTS13DerivedStream
.quad _ZTI6Stream
.globl _ZTS6Stream
.section .rdata$_ZTS6Stream,"dr"
.linkonce same_size
_ZTS6Stream:
.ascii "6Stream\0"
.globl _ZTI6Stream
.section .data$_ZTI6Stream,"w"
.linkonce same_size
.align 32
_ZTI6Stream:
.quad _ZTVN10__cxxabiv121__vmi_class_type_infoE+16
.quad _ZTS6Stream
.long 0
.long 2
.quad _ZTI5VBase
.long -6141
.space 4
.quad _ZTI10StreamBase
.long 2
.space 4
.globl _ZTS10StreamBase
.section .rdata$_ZTS10StreamBase,"dr"
.linkonce same_size
_ZTS10StreamBase:
.ascii "10StreamBase\0"
.globl _ZTI10StreamBase
.section .data$_ZTI10StreamBase,"w"
.linkonce same_size
.align 16
_ZTI10StreamBase:
.quad _ZTVN10__cxxabiv117__class_type_infoE+16
.quad _ZTS10StreamBase
.globl _ZTS5VBase
.section .rdata$_ZTS5VBase,"dr"
.linkonce same_size
_ZTS5VBase:
.ascii "5VBase\0"
.globl _ZTI5VBase
.section .data$_ZTI5VBase,"w"
.linkonce same_size
.align 16
_ZTI5VBase:
.quad _ZTVN10__cxxabiv117__class_type_infoE+16
.quad _ZTS5VBase
.def _ZdlPv; .scl 2; .type 32; .endef
.def _ZN5VBaseD2Ev; .scl 2; .type 32; .endef
.def _Unwind_Resume; .scl 2; .type 32; .endef
.def _ZN13DerivedStreamC1Ev; .scl 2; .type 32; .endef
.def abort; .scl 2; .type 32; .endef
.def exit; .scl 2; .type 32; .endef
.def __cxa_begin_catch; .scl 2; .type 32; .endef
.def __cxa_end_catch; .scl 2; .type 32; .endef
|
tactcomplabs/xbgas-binutils-gdb
| 13,140
|
gas/testsuite/gas/ft32/insnsc.s
|
.section .text
add $r21,$r21,$r0
sub $r21,$r21,$r0
and $r21,$r21,$r0
or $r21,$r21,$r0
bins $r21,$r21,$r0
add $r21,$r21,$r1
sub $r21,$r21,$r1
and $r21,$r21,$r1
or $r21,$r21,$r1
ashl $r21,$r21,$r1
bins $r21,$r21,$r1
add $r21,$r21,$r2
sub $r21,$r21,$r2
and $r21,$r21,$r2
or $r21,$r21,$r2
bins $r21,$r21,$r2
add $r21,$r21,$r3
sub $r21,$r21,$r3
and $r21,$r21,$r3
or $r21,$r21,$r3
bins $r21,$r21,$r3
add $r21,$r21,$r4
sub $r21,$r21,$r4
and $r21,$r21,$r4
or $r21,$r21,$r4
bins $r21,$r21,$r4
add $r21,$r21,$r5
add $r21,$r21,$r6
sub $r21,$r21,$r6
or $r21,$r21,$r6
add $r21,$r21,$r7
sub $r21,$r21,$r7
or $r21,$r21,$r7
add $r21,$r21,$r8
sub $r21,$r21,$r8
add $r21,$r21,$r9
sub $r21,$r21,$r9
add $r21,$r21,$r10
sub $r21,$r21,$r10
add $r21,$r21,$r11
add $r21,$r21,$r13
sub $r21,$r21,$r13
or $r21,$r21,$r13
add $r21,$r21,$r14
sub $r21,$r21,$r14
add $r21,$r21,$r15
sub $r21,$r21,$r15
or $r21,$r21,$r15
add $r21,$r21,$r16
sub $r21,$r21,$r16
add $r21,$r21,$r17
add $r21,$r21,$r18
sub $r21,$r21,$r18
add $r21,$r21,$r19
add $r21,$r21,$r20
add $r21,$r21,$r23
add $r21,$r21,$r24
add $r21,$r21,$r25
add $r21,$r21,$r26
add $r21,$r21,$r27
add $r21,$r21,$r30
add $r21,$r21,$r31
add $r21,$r21,0
ldl $r21,$r21,0
bexts $r21,$r21,0
bextu $r21,$r21,0
add $r21,$r21,1
ror $r21,$r21,1
ldl $r21,$r21,1
and $r21,$r21,1
xor $r21,$r21,1
lshr $r21,$r21,1
ashr $r21,$r21,1
add $r21,$r21,2
ror $r21,$r21,2
ashl $r21,$r21,2
ashr $r21,$r21,2
and $r21,$r21,3
ashl $r21,$r21,3
lshr $r21,$r21,3
add $r21,$r21,4
ashl $r21,$r21,4
ashl $r21,$r21,5
and $r21,$r21,6
and $r21,$r21,7
add $r21,$r21,8
ror $r21,$r21,8
ashl $r21,$r21,8
lshr $r21,$r21,8
ashl $r21,$r21,9
add $r21,$r21,12
ashr $r21,$r21,12
and $r21,$r21,15
add $r21,$r21,16
ashl $r21,$r21,16
lshr $r21,$r21,16
ashr $r21,$r21,16
bins $r21,$r21,16
add $r21,$r21,24
sub $r21,$r21,24
add $r21,$r21,28
sub $r21,$r21,28
and $r21,$r21,31
add $r21,$r21,32
sub $r21,$r21,32
bins $r21,$r21,32
bins $r21,$r21,33
bins $r21,$r21,34
add $r21,$r21,36
sub $r21,$r21,36
bins $r21,$r21,36
bins $r21,$r21,37
bins $r21,$r21,38
bins $r21,$r21,39
add $r21,$r21,40
sub $r21,$r21,40
bins $r21,$r21,42
bins $r21,$r21,43
add $r21,$r21,44
sub $r21,$r21,44
add $r21,$r21,48
add $r21,$r21,52
ldl $r21,$r21,60
bins $r21,$r21,64
ldl $r21,$r21,84
ldl $r21,$r21,85
and $r21,$r21,127
add $r21,$r21,128
add $r21,$r21,152
bins $r21,$r21,160
add $r21,$r21,168
add $r21,$r21,220
and $r21,$r21,255
ldl $r21,$r21,256
ldl $r21,$r21,309
bins $r21,$r21,311
ldl $r21,$r21,318
add $r21,$r21,368
bins $r21,$r21,404
bins $r21,$r21,-480
bins $r21,$r21,-479
bins $r21,$r21,-478
bins $r21,$r21,-477
bins $r21,$r21,-476
bins $r21,$r21,-475
bins $r21,$r21,-474
bins $r21,$r21,-473
bins $r21,$r21,-472
bins $r21,$r21,-471
bins $r21,$r21,-469
bins $r21,$r21,-465
bins $r21,$r21,-461
bins $r21,$r21,-457
ldl $r21,$r21,-456
add $r21,$r21,-16
add $r21,$r21,-8
add $r21,$r21,-4
add $r21,$r21,-2
add $r21,$r21,-1
ldl $r21,$r21,-1
xor $r21,$r21,-1
sub $r21,$r0,$r21
sub $r21,$r1,$r21
sub $r21,$r2,$r21
ashl $r21,$r2,$r21
sub $r21,$r3,$r21
sub $r21,$r4,$r21
sub $r21,$r6,$r21
sub $r21,$r7,$r21
cmp.b $r21,0
cmp.b $r21,1
cmp.b $r21,2
cmp.b $r21,3
cmp.b $r21,4
cmp.b $r21,5
cmp.b $r21,7
cmp.b $r21,8
cmp.b $r21,-1
cmp $r21,$r0
cmp $r21,$r1
cmp $r21,$r2
cmp $r21,$r3
cmp $r21,$r4
cmp $r21,$r5
cmp $r21,$r6
cmp $r21,$r7
cmp $r21,$r8
cmp $r21,$r10
cmp $r21,$r13
cmp $r21,$r14
cmp $r21,$r15
cmp $r21,$r16
cmp $r21,$r17
cmp $r21,$r18
cmp $r21,$r19
cmp $r21,$r23
cmp $r21,0
cmp $r21,1
cmp $r21,2
cmp $r21,3
cmp $r21,4
cmp $r21,5
cmp $r21,6
cmp $r21,7
cmp $r21,8
cmp $r21,9
cmp $r21,15
cmp $r21,16
cmp $r21,17
cmp $r21,20
cmp $r21,22
cmp $r21,27
cmp $r21,31
cmp $r21,32
btst $r21,32
btst $r21,33
btst $r21,34
btst $r21,35
cmp $r21,36
btst $r21,36
btst $r21,37
btst $r21,38
btst $r21,39
btst $r21,40
btst $r21,41
btst $r21,42
btst $r21,44
btst $r21,45
btst $r21,46
btst $r21,47
btst $r21,48
btst $r21,49
btst $r21,51
btst $r21,54
btst $r21,55
cmp $r21,56
btst $r21,58
btst $r21,59
btst $r21,60
btst $r21,61
cmp $r21,255
cmp $r21,-1
push $r21
add $r21,$r0,$r1
and $r21,$r0,$r2
or $r21,$r0,$r2
add $r21,$r0,$r3
and $r21,$r0,$r3
add $r21,$r0,0
bexts $r21,$r0,0
bextu $r21,$r0,0
add $r21,$r0,1
and $r21,$r0,1
ashl $r21,$r0,2
ashl $r21,$r0,3
add $r21,$r0,4
ashr $r21,$r0,12
lshr $r21,$r0,16
add $r21,$r0,136
add $r21,$r0,172
add $r21,$r0,224
add $r21,$r0,232
and $r21,$r0,255
bexts $r21,$r0,256
add $r21,$r0,288
add $r21,$r0,360
add $r21,$r0,368
bins $r21,$r0,-480
add $r21,$r0,-8
add $r21,$r1,$r2
and $r21,$r1,$r2
add $r21,$r1,0
bexts $r21,$r1,0
bextu $r21,$r1,0
add $r21,$r1,1
ashl $r21,$r1,2
ashl $r21,$r1,3
lshr $r21,$r1,31
and $r21,$r1,255
bexts $r21,$r1,256
ldl $r21,$r1,279
add $r21,$r1,-8
add $r21,$r1,-1
add $r21,$r2,$r3
add $r21,$r2,0
bexts $r21,$r2,0
bextu $r21,$r2,0
add $r21,$r2,1
ashl $r21,$r2,2
and $r21,$r2,7
add $r21,$r2,8
ldl $r21,$r2,63
and $r21,$r2,255
bexts $r21,$r2,256
ldl $r21,$r2,372
add $r21,$r2,-1
add $r21,$r3,$r3
add $r21,$r3,0
bexts $r21,$r3,0
add $r21,$r3,1
and $r21,$r3,255
bexts $r21,$r3,256
add $r21,$r4,$r16
add $r21,$r4,0
and $r21,$r4,1
add $r21,$r4,-8
add $r21,$r5,0
or $r21,$r6,$r8
add $r21,$r6,0
lshr $r21,$r6,16
add $r21,$r7,0
add $r21,$r8,0
lshr $r21,$r8,16
bins $r21,$r8,16
add $r21,$r9,0
add $r21,$r13,$r14
add $r21,$r13,0
add $r21,$r13,4
add $r21,$r13,8
add $r21,$r13,12
add $r21,$r13,48
add $r21,$r13,60
add $r21,$r13,144
add $r21,$r13,172
add $r21,$r13,180
add $r21,$r13,188
add $r21,$r13,232
add $r21,$r13,348
add $r21,$r13,360
add $r21,$r13,368
add $r21,$r13,372
add $r21,$r13,376
add $r21,$r13,508
add $r21,$r14,0
add $r21,$r14,1
add $r21,$r14,232
add $r21,$r15,0
bextu $r21,$r15,0
and $r21,$r15,7
add $r21,$r16,0
add $r21,$r17,0
add $r21,$r18,0
add $r21,$r19,0
add $r21,$r20,0
add $r21,$r21,0
add $r21,$r22,0
add $r21,$r23,0
add $r21,$r24,0
add $r21,$r24,4
add $r21,$r25,0
add $r21,$r26,0
add $r21,$r27,0
add $r21,$r29,-204
add $r21,$r29,-192
add $r21,$r29,-188
add $r21,$r29,-180
add $r21,$r29,-172
add $r21,$r29,-164
add $r21,$r29,-149
add $r21,$r29,-144
add $r21,$r29,-136
add $r21,$r29,-116
bextu $r21,$r30,32
bextu $r21,$r30,33
bextu $r21,$r30,38
add $r21,$r31,24
add $r21,$r31,26
add $r21,$r31,27
add $r21,$r31,28
add $r21,$r31,32
add $r21,$r31,36
add $r21,$r31,40
add $r21,$r31,56
add $r21,$r31,132
add $r21,$r31,140
add $r21,$r31,144
add $r21,$r31,152
add $r21,$r31,216
ldk $r21,0
ldk $r21,1
ldk $r21,2
ldk $r21,3
ldk $r21,4
ldk $r21,5
ldk $r21,6
ldk $r21,7
ldk $r21,8
ldk $r21,9
ldk $r21,10
ldk $r21,11
ldk $r21,12
ldk $r21,13
ldk $r21,14
ldk $r21,15
ldk $r21,16
ldk $r21,17
ldk $r21,18
ldk $r21,19
ldk $r21,20
ldk $r21,21
ldk $r21,23
ldk $r21,24
ldk $r21,25
ldk $r21,26
ldk $r21,29
ldk $r21,32
ldk $r21,35
ldk $r21,36
ldk $r21,39
ldk $r21,43
ldk $r21,44
ldk $r21,45
ldk $r21,48
ldk $r21,53
ldk $r21,60
ldk $r21,64
ldk $r21,72
ldk $r21,100
ldk $r21,108
ldk $r21,128
ldk $r21,255
ldk $r21,256
ldk $r21,432
ldk $r21,440
ldk $r21,512
ldk $r21,536
ldk $r21,576
ldk $r21,588
ldk $r21,592
ldk $r21,1000
ldk $r21,1024
ldk $r21,1033
ldk $r21,1364
ldk $r21,1536
ldk $r21,1680
ldk $r21,1840
ldk $r21,2047
ldk $r21,2048
ldk $r21,2304
ldk $r21,4095
ldk $r21,4096
ldk $r21,6188
ldk $r21,7024
ldk $r21,7196
ldk $r21,7204
ldk $r21,8191
ldk $r21,8192
ldk $r21,13720
ldk $r21,14060
ldk $r21,16383
ldk $r21,21184
ldk $r21,21732
ldk $r21,23100
ldk $r21,24484
ldk $r21,25704
ldk $r21,26392
ldk $r21,32768
ldk $r21,49152
ldk $r21,65535
ldk $r21,65536
ldk $r21,65544
ldk $r21,66208
ldk $r21,83221
ldk $r21,262144
ldk $r21,327680
ldk $r21,507904
ldk $r21,-2048
ldk $r21,-1024
ldk $r21,-1023
ldk $r21,-1022
ldk $r21,-1
pop $r21
link $r21,0
link $r21,24
link $r21,28
link $r21,32
unlink $r21
return
ldi.b $r21,$r0,0
ldi.b $r21,$r0,1
ldi.b $r21,$r0,2
ldi.b $r21,$r0,3
ldi.b $r21,$r1,0
ldi.b $r21,$r1,2
ldi.b $r21,$r2,0
ldi.b $r21,$r3,0
ldi.b $r21,$r4,0
ldi.b $r21,$r13,0
ldi.b $r21,$r13,5
ldi.b $r21,$r13,6
ldi.b $r21,$r13,7
ldi.b $r21,$r13,15
ldi.b $r21,$r13,64
ldi.b $r21,$r13,67
ldi.b $r21,$r13,84
ldi.b $r21,$r14,0
ldi.b $r21,$r15,0
ldi.b $r21,$r16,0
ldi.b $r21,$r18,0
ldi.b $r21,$r22,0
ldi.b $r21,$r31,27
ldi.s $r21,$r0,0
ldi.s $r21,$r1,0
ldi.s $r21,$r1,2
ldi.s $r21,$r1,12
ldi.s $r21,$r2,0
ldi.s $r21,$r13,0
ldi.s $r21,$r13,2
ldi.s $r21,$r13,4
ldi.s $r21,$r13,6
ldi.s $r21,$r13,8
ldi.s $r21,$r13,10
ldi.s $r21,$r13,12
ldi.s $r21,$r13,36
ldi.s $r21,$r14,0
ldi.s $r21,$r14,2
ldi.s $r21,$r14,12
ldi.s $r21,$r15,0
ldi.s $r21,$r15,2
ldi $r21,$r0,0
ldi $r21,$r0,4
ldi $r21,$r0,8
ldi $r21,$r0,12
ldi $r21,$r0,16
ldi $r21,$r0,20
ldi $r21,$r0,24
ldi $r21,$r0,28
ldi $r21,$r0,32
ldi $r21,$r0,36
ldi $r21,$r0,40
ldi $r21,$r0,44
ldi $r21,$r0,48
ldi $r21,$r0,56
ldi $r21,$r0,60
ldi $r21,$r0,64
ldi $r21,$r0,68
ldi $r21,$r0,88
ldi $r21,$r0,108
ldi $r21,$r1,0
ldi $r21,$r1,4
ldi $r21,$r1,8
ldi $r21,$r1,12
ldi $r21,$r1,16
ldi $r21,$r1,24
ldi $r21,$r1,32
ldi $r21,$r1,36
ldi $r21,$r1,40
ldi $r21,$r1,48
ldi $r21,$r1,52
ldi $r21,$r1,56
ldi $r21,$r1,60
ldi $r21,$r1,68
ldi $r21,$r1,112
ldi $r21,$r1,120
ldi $r21,$r2,0
ldi $r21,$r2,4
ldi $r21,$r2,8
ldi $r21,$r2,12
ldi $r21,$r2,16
ldi $r21,$r2,44
ldi $r21,$r3,0
ldi $r21,$r3,4
ldi $r21,$r3,8
ldi $r21,$r4,0
ldi $r21,$r4,4
ldi $r21,$r4,8
ldi $r21,$r5,0
ldi $r21,$r6,0
ldi $r21,$r7,0
ldi $r21,$r9,0
ldi $r21,$r10,0
ldi $r21,$r13,0
ldi $r21,$r13,4
ldi $r21,$r13,8
ldi $r21,$r13,12
ldi $r21,$r13,16
ldi $r21,$r13,20
ldi $r21,$r13,24
ldi $r21,$r13,28
ldi $r21,$r13,32
ldi $r21,$r13,36
ldi $r21,$r13,40
ldi $r21,$r13,44
ldi $r21,$r13,48
ldi $r21,$r13,52
ldi $r21,$r13,56
ldi $r21,$r13,60
ldi $r21,$r13,64
ldi $r21,$r13,68
ldi $r21,$r13,72
ldi $r21,$r13,76
ldi $r21,$r13,80
ldi $r21,$r13,88
ldi $r21,$r14,0
ldi $r21,$r14,4
ldi $r21,$r14,8
ldi $r21,$r14,12
ldi $r21,$r14,16
ldi $r21,$r14,20
ldi $r21,$r14,24
ldi $r21,$r14,28
ldi $r21,$r14,36
ldi $r21,$r14,40
ldi $r21,$r14,44
ldi $r21,$r14,56
ldi $r21,$r14,60
ldi $r21,$r14,64
ldi $r21,$r14,68
ldi $r21,$r14,72
ldi $r21,$r14,76
ldi $r21,$r14,84
ldi $r21,$r15,0
ldi $r21,$r15,4
ldi $r21,$r15,8
ldi $r21,$r15,12
ldi $r21,$r15,36
ldi $r21,$r15,60
ldi $r21,$r16,0
ldi $r21,$r16,4
ldi $r21,$r16,8
ldi $r21,$r16,60
ldi $r21,$r17,0
ldi $r21,$r17,4
ldi $r21,$r17,8
ldi $r21,$r17,12
ldi $r21,$r18,0
ldi $r21,$r19,0
ldi $r21,$r20,0
ldi $r21,$r20,4
ldi $r21,$r21,0
ldi $r21,$r22,0
ldi $r21,$r24,0
ldi $r21,$r27,4
ldi $r21,$r27,8
ldi $r21,$r29,-112
ldi $r21,$r29,-108
ldi $r21,$r29,-4
ldi $r21,$r31,24
ldi $r21,$r31,28
ldi $r21,$r31,32
ldi $r21,$r31,36
ldi $r21,$r31,40
ldi $r21,$r31,44
ldi $r21,$r31,48
ldi $r21,$r31,52
ldi $r21,$r31,56
ldi $r21,$r31,60
ldi $r21,$r31,64
ldi $r21,$r31,68
ldi $r21,$r31,72
ldi $r21,$r31,76
ldi $r21,$r31,80
ldi $r21,$r31,84
ldi $r21,$r31,88
sti.b $r21,0,$r0
sti.b $r21,6,$r0
sti.b $r21,9,$r0
sti.b $r21,24,$r0
sti.b $r21,25,$r0
sti.b $r21,27,$r0
sti.b $r21,0,$r1
sti.b $r21,0,$r2
sti.b $r21,0,$r3
sti.b $r21,-121,$r3
sti.b $r21,0,$r4
sti.b $r21,0,$r6
sti.b $r21,0,$r14
sti.b $r21,0,$r15
sti.b $r21,0,$r16
sti.s $r21,0,$r0
sti.s $r21,6,$r0
sti.s $r21,12,$r0
sti.s $r21,0,$r1
sti.s $r21,12,$r1
sti.s $r21,0,$r2
sti.s $r21,0,$r3
sti.s $r21,0,$r4
sti.s $r21,0,$r15
sti $r21,0,$r0
sti $r21,4,$r0
sti $r21,8,$r0
sti $r21,12,$r0
sti $r21,16,$r0
sti $r21,20,$r0
sti $r21,24,$r0
sti $r21,28,$r0
sti $r21,32,$r0
sti $r21,36,$r0
sti $r21,40,$r0
sti $r21,44,$r0
sti $r21,48,$r0
sti $r21,52,$r0
sti $r21,56,$r0
sti $r21,60,$r0
sti $r21,64,$r0
sti $r21,68,$r0
sti $r21,72,$r0
sti $r21,80,$r0
sti $r21,84,$r0
sti $r21,108,$r0
sti $r21,0,$r1
sti $r21,4,$r1
sti $r21,8,$r1
sti $r21,12,$r1
sti $r21,16,$r1
sti $r21,20,$r1
sti $r21,24,$r1
sti $r21,28,$r1
sti $r21,32,$r1
sti $r21,36,$r1
sti $r21,40,$r1
sti $r21,44,$r1
sti $r21,52,$r1
sti $r21,56,$r1
sti $r21,64,$r1
sti $r21,68,$r1
sti $r21,108,$r1
sti $r21,112,$r1
sti $r21,0,$r2
sti $r21,4,$r2
sti $r21,8,$r2
sti $r21,12,$r2
sti $r21,16,$r2
sti $r21,24,$r2
sti $r21,28,$r2
sti $r21,32,$r2
sti $r21,36,$r2
sti $r21,40,$r2
sti $r21,44,$r2
sti $r21,0,$r3
sti $r21,4,$r3
sti $r21,8,$r3
sti $r21,24,$r3
sti $r21,28,$r3
sti $r21,0,$r4
sti $r21,4,$r4
sti $r21,8,$r4
sti $r21,12,$r4
sti $r21,24,$r4
sti $r21,0,$r5
sti $r21,24,$r5
sti $r21,0,$r6
sti $r21,8,$r6
sti $r21,24,$r6
sti $r21,0,$r7
sti $r21,0,$r13
sti $r21,4,$r13
sti $r21,8,$r13
sti $r21,12,$r13
sti $r21,24,$r13
sti $r21,28,$r13
sti $r21,32,$r13
sti $r21,36,$r13
sti $r21,40,$r13
sti $r21,0,$r14
sti $r21,4,$r14
sti $r21,8,$r14
sti $r21,16,$r14
sti $r21,24,$r14
sti $r21,36,$r14
sti $r21,40,$r14
sti $r21,0,$r15
sti $r21,36,$r15
sti $r21,0,$r16
sti $r21,4,$r16
sti $r21,0,$r18
sti $r21,0,$r19
sti $r21,0,$r25
|
tactcomplabs/xbgas-binutils-gdb
| 4,042
|
gas/testsuite/gas/ft32/insn.s
|
# Used for all instructions that have a 3-address form
.macro TERNARY insn
# reg-reg
\insn $r31, $r0, $r0
\insn $r0, $r31, $r0
\insn $r0, $r0, $r31
\insn $r1, $r2, $r4
\insn $r8, $r16, $r0
# immediate
\insn $r31, $r0, -512
\insn $r0, $r31, 0
\insn $r0, $r31, 1
\insn $r0, $r31, 511
# short and byte
\insn\().s $r0, $r31, $r1
\insn\().s $r0, $r31, 77
\insn\().b $r0, $r31, $r1
\insn\().b $r0, $r31, 77
.endm
.macro RegUImm insn
\insn r0, r0, 0
\insn r0, r0, 65535
\insn r0, r31, 0
\insn r0, r31, 65535
\insn r31, r0, 0
\insn r31, r0, 65535
.endm
.macro CMPOP insn
# reg-reg
\insn $r0, $r0
\insn $r31, $r0
\insn $r0, $r31
# immediate
\insn $r0, -512
\insn $r31, 0
\insn $r31, 1
\insn $r31, 511
# short and byte
\insn\().s $r31, $r1
\insn\().s $r31, 77
\insn\().b $r31, $r1
\insn\().b $r31, 77
.endm
.section .data
dalabel:
.long 0
.section .text
pmlabel:
TERNARY add
TERNARY sub
TERNARY and
TERNARY or
TERNARY xor
TERNARY xnor
TERNARY ashl
TERNARY lshr
TERNARY ashr
TERNARY ror
TERNARY ldl
TERNARY bins
TERNARY bexts
TERNARY bextu
TERNARY flip
CMPOP addcc
CMPOP cmp
CMPOP tst
CMPOP btst
# LDI, STI, EXI
ldi.l $r0,$r31,-128
ldi.l $r31,$r0,127
ldi.s $r0,$r31,-128
ldi.s $r0,$r31,127
ldi.b $r31,$r0,-128
ldi.b $r31,$r0,127
sti.l $r31,-128,$r0
sti.l $r0,127,$r31
sti.s $r31,-128,$r0
sti.s $r31,127,$r0
sti.b $r0,-128,$r31
sti.b $r0,127,$r31
exi.l $r0,$r31,-128
exi.l $r31,$r0,127
exi.s $r0,$r31,-128
exi.s $r0,$r31,127
exi.b $r31,$r0,-128
exi.b $r31,$r0,127
# LPM, LPMI
lpm.l $r0,pmlabel
lpm.s $r16,pmlabel
lpm.b $r31,pmlabel
lpmi.l $r0,$r1,-128
lpmi.s $r16,$r1,127
lpmi.b $r31,$r1,-128
# JMP
jmp pmlabel
jmpi $r16
jmpx 31,$r28,1,pmlabel
jmpc nz,pmlabel
# CALL
call pmlabel
calli $r16
callx 31,$r28,1,pmlabel
callc nz,pmlabel
# PUSH, POP
push $r0
push $r16
push $r31
pop $r0
pop $r16
pop $r31
# LINK,UNLINK
link $r0,0
link $r16,65535
link $r31,1017
unlink $r0
unlink $r16
unlink $r31
# RETURN,RETI
return
reti
# LDA,STA,EXA
lda.l $r0,dalabel
lda.s $r16,dalabel
lda.b $r31,dalabel
sta.l dalabel,$r0
sta.s dalabel,$r16
sta.b dalabel,$r31
exa.l $r0,dalabel
exa.s $r16,dalabel
exa.b $r31,dalabel
# LDK
ldk $r0,-524288
ldk $r0,524287
ldk $r0,0
move $r0,$r31
move $r31,$r0
TERNARY udiv
TERNARY umod
TERNARY div
TERNARY mod
TERNARY strcmp
TERNARY memcpy
TERNARY memset
TERNARY mul
TERNARY muluh
TERNARY streamin
TERNARY streamini
TERNARY streamout
TERNARY streamouti
strlen.l $r0,$r31
strlen.l $r31,$r0
strlen.s $r0,$r31
strlen.s $r31,$r0
strlen.b $r0,$r31
strlen.b $r31,$r0
stpcpy.l $r0,$r31
stpcpy.l $r31,$r0
stpcpy.s $r0,$r31
stpcpy.s $r31,$r0
stpcpy.b $r0,$r31
stpcpy.b $r31,$r0
|
tactcomplabs/xbgas-binutils-gdb
| 2,747
|
gas/testsuite/gas/bfin/parallel2.s
|
.section .text;
A0 = A1 || P0 = [sp+20];
a1 = a0 || P0 = [p5+24];
a0 = R0 || P0 = [P4+28];
A1 = r1 || P0 = [P3+32];
R4 = A0 (fu) || P0 = [p3+36];
r5 = A1 (ISS2) || P0 = [P3+40];
R6 = a0 || P0 = [P4+44];
R7 = A1 || P0 = [P4+48];
R6 = A0, R7 = a1 || P0 = [P4+52];
r1 = a1, r0 = a0 (fu) || P0 = [P4+56];
A0.X = r5.l || p0 = [p4+60];
a1.X = r2.L || r0 = [i0 ++ m0];
r0.l = a0.x || r1 = [i0 ++ m1];
R7.l = A1.X || r0 = [i0 ++ m2];
A0.L = r3.l || r0 = [i0 ++ m3];
a1.l = r4.l || r0 = [i1 ++ m3];
A0.h = r6.H || r0 = [i1 ++ m2];
A1.H = r5.h || r0 = [i1 ++ m1];
r0.l = A0 (iu) || r4 = [i1 ++ m0];
R1.H = A1 (s2rnd) || r0 = [i2 ++ m0];
r1.h = a1 || r0 = [i2 ++ m1];
R2.l = A0, r2.H = A1 (IH) || r0 = [i2 ++ m2];
R2.l = A0, r2.H = A1 || r0 = [i2 ++ m3];
r0.H = A1, R0.L = a0 (t) || r5 = [i3 ++ m0];
r0.H = A1, R0.L = a0 (fu) || r5 = [i3 ++ m1];
r0.H = A1, R0.L = a0 (is) || r5 = [i3 ++ m2];
r0.H = A1, R0.L = a0 || r5 = [i3 ++ m3];
A0 = A0 >> 31 || r0 = [fp - 32];
a0 = a0 << 31 || r0 = [fp - 28];
a1 = a1 >> 0 || r0 = [fp - 24];
A1 = A1 << 0 || r0 = [fp - 20];
r7 = r5 << 31 (s) || r0 = [fp - 16];
R3 = r2 >>> 22 || r0 = [fp - 12];
r1.L = R2.H << 15 (S) || r0 = [fp - 8];
r5.h = r2.l >>> 2 || r0 = [fp - 4];
r3.l = Ashift r4.h by r2.l || r0 = [fp - 100];
R7.H = ASHIFT R7.L by R0.L (S) || r0 = [fp - 104];
r7.h = ashift r7.l by r0.l (s) || r0 = [fp - 108];
r6 = AShiFT R5 by R2.L || r0 = [fp - 112];
R0 = Ashift R4 by r1.l (s) || r3 = [fp - 116];
r2 = ashift r6 BY r3.L (S) || r0 = [fp - 120];
A0 = Ashift a0 by r1.l || r0 = [fp - 124];
a1 = ASHIFT a1 by r0.L || r0 = [fp - 128];
r1.H = r2.l >> 15 || R5 = W [P1--] (z);
r7.l = r0.L << 0 || R5 = W [P2] (z);
r5 = r5 >> 31 || R7 = W [P2++] (z);
r0 = r0 << 12 || R5 = W [P2--] (z);
A0 = A0 >> 1 || R5 = W [P2+0] (z);
A0 = A0 << 0 || R5 = W [P2+2] (z);
a1 = A1 << 31 || R5 = W [P2+4] (z);
a1 = a1 >> 16 || R5 = W [P2+30] (z);
R1.H = LShift r2.h by r0.l || R5 = W [P2+24] (z);
r0.l = LSHIFT r0.h by r1.l || R5 = W [P2+22] (z);
r7.L = lshift r6.L BY r2.l || R5 = W [P2+20] (z);
r5 = LShIft R4 bY r3.L || R4 = W [P2+18] (z);
A0 = Lshift a0 By R6.L || R5 = W [P2+16] (z);
A1 = LsHIFt a1 by r5.l || R5 = W [P2+14] (z);
r7 = ROT r7 by -32 || R5 = W [P2+12] (z);
R6 = Rot r7 by -31 || R5 = W [P2+10] (z);
R5 = RoT R7 by 31 || R6 = W [P2+8] (z);
R4 = Rot r7 by 30 || R5 = W [P2+6] (z);
a0 = rot A0 by 0 || R5 = W [P3] (z);
A0 = ROT a0 BY 10 || R5 = W [P3++] (z);
A1 = ROT A1 by -20 || R5 = W [P3--] (z);
A1 = ROT a1 By -32 || R5 = W [P4] (z);
r0 = rot r1 by r2.L || R5 = W [P4++] (z);
R0 = Rot R4 BY R3.L || R5 = W [P4--] (z);
A0 = ROT A0 by r7.l || R5 = W [P5] (z);
A1 = rot a1 bY r6.l || R5 = W [P5++] (z);
NOp || R5 = W [P5--] (z);
|
tactcomplabs/xbgas-binutils-gdb
| 3,614
|
gas/testsuite/gas/bfin/flow2.s
|
.EXTERN MY_LABEL2;
.section .text;
//
//2 Program Flow Control
//
//JUMP ( Preg ) ; /* indirect to an absolute (not PC-relative)address (a) */
//Preg: P5-0, SP, FP
JUMP (P0);
JUMP (P1);
JUMP (P2);
JUMP (P3);
JUMP (P4);
JUMP (P5);
JUMP (SP);
JUMP (FP);
//JUMP ( PC + Preg ) ; /* PC-relative, indexed (a) */
JUMP (PC+P0);
JUMP (PC+P1);
JUMP (PC+P2);
JUMP (PC+P3);
JUMP (PC+P4);
JUMP (PC+P5);
JUMP (PC+SP);
JUMP (PC+FP);
//JUMP pcrelm2 ; /* PC-relative, immediate (a) or (b) */
JUMP 0X0;
JUMP 1234;
JUMP -1234;
JUMP 2;
JUMP -2;
MY_LABEL1:
//JUMP.S pcrel13m2 ; /* PC-relative, immediate, short (a) */
JUMP.S 0X0;
JUMP.S 1234;
JUMP.S -1234;
JUMP.S 2;
JUMP.S -2;
//JUMP.L pcrel25m2 ; /* PC-relative, immediate, long (b) */
JUMP.L 0XFF800000;
JUMP.L 0X007FFFFE;
JUMP.L 0X0;
JUMP.L 1234;
JUMP.L -1234;
JUMP.L 2;
JUMP.L -2;
//JUMP user_label ; /* user-defined absolute address label, */
JUMP MY_LABEL1;
JUMP MY_LABEL2;
JUMP MY_LABEL1-2;
JUMP MY_LABEL2-2;
//IF CC JUMP pcrel11m2 ; /* branch if CC=1, branch predicted as not taken (a) */
IF CC JUMP 0xFFFFFE08;
IF CC JUMP 0x0B4;
IF CC JUMP 0;
//IF CC JUMP pcrel11m2 (bp) ; /* branch if CC=1, branch predicted as taken (a) */
IF CC JUMP 0xFFFFFE08(bp);
IF CC JUMP 0x0B4(bp);
//IF !CC JUMP pcrel11m2 ; /* branch if CC=0, branch predicted as not taken (a) */
IF !CC JUMP 0xFFFFFF22;
IF !CC JUMP 0X120;
//IF !CC JUMP pcrel11m2 (bp) ; /* branch if CC=0, branch predicted as taken (a) */
IF !CC JUMP 0xFFFFFF22(bp);
IF !CC JUMP 0X120(bp);
//IF CC JUMP user_label ; /* user-defined absolute address label, resolved by the assembler/linker to the appropriate PC-relative instruction (a) */
IF CC JUMP MY_LABEL1;
IF CC JUMP MY_LABEL2;
//IF CC JUMP user_label (bp) ; /* user-defined absolute address label, resolved by the assembler/linker to the appropriate PC-relative instruction (a) */
IF CC JUMP MY_LABEL1(bp);
IF CC JUMP MY_LABEL2(bp);
//IF !CC JUMP user_label ; /* user-defined absolute address label, resolved by the assembler/linker to the appropriate PC-relative instruction (a) */
IF !CC JUMP MY_LABEL1;
IF !CC JUMP MY_LABEL2;
//IF !CC JUMP user_label (bp) ; /* user-defined absolute address label, resolved by the assembler/linker to the appropriate PC-relative instruction (a) */
IF !CC JUMP MY_LABEL1(bp);
IF !CC JUMP MY_LABEL2(bp);
//CALL ( Preg ) ; /* indirect to an absolute (not PC-relative) address (a) */
CALL(P0);
CALL(P1);
CALL(P2);
CALL(P3);
CALL(P4);
CALL(P5);
//CALL ( PC + Preg ) ; /* PC-relative, indexed (a) */
CALL(PC+P0);
CALL(PC+P1);
CALL(PC+P2);
CALL(PC+P3);
CALL(PC+P4);
CALL(PC+P5);
//CALL pcrel25m2 ; /* PC-relative, immediate (b) */
CALL 0x123456 ;
CALL -1234;
//CALL user_label ; /* user-defined absolute address label,resolved by the assembler/linker to the appropriate PC-relative instruction (a) or (b) */
CALL MY_LABEL1;
CALL MY_LABEL2;
RTS ; // Return from Subroutine (a)
RTI ; // Return from Interrupt (a)
RTX ; // Return from Exception (a)
RTN ; // Return from NMI (a)
RTE ; // Return from Emulation (a)
lsetup ( 4, 4 ) lc0 ;
lsetup ( beg_poll_bit, end_poll_bit ) lc0 ;
NOP;NOP;
beg_poll_bit: R0=1(Z);
end_poll_bit: R1=2(Z);
lsetup ( 4, 6 ) lc1 ;
lsetup ( FIR_filter, bottom_of_FIR_filter ) lc1 ;
NOP;
FIR_filter: R0=1(Z);
bottom_of_FIR_filter: R1=2(Z);
lsetup ( 4, 8 ) lc0 = p1 ;
lsetup ( 4, 8 ) lc0 = p1>>1 ;
loop DoItSome LC0 ; /* define loop DoItSome with Loop Counter 0 */
loop_begin DoItSome ; /* place before the first instruction in the loop */
R0=1;
R1=2;
loop_end DoItSome ; /* place after the last instruction in the loop */
loop DoItSomeMore LC1 ; /* define loop MyLoop with Loop Counter 1*/
|
tactcomplabs/xbgas-binutils-gdb
| 1,442
|
gas/testsuite/gas/bfin/shift.s
|
.text
.global add_with_shift
add_with_shift:
P0 = (P0 + p1) << 1;
P2 = (p2 + p5) << 2;
r7 = (R7 + r1) << 2;
r3 = (r3 + R0) << 1;
.text
.global shift_with_add
shift_with_add:
P5 = p4 + (P0 << 2);
P0 = p2 + (p1 << 1);
.text
.global arithmetic_shift
arithmetic_shift:
A0 = A0 >> 31;
a0 = a0 << 31;
a1 = a1 >> 0;
A1 = A1 << 0;
r7 = r5 << 31 (s);
R3 = r2 >>> 22;
r1.L = R2.H << 15 (S);
r5.h = r2.l >>> 2;
r0 <<= 0;
r1 >>>= 31;
r0 >>>= R1;
R2 <<= R1;
r3.l = Ashift r4.h by r2.l;
R7.H = ASHIFT R7.L by R0.L (S);
r7.h = ashift r7.l by r0.l (s);
r6 = AShiFT R5 by R2.L;
R0 = Ashift R4 by r1.l (s);
r2 = ashift r6 BY r3.L (S);
A0 = Ashift a0 by r1.l;
a1 = ASHIFT a1 by r0.L;
.text
.global logical_shift
logical_shift:
p0 = p0 >> 1;
P1 = p2 >> 2;
P3 = P1 << 1;
p4 = p5 << 2;
r0 >>= 31;
R7 <<= 31;
r1.H = r2.l >> 15;
r7.l = r0.L << 0;
r5 = r5 >> 31;
r0 = r0 << 12;
A0 = A0 >> 1;
A0 = A0 << 0;
a1 = A1 << 31;
a1 = a1 >> 16;
r5 >>= R7;
R6 <<= r0;
R1.H = LShift r2.h by r0.l;
r0.l = LSHIFT r0.h by r1.l;
r7.L = lshift r6.L BY r2.l;
r5 = LShIft R4 bY r3.L;
A0 = Lshift a0 By R6.L;
A1 = LsHIFt a1 by r5.l;
.text
.global rotate
rotate:
r7 = ROT r7 by -32;
R6 = Rot r7 by -31;
R5 = RoT R7 by 31;
R4 = Rot r7 by 30;
a0 = rot A0 by 0;
A0 = ROT a0 BY 10;
A1 = ROT A1 by -20;
A1 = ROT a1 By -32;
r0 = rot r1 by r2.L;
R0 = Rot R4 BY R3.L;
A0 = ROT A0 by r7.l;
A1 = rot a1 bY r6.l;
|
tactcomplabs/xbgas-binutils-gdb
| 3,725
|
gas/testsuite/gas/bfin/arithmetic.s
|
.text
.global abs
abs:
a0 = abs a0;
A0 = ABS A1;
A1 = Abs a0;
a1 = aBs A1;
A1 = abs a1, a0 = ABS A0;
r0 = abs r2;
.text
.global add
add:
sp = sp + P0;
SP = SP + P2;
FP = p1 + fp;
R7 = r7 + r2 (NS);
r6 = r6 + r0 (s);
r4.L = R2.h + r0.L (s);
r5.H = R1.H + R1.L (S);
r6.L = R6.L + r5.l (NS);
.text
.global add_sub_prescale_down
add_sub_prescale_down:
r4.l = r0 + r1 (RND20);
R3.H = r5 + r0 (rnd20);
r1.L = r7 - R5 (rND20);
.text
.global add_sub_prescale_up
add_sub_prescale_up:
r2.L = R0 + R1 (rnd12);
r7.H = r7 + r6 (RND12);
r5.l = r3 - R2 (rNd12);
r2.h = R1 - R2 (Rnd12);
.text
.global add_immediate
add_immediate:
R5 += -64;
r2 += 63;
i0 += 2;
I3 += 2;
I2 += 4;
i1 += 4;
P0 += 4;
sp += 16;
FP += -32;
.text
.global divide_primitive
divide_primitive:
divs (r3, r5);
divq (R3, R5);
.text
.global expadj
expadj:
r6.L = EXPADJ (r5, r4.l);
R5.l = ExpAdj (r0.h, r1.l);
R4.L = expadj (R3, R5.L) (V);
.text
.global max
max:
R6 = MAX (r5, R2);
r0 = max (r1, r3);
.text
.global min
min:
r5 = mIn (r2, R3);
R4 = Min (r7, R0);
.text
.global modify_decrement
modify_decrement:
A0 -= A1;
a0 -= a1 (w32);
fp -= p2;
SP -= P0;
I3 -= M0;
i1 -= m1;
.text
.global modify_increment
modify_increment:
a0 += a1;
A0 += A1 (w32);
Sp += P1 (Brev);
P5 += Fp (BREV);
i2 += M2;
I0 += m0 (brev);
r7 = ( a0 += a1);
r6.l = (A0 += a1);
R0.H = (a0 += A1);
.text
.global multiply16
multiply16:
R0.l = r1.h * r2.l;
r1.L = r5.H * r0.H (s2rnd);
r7.l = r3.l * r3.H (FU);
r4 = r2.H * r5.H (iSS2);
r0 = r1.l * r3.l (is);
r6 = R5.H * r0.l;
r2.h = r7.l * r6.H (M, iu);
r3.H = r5.H * r0.L;
R0.H = r1.L * r1.H (M);
r1 = r7.H * r6.L (M, is);
R5 = r0.l * r2.h;
r3 = r6.H * r0.H (m);
.text
.global multiply32
multiply32:
R4 *= r0;
r7 *= R2;
.text
.global multiply_accumulate
multiply_accumulate:
a0 = r5.l * R7.H (w32);
a0 = r0.h * r0.l;
A0 += R2.L * r3.H (FU);
A0 += r4.h * r1.L;
a0 -= r7.l * r6.H (Is);
A0 -= R5.H * r2.H;
a1 = r1.L * r0.H (M);
A1 = r2.h * r0.L;
A1 = R7.H * R6.L (M, W32);
a1 += r3.l * r2.l (fu);
a1 += R6.H * r1.L;
A1 -= r0.L * R3.H (is);
a1 -= r2.l * r7.h;
.text
.global multiply_accumulate_half
multiply_accumulate_half:
r7.l = (a0 = r6.H * r5.L);
r0.L = (A0 = r1.h * R2.l) (tfu);
R2.L = (a0 += r5.L * r4.L);
r3.l = (A0 += r7.H * r6.h) (T);
r0.l = (a0 -= r3.h * r2.h);
r1.l = (a0 -= r5.L * r4.L) (iH);
r1.H = (a1 = r1.l * R0.H);
r2.h = (A1 = r0.H * r3.L) (M, Iss2);
R6.H = (a1 += r7.l * r7.H);
r7.h = (a1 += R2.L * R3.L) (S2rnd);
r6.H = (A1 -= R4.h * r2.h);
r5.h = (a1 -= r3.H * r7.L) (M, tFu);
.text
.global multiply_accumulate_data_reg
multiply_accumulate_data_reg:
R0 = (A0 = R1.L * R2.L);
R2 = (A0 = r1.l * r2.l) (is);
r4 = (a0 += r7.h * r6.L);
r6 = (A0 += R5.L * r3.h) (s2RND);
R6 = (a0 -= r2.h * r7.l);
r4 = (A0 -= R0.L * r6.H) (FU);
r7 = (a1 = r0.h * r1.l);
R5 = (A1 = r2.H * r3.H) (M, fu);
R3 = (A1 += r7.l * r5.l);
r1 = (a1 += r2.h * r7.h) (iss2);
r3 = (A1 -= r0.l * R0.H);
R5 = (a1 -= R2.l * R7.h) (m, is);
r7 = (a1+=r1.h*r3.h) (IU);
.text
.global negate
negate:
R5 = - r0;
r7 = -R2(s);
R7 = -r2(Ns);
A0 = -A0;
a0 = -a1;
A1 = -A0;
a1 = -A1;
a1 = -a1, a0 = -a0;
.text
.global round_half
round_half:
R5.L = r3 (rnd);
r6.H = r0 (RND);
.text
.global saturate
saturate:
A0 = A0 (S);
a1 = a1 (s);
A1 = a1 (S), a0 = A0 (s);
.text
.global signbits
signbits:
R5.l = signbits r0;
r0.L = SIGNbits r7.H;
r3.l = signBits A0;
r7.L = SIGNBITS a1;
.text
.global subtract
subtract:
R5 = R3 - R0;
R7 = R7 - r0 (S);
r3 = r2 - r1 (ns);
r5.l = R6.H - R7.h (s);
r0.H = r3.l - r3.h (NS);
.text
.global subtract_immediate
subtract_immediate:
I2 -= 2;
i0 -= 4;
|
tactcomplabs/xbgas-binutils-gdb
| 1,354
|
gas/testsuite/gas/bfin/video.s
|
.text
.global align
align:
R7 = Align8 (r5, r2);
R5 = ALIGN16 (R0, R1);
r2 = ALIGN24 (r5, r0);
.global disalgnexcpt
disalgnexcpt:
DISAlgnExcpt;
.text
.global byteop3p
byteop3p:
R5 = Byteop3p (r1:0, r3:2) (lO);
R0 = BYTEOP3P (R1:0, R3:2) (HI);
R1 = byteop3p (r1:0, r3:2) (LO, r);
r2 = ByteOp3P (r1:0, R3:2) (hi, R);
.text
.global dual16
dual16:
R5 = A1.l + A1.h, R2 = a0.l + a0.h;
.text
.global byteop16p
byteop16p:
(r2, r3) = BYTEOP16P (R1:0, R3:2);
(R6, R0) = byteop16p (r1:0, r3:2) (r);
.text
.global byteop1p
byteop1p:
R7 = BYTEOP1P (R1:0, R3:2);
r2 = byteop1p (r1:0, r3:2) (t);
R3 = ByteOp1P (r1:0, R3:2) (R);
r7 = byteOP1P (R1:0, r3:2) (T, r);
.text
.global byteop2p
byteop2p:
R0 = BYTEOP2P (R1:0, R3:2) (RNDL);
r1 = byteop2p (r1:0, r3:2) (rndh);
R2 = Byteop2p (R1:0, R3:2) (tL);
R3 = Byteop2p (r1:0, r3:2) (TH);
r4 = ByTEOP2P (r1:0, R3:2) (Rndl, R);
R5 = byTeOp2p (R1:0, r3:2) (rndH, r);
r6 = BYTEop2p (r1:0, r3:2) (tl, R);
R7 = byteop2p (r1:0, R3:2) (TH, r);
.text
.global bytepack
bytepack:
R5 = BytePack (R0, R3);
.text
.global byteop16m
byteop16m:
(R6, R2) = ByteOp16M (r1:0, r3:2);
(r0, r5) = byteop16m (R1:0, R3:2) (r);
.text
.global saa
saa:
saa(r1:0, r3:2);
SAA (R1:0, R3:2) (r);
.text
.global byteunpack
byteunpack:
(R7, R2) = byteunpack R1:0;
(R6, R4) = BYTEUNPACK r3:2 (R);
|
tactcomplabs/xbgas-binutils-gdb
| 6,705
|
gas/testsuite/gas/bfin/shift2.s
|
.EXTERN MY_LABEL2;
.section .text;
//
//9 SHIFT/ROTATE OPERATIONS
//
//Preg = ( Preg + Preg ) << 1 ; /* dest_reg = (dest_reg + src_reg) x 2 (a) */
P0 = (P0+P0)<<1;
P0 = (P0+P1)<<1;
P2 = (P2+P0)<<1;
P1 = (P1+P2)<<1;
//P0 = (P2+P0)<<1;
//Preg = ( Preg + Preg ) << 2 ; /* dest_reg = (dest_reg + src_reg) x 4 (a) */
P0 = (P0+P0)<<2;
P0 = (P0+P1)<<2;
P2 = (P2+P0)<<2;
P1 = (P1+P2)<<2;
//P0 = (P2+P0)<<2;
//Dreg = (Dreg + Dreg) << 1 ; /* dest_reg = (dest_reg + src_reg) x 2 (a) */
R0 = (R0+R0)<<1;
R0 = (R0+R1)<<1;
R2 = (R2+R0)<<1;
R1 = (R1+R2)<<1;
//R0 = (R2+R0)<<1;
//Dreg = (Dreg + Dreg) << 2 ; /* dest_reg = (dest_reg + src_reg) x 4 (a) */
R0 = (R0+R0)<<2;
R0 = (R0+R1)<<2;
R2 = (R2+R0)<<2;
R1 = (R1+R2)<<2;
//R0 = (R2+R0)<<2;
//Preg = Preg + ( Preg << 1 ) ; /* adder_pntr + (src_pntr x 2) (a) */
P0 = P0 + (P0 << 1);
P0 = P0 + (P1 << 1);
P0 = P0 + (P2 << 1);
P0 = P1 + (P2 << 1);
P0 = P2 + (P3 << 1);
P1 = P0 + (P0 << 1);
P1 = P0 + (P1 << 1);
P1 = P0 + (P2 << 1);
P1 = P1 + (P2 << 1);
P1 = P2 + (P3 << 1);
//Preg = Preg + ( Preg << 2 ) ; /* adder_pntr + (src_pntr x 4) (a) */
P0 = P0 + (P0 << 2);
P0 = P0 + (P1 << 2);
P0 = P0 + (P2 << 2);
P0 = P1 + (P2 << 2);
P0 = P2 + (P3 << 2);
P1 = P0 + (P0 << 2);
P1 = P0 + (P1 << 2);
P1 = P0 + (P2 << 2);
P1 = P1 + (P2 << 2);
P1 = P2 + (P3 << 2);
//Dreg >>>= uimm5 ; /* arithmetic right shift (a) */
R0 >>>= 0;
R0 >>>= 31;
R0 >>>= 5;
R5 >>>= 0;
R5 >>>= 31;
R5 >>>= 5;
//Dreg <<= uimm5 ; /* logical left shift (a) */
R0 <<= 0;
R0 <<= 31;
R0 <<= 5;
R5 <<= 0;
R5 <<= 31;
R5 <<= 5;
//Dreg_lo_hi = Dreg_lo_hi >>> uimm4 ; /* arithmetic right shift (b) */
R0.L = R0.L >>> 0;
R0.L = R0.L >>> 15;
R0.L = R0.H >>> 0;
R0.L = R0.H >>> 15;
R0.H = R0.L >>> 0;
R0.H = R0.L >>> 15;
R0.H = R0.H >>> 0;
R0.H = R0.H >>> 15;
R0.L = R1.L >>> 0;
R0.L = R1.L >>> 15;
R0.L = R1.H >>> 0;
R0.L = R1.H >>> 15;
R0.H = R1.L >>> 0;
R0.H = R1.L >>> 15;
R0.H = R1.H >>> 0;
R0.H = R1.H >>> 15;
R0.L = R7.L >>> 0;
R1.L = R6.L >>> 15;
R2.L = R5.H >>> 0;
R3.L = R4.H >>> 15;
R4.H = R3.L >>> 0;
R5.H = R2.L >>> 15;
R6.H = R1.H >>> 0;
R7.H = R0.H >>> 15;
//Dreg_lo_hi = Dreg_lo_hi << uimm4 (S) ; /* arithmetic left shift (b) */
R0.L = R0.L << 0(S);
R0.L = R0.L << 15(S);
R0.L = R0.H << 0(S);
R0.L = R0.H << 15(S);
R0.H = R0.L << 0(S);
R0.H = R0.L << 15(S);
R0.H = R0.H << 0(S);
R0.H = R0.H << 15(S);
R0.L = R1.L << 0(S);
R0.L = R1.L << 15(S);
R0.L = R1.H << 0(S);
R0.L = R1.H << 15(S);
R0.H = R1.L << 0(S);
R0.H = R1.L << 15(S);
R0.H = R1.H << 0(S);
R0.H = R1.H << 15(S);
R0.L = R7.L << 0(S);
R1.L = R6.L << 15(S);
R2.L = R5.H << 0(S);
R3.L = R4.H << 15(S);
R4.H = R3.L << 0(S);
R5.H = R2.L << 15(S);
R6.H = R1.H << 0(S);
R7.H = R0.H << 15(S);
//Dreg = Dreg >>> uimm5 ; /* arithmetic right shift (b) */
R0 = R0 >>> 0;
R0 = R0 >>> 31;
R0 = R1 >>> 0;
R0 = R1 >>> 31;
R7 = R0 >>> 0;
R6 = R1 >>> 31;
R5 = R2 >>> 0;
R4 = R3 >>> 31;
R3 = R4 >>> 0;
R2 = R5 >>> 31;
R1 = R6 >>> 0;
R0 = R7 >>> 31;
//Dreg = Dreg << uimm5 (S) ; /* arithmetic left shift (b) */
R0 = R0 << 0(S);
R0 = R0 << 31(S);
R0 = R1 << 0(S);
R0 = R1 << 31(S);
R7 = R0 << 0(S);
R6 = R1 << 31(S);
R5 = R2 << 0(S);
R4 = R3 << 31(S);
R3 = R4 << 0(S);
R2 = R5 << 31(S);
R1 = R6 << 0(S);
R0 = R7 << 31(S);
//A0 = A0 >>> uimm5 ; /* arithmetic right shift (b) */
A0 = A0 >>> 0;
A0 = A0 >>> 15;
A0 = A0 >>> 31;
//A0 = A0 << uimm5 ; /* logical left shift (b) */
A0 = A0 << 0;
A0 = A0 << 15;
A0 = A0 << 31;
//A1 = A1 >>> uimm5 ; /* arithmetic right shift (b) */
A1 = A1 >>> 0;
A1 = A1 >>> 15;
A1 = A1 >>> 31;
//A1 = A1 << uimm5 ; /* logical left shift (b) */
A1 = A1 << 0;
A1 = A1 << 15;
A1 = A1 << 31;
//Dreg >>>= Dreg ; /* arithmetic right shift (a) */
R0 >>>= R0;
R0 >>>= R1;
R1 >>>= R0;
R1 >>>= R7;
//Dreg <<= Dreg ; /* logical left shift (a) */
R0 <<= R0;
R0 <<= R1;
R1 <<= R0;
R1 <<= R7;
//Dreg_lo_hi = ASHIFT Dreg_lo_hi BY Dreg_lo (opt_sat) ; /* arithmetic right shift (b) */
r3.l = ashift r0.h by r7.l ; /* shift, half-word */
r3.h = ashift r0.l by r7.l ;
r3.h = ashift r0.h by r7.l ;
r3.l = ashift r0.l by r7.l ;
r3.l = ashift r0.h by r7.l(s) ; /* shift, half-word, saturated */
r3.h = ashift r0.l by r7.l(s) ; /* shift, half-word, saturated */
r3.h = ashift r0.h by r7.l(s) ;
r3.l = ashift r0.l by r7.l (s) ;
//Dreg = ASHIFT Dreg BY Dreg_lo (opt_sat) ; /* arithmetic right shift (b) */
r4 = ashift r2 by r7.l ; /* shift, word */
r4 = ashift r2 by r7.l (s) ; /* shift, word, saturated */
//A0 = ASHIFT A0 BY Dreg_lo ; /* arithmetic right shift (b)*/
A0 = ashift A0 by r7.l ; /* shift, Accumulator */
//A1 = ASHIFT A1 BY Dreg_lo ; /* arithmetic right shift (b)*/
A1 = ashift A1 by r7.l ; /* shift, Accumulator */
p3 = p2 >> 1 ; /* pointer right shift by 1 */
p3 = p3 >> 2 ; /* pointer right shift by 2 */
p4 = p5 << 1 ; /* pointer left shift by 1 */
p0 = p1 << 2 ; /* pointer left shift by 2 */
r3 >>= 17 ; /* data right shift */
r3 <<= 17 ; /* data left shift */
r3.l = r0.l >> 4 ; /* data right shift, half-word register */
r3.l = r0.h >> 4 ; /* same as above; half-word register combinations are arbitrary */
r3.h = r0.l << 12 ; /* data left shift, half-word register */
r3.h = r0.h << 14 ; /* same as above; half-word register combinations are arbitrary */
r3 = r6 >> 4 ; /* right shift, 32-bit word */
r3 = r6 << 4 ; /* left shift, 32-bit word */
a0 = a0 >> 7 ; /* Accumulator right shift */
a1 = a1 >> 25 ; /* Accumulator right shift */
a0 = a0 << 7 ; /* Accumulator left shift */
a1 = a1 << 14 ; /* Accumulator left shift */
r3 >>= r0 ; /* data right shift */
r3 <<= r1 ; /* data left shift */
r3.l = lshift r0.l by r2.l ; /* shift direction controlled by sign of R2.L */
r3.h = lshift r0.l by r2.l ;
a0 = lshift a0 by r7.l ;
a1 = lshift a1 by r7.l ;
r4 = rot r1 by 31 ; /* rotate left */
r4 = rot r1 by -32 ; /* rotate right */
r4 = rot r1 by 5 ; /* rotate right */
a0 = rot a0 by 22 ; /* rotate Accumulator left */
a0 = rot a0 by -32 ; /* rotate Accumulator left */
a0 = rot a0 by 31 ; /* rotate Accumulator left */
a1 = rot a1 by -32 ; /* rotate Accumulator right */
a1 = rot a1 by 31 ; /* rotate Accumulator right */
a1 = rot a1 by 22 ; /* rotate Accumulator right */
r4 = rot r1 by r2.l ;
a0 = rot a0 by r3.l ;
a1 = rot a1 by r7.l ;
r0.l = r1.l << 0;
r0.l = r1.l << 1;
r0.l = r1.l << 2;
r0.l = r1.l << 4;
r0.l = r1.l >> 0;
r0.l = r1.l >> 1;
r0.l = r1.l >> 2;
r0.l = r1.l >> 4;
r0.l = r1.l >>> 1;
r0.l = r1.l >>> 2;
r0.l = r1.l >>> 4;
r0.l = r1.h << 0;
r0.l = r1.h << 1;
r0.l = r1.h << 2;
r0.l = r1.h << 4;
r0.l = r1.h >> 0;
r0.l = r1.h >> 1;
r0.l = r1.h >> 2;
r0.l = r1.h >> 4;
r0.l = r1.h >>> 1;
r0.l = r1.h >>> 2;
r0.l = r1.h >>> 4;
r0.l = r1.h << 0 (S);
r0.l = r1.h << 1 (S);
r0.l = r1.h << 2 (S);
r0.l = r1.h << 4 (S);
r0.l = r1.h >>> 1 (S);
r0.l = r1.h >>> 2 (S);
r0.l = r1.h >>> 4 (S);
|
tactcomplabs/xbgas-binutils-gdb
| 1,451
|
gas/testsuite/gas/bfin/parallel4.s
|
.section .text;
R7 = Align8 (r5, r2) || [i0] = r0;
R5 = ALIGN16 (R0, R1) || [i0++] = r0;
r2 = ALIGN24 (r5, r0) || [i0--] = r0;
DISAlgnExcpt || [i1] = r0;
R5 = Byteop3p (r1:0, r3:2) (lO)
|| [i1++] = r0;
R0 = BYTEOP3P (R1:0, R3:2) (HI) || // comment test
[i1--] = r0;
R1 = byteop3p (r1:0, r3:2) (LO, r) || [i2] = r0;
r2 = ByteOp3P (r1:0, R3:2) (hi, R) || [i2++] = r0;
R5 = A1.l + A1.h, R2 = a0.l + a0.h || [i2--] = r0;
(r2, r3) = BYTEOP16P (R1:0, R3:2) || [i3] = r0;
(R6, R0) = byteop16p (r1:0, r3:2) (r) || [i3++] = r0;
R7 = BYTEOP1P (R1:0, R3:2) (t) || [i3--] = r0;
r2 = byteop1p (r1:0, r3:2) (t) || [p0] = r0;
R3 = ByteOp1P (r1:0, R3:2) (R) || [p0++] = r0;
r7 = byteOP1P (R1:0, r3:2) (T, r) || [p0--] = r0;
R0 = BYTEOP2P (R1:0, R3:2) (RNDL) || [p1] = r0;
r1 = byteop2p (r1:0, r3:2) (rndh) || [p1++] = r0;
R2 = Byteop2p (R1:0, R3:2) (tL) || [p1--] = r0;
R3 = Byteop2p (r1:0, r3:2) (TH) || [p2] = r0;
r4 = ByTEOP2P (r1:0, R3:2) (Rndl, R) || [p2++] = r0;
R5 = byTeOp2p (R1:0, r3:2) (rndH, r) || [p2--] = r0;
r6 = BYTEop2p (r1:0, r3:2) (tl, R) || [p3] = r0;
R7 = byteop2p (r1:0, R3:2) (TH, r) || [p3++] = r0;
R5 = BytePack (R0, R3) || [p3--] = r0;
(R6, R2) = ByteOp16M (r1:0, r3:2) || [p4] = r0;
(r0, r5) = byteop16m (R1:0, R3:2) (r) || [p4++] = r0;
saa (r1:0, r3:2) || [p4--] = r0;
SAA (R1:0, R3:2) (r) || [p5] = r0;
(R7, R2) = byteunpack R1:0 || [p5++] = r0;
(R6, R4) = BYTEUNPACK r3:2 (R) || [p5--] = r0;
|
tactcomplabs/xbgas-binutils-gdb
| 7,644
|
gas/testsuite/gas/bfin/move2.s
|
.EXTERN MY_LABEL2;
.section .text;
//
//4 MOVE
//
//genreg = genreg ; /* (a) */
R0 = R0;
R1 = R1;
R2 = R2;
R3 = R3;
R4 = R4;
R5 = R5;
R6 = R6;
R7 = R7;
P0 = P0;
P1 = P1;
P2 = P2;
P3 = P3;
P4 = P4;
P5 = P5;
SP = SP;
FP = FP;
A0.X = A0.X;
A0.W = A0.W;
A1.X = A1.X;
A1.W = A1.W;
R0 = A1.W;
R1 = A1.X;
R2 = A0.W;
R3 = A0.X;
R4 = FP;
R5 = SP;
R6 = P5;
R7 = P4;
P0 = P3;
P1 = P2;
P2 = P1;
P3 = P0;
P4 = R7;
P5 = R6;
SP = R5;
FP = R4;
A0.X = R3;
A0.W = R2;
A1.X = R1;
A1.W = R0;
A0.X = A0.W;
A0.X = A1.W;
A0.X = A1.X;
A1.X = A1.W;
A1.X = A0.W;
A1.X = A0.X;
A0.W = A0.W;
A0.W = A1.W;
A0.W = A1.X;
A1.W = A1.W;
A1.W = A0.W;
A1.W = A0.X;
//genreg = dagreg ; /* (a) */
R0 = I0;
R1 = I1;
R2 = I2;
R3 = I3;
R4 = M0;
R5 = M1;
R6 = M2;
R7 = M3;
R0 = B0;
R1 = B1;
R2 = B2;
R3 = B3;
R4 = L0;
R5 = L1;
R6 = L2;
R7 = L3;
P0 = I0;
P1 = I1;
P2 = I2;
P3 = I3;
P4 = M0;
P5 = M1;
SP = M2;
FP = M3;
P0 = B0;
P1 = B1;
P2 = B2;
P3 = B3;
P4 = L0;
P5 = L1;
SP = L2;
FP = L3;
A0.X = I0;
A0.W = I1;
A1.X = I2;
A1.W = I3;
A0.X = M0;
A0.W = M1;
A1.X = M2;
A1.W = M3;
A0.X = B0;
A0.W = B1;
A1.X = B2;
A1.W = B3;
A0.X = L0;
A0.W = L1;
A1.X = L2;
A1.W = L3;
//dagreg = genreg ; /* (a) */
I0 = R0;
I1 = P0;
I2 = SP;
I3 = FP;
I0 = A0.X;
I1 = A0.W;
I2 = A1.X;
I3 = A1.W;
M0 = R0;
M1 = P0;
M2 = SP;
M3 = FP;
M0 = A0.X;
M1 = A0.W;
M2 = A1.X;
M3 = A1.W;
B0 = R0;
B1 = P0;
B2 = SP;
B3 = FP;
B0 = A0.X;
B1 = A0.W;
B2 = A1.X;
B3 = A1.W;
L0 = R0;
L1 = P0;
L2 = SP;
L3 = FP;
L0 = A0.X;
L1 = A0.W;
L2 = A1.X;
L3 = A1.W;
//dagreg = dagreg ; /* (a) */
I0 = I1;
I1 = M0;
I2 = B1;
I3 = L0;
M0 = I1;
M1 = M0;
M2 = B1;
M3 = L0;
B0 = I1;
B1 = M0;
B2 = B1;
B3 = L0;
L0 = I1;
L1 = M0;
L2 = B1;
L3 = L0;
//genreg = USP ; /* (a)*/
R1 = USP;
P2 = USP;
SP = USP;
FP = USP;
A0.X = USP;
A1.W = USP;
//USP = genreg ; /* (a)*/
USP = R2;
USP = P4;
USP = SP;
USP = FP;
USP = A0.X;
USP = A1.W;
//Dreg = sysreg ; /* sysreg to 32-bit D-register (a) */
R0 = ASTAT;
R1 = SEQSTAT;
R2 = SYSCFG;
R3 = RETI;
R4 = RETX;
R5 = RETN;
R6 = RETE;
R7 = RETS;
R0 = LC0;
R1 = LC1;
R2 = LT0;
R3 = LT1;
R4 = LB0;
R5 = LB1;
R6 = CYCLES;
R7 = CYCLES2;
//R0 = EMUDAT;
//sysreg = Dreg ; /* 32-bit D-register to sysreg (a) */
ASTAT = R0;
SEQSTAT = R1;
SYSCFG = R3;
RETI = R4;
RETX =R5;
RETN = R6;
RETE = R7;
RETS = R0;
LC0 = R1;
LC1 = R2;
LT0 = R3;
LT1 = R4;
LB0 = R5;
LB1 = R6;
CYCLES = R7;
CYCLES2 = R0;
//EMUDAT = R1;
//sysreg = Preg ; /* 32-bit P-register to sysreg (a) */
ASTAT = P0;
SEQSTAT = P1;
SYSCFG = P3;
RETI = P4;
RETX =P5;
RETN = SP;
RETE = FP;
RETS = P0;
LC0 = P1;
LC1 = P2;
LT0 = P3;
LT1 = P4;
LB0 = P5;
LB1 = SP;
CYCLES = SP;
CYCLES2 = P0;
//EMUDAT = P1;
//sysreg = USP ; /* (a) */
//ASTAT = USP;
//SEQSTAT = USP;
//SYSCFG = USP;
//RETI = USP;
//RETX =USP;
//RETN = USP;
//RETE = USP;
//RETS = USP;
//LC0 = USP;
//LC1 = USP;
//LT0 = USP;
//LT1 = USP;
//LB0 = USP;
//LB1 = USP;
//CYCLES = USP;
//CYCLES2 = USP;
//EMUDAT = USP;
A0 = A1 ; /* move 40-bit Accumulator value (b) */
A1 = A0 ; /* move 40-bit Accumulator value (b) */
//A0 = Dreg ; /* 32-bit D-register to 40-bit A0, sign extended (b)*/
A0 = R0;
A0 = R1;
A0 = R2;
//A1 = Dreg ; /* 32-bit D-register to 40-bit A1, sign extended (b)*/
A1 = R0;
A1 = R1;
A1 = R2;
//Dreg_even = A0 (opt_mode) ; /* move 32-bit A0.W to even Dreg (b) */
R0 = A0;
R2 = A0(FU);
R4 = A0(ISS2);
//Dreg_odd = A1 (opt_mode) ; /* move 32-bit A1.W to odd Dreg (b) */
R1 = A1;
R3 = A1(FU);
R5 = A1(ISS2);
//Dreg_even = A0, Dreg_odd = A1 (opt_mode) ; /* move both Accumulators to a register pair (b) */
R0 = A0, R1 = A1;
R0 = A0, R1 = A1(FU);
R6 = A0, R7 = A1(ISS2);
//Dreg_odd = A1, Dreg_even = A0 (opt_mode) ; /* move both Accumulators to a register pair (b) */
R1 = A1, R0 = A0;
R3 = A1, R2 = A0(FU);
R5 = A1, R4 = A0(ISS2);
//IF CC DPreg = DPreg ; /* move if CC = 1 (a) */
IF CC R3 = R0;
IF CC R2 = R0;
IF CC R7 = R0;
IF CC R2 = P2;
IF CC R4 = P1;
IF CC R0 = P0;
IF CC R7 = P4;
IF CC P0 = P2;
IF CC P4 = P5;
IF CC P1 = P3;
IF CC P5 = P4;
IF CC P0 = R2;
IF CC P4 = R3;
IF CC P5 = R7;
IF CC P2 = R6;
//IF ! CC DPreg = DPreg ; /* move if CC = 0 (a) */
IF !CC R3 = R0;
IF !CC R2 = R0;
IF !CC R7 = R0;
IF !CC R2 = P2;
IF !CC R4 = P1;
IF !CC R0 = P0;
IF !CC R7 = P4;
IF !CC P0 = P2;
IF !CC P4 = P5;
IF !CC P1 = P3;
IF !CC P5 = P4;
IF !CC P0 = R2;
IF !CC P4 = R3;
IF !CC P5 = R7;
IF !CC P2 = R6;
//Dreg = Dreg_lo (Z) ; /* (a) */
R0 = R0.L(Z);
R2 = R1.L(Z);
R1 = R2.L(Z);
R7 = R6.L(Z);
//Dreg = Dreg_lo (X) ; /* (a)*/
R0 = R0.L(X);
R2 = R1.L(X);
R1 = R2.L(X);
R7 = R6.L(X);
R0 = R0.L;
R2 = R1.L;
R1 = R2.L;
R7 = R6.L;
//A0.X = Dreg_lo ; /* least significant 8 bits of Dreg into A0.X (b) */
A0.X = R0.L;
A0.X = R1.L;
//A1.X = Dreg_lo ; /* least significant 8 bits of Dreg into A1.X (b) */
A1.X = R0.L;
A1.X = R1.L;
//Dreg_lo = A0.X ; /* 8-bit A0.X, sign-extended, into least significant 16 bits of Dreg (b) */
R0.L = A0.X;
R1.L = A0.X;
R7.L = A0.X;
//Dreg_lo = A1.X ; /* 8-bit A1.X, sign-extended, into least significant 16 bits of Dreg (b) */
R0.L = A1.X;
R1.L = A1.X;
R7.L = A1.X;
//A0.L = Dreg_lo ; /* least significant 16 bits of Dreg into least significant 16 bits of A0.W (b) */
A0.L = R0.L;
A0.L = R1.L;
A0.L = R6.L;
//A1.L = Dreg_lo ; /* least significant 16 bits of Dreg into least significant 16 bits of A1.W (b) */
A1.L = R0.L;
A1.L = R1.L;
A1.L = R6.L;
//A0.H = Dreg_hi ; /* most significant 16 bits of Dreg into most significant 16 bits of A0.W (b) */
A0.H = R0.H;
A0.H = R1.H;
A0.H = R6.H;
//A1.H = Dreg_hi ; /* most significant 16 bits of Dreg into most significant 16 bits of A1.W (b) */
A1.H = R0.H;
A1.H = R1.H;
A1.H = R6.H;
//Dreg_lo = A0 (opt_mode) ; /* move A0 to lower half of Dreg (b) */
R0.L = A0;
R1.L = A0;
R0.L = A0(FU);
R1.L = A0(FU);
R0.L = A0(IS);
R1.L = A0(IS);
R0.L = A0(IU);
R1.L = A0(IU);
R0.L = A0(T);
R1.L = A0(T);
R0.L = A0(S2RND);
R1.L = A0(S2RND);
R0.L = A0(ISS2);
R1.L = A0(ISS2);
R0.L = A0(IH);
R1.L = A0(IH);
//Dreg_hi = A1 (opt_mode) ; /* move A1 to upper half of Dreg (b) */
R0.H = A1;
R1.H = A1;
R0.H = A1(FU);
R1.H = A1(FU);
R0.H = A1(IS);
R1.H = A1(IS);
R0.H = A1(IU);
R1.H = A1(IU);
R0.H = A1(T);
R1.H = A1(T);
R0.H = A1(S2RND);
R1.H = A1(S2RND);
R0.H = A1(ISS2);
R1.H = A1(ISS2);
R0.H = A1(IH);
R1.H = A1(IH);
//Dreg_lo = A0, Dreg_hi = A1 (opt_mode) ; /* move both values at once; must go to the lower and upper halves of the same Dreg (b)*/
R0.L = A0, R0.H = A1;
R1.L = A0, R1.H = A1;
R0.L = A0, R0.H = A1(FU);
R1.L = A0, R1.H = A1(FU);
R0.L = A0, R0.H = A1(IS);
R1.L = A0, R1.H = A1(IS);
R0.L = A0, R0.H = A1(IU);
R1.L = A0, R1.H = A1(IU);
R0.L = A0, R0.H = A1(T);
R1.L = A0, R1.H = A1(T);
R0.L = A0, R0.H = A1(S2RND);
R1.L = A0, R1.H = A1(S2RND);
R0.L = A0, R0.H = A1(ISS2);
R1.L = A0, R1.H = A1(ISS2);
R0.L = A0, R0.H = A1(IH);
R1.L = A0, R1.H = A1(IH);
//Dreg_hi = A1, Dreg_lo = AO (opt_mode) ; /* move both values at once; must go to the upper and lower halves of the same Dreg (b) */
R0.H = A1,R0.L = A0;
R1.H = A1,R1.L = A0;
R0.H = A1,R0.L = A0 (FU);
R1.H = A1,R1.L = A0 (FU);
R0.H = A1,R0.L = A0 (IS);
R1.H = A1,R1.L = A0 (IS);
R0.H = A1,R0.L = A0 (IU);
R1.H = A1,R1.L = A0 (IU);
R0.H = A1,R0.L = A0 (T);
R1.H = A1,R1.L = A0 (T);
R0.H = A1,R0.L = A0 (S2RND);
R1.H = A1,R1.L = A0 (S2RND);
R0.H = A1,R0.L = A0 (ISS2);
R1.H = A1,R1.L = A0 (ISS2);
R0.H = A1,R0.L = A0 (IH);
R1.H = A1,R1.L = A0 (IH);
//Dreg = Dreg_byte (Z) ; /* (a)*/
R0 = R1.B(Z);
R0 = R2.B(Z);
R7 = R1.B(Z);
R7 = R2.B(Z);
//Dreg = Dreg_byte (X) ; /* (a) */
R0 = R1.B(X);
R0 = R2.B(X);
R7 = R1.B(X);
R7 = R2.B(X);
|
tactcomplabs/xbgas-binutils-gdb
| 4,881
|
gas/testsuite/gas/bfin/loop_temps.s
|
.align 4
.global _filter_mem16;
.type _filter_mem16, STT_FUNC;
_filter_mem16:
[--sp] = ( r7:7 );
[--SP] = R4;
[--SP] = R5;
[--SP] = P3;
[--SP] = P4;
LINK 68;
[FP+28] = R0;
[FP+32] = R1;
[FP+36] = R2;
[FP+-68] = SP;
R0 = SP;
[FP+-24] = R0;
R0 = [FP+44];
R3 = R0;
R3 += 1;
R2 = R3;
R0 = R2;
R1 = 0 (X);
R2 = -1 (X);
R0 = R0 & R2;
R2 = 15 (X);
R1 = R1 & R2;
R2 = R0 >> 27;
R7 = R1 << 5;
[FP+-60] = R7;
R7 = [FP+-60];
R7 = R2 | R7;
[FP+-60] = R7;
R2 = R0 << 5;
[FP+-64] = R2;
R0 = [FP+-64];
R1 = [FP+-60];
[FP+-64] = R0;
[FP+-60] = R1;
R0 = -1 (X);
R1 = [FP+-64];
R1 = R1 & R0;
[FP+-64] = R1;
R0 = 15 (X);
R2 = [FP+-60];
R2 = R2 & R0;
R2 = R3;
R0 = R2;
R1 = 0 (X);
R2 = -1 (X);
R0 = R0 & R2;
R2 = 15 (X);
R1 = R1 & R2;
R2 = R0 >> 27;
R7 = R1 << 5;
[FP+-52] = R7;
R7 = [FP+-52];
R7 = R2 | R7;
[FP+-52] = R7;
R2 = R0 << 5;
[FP+-56] = R2;
R0 = [FP+-56];
R1 = [FP+-52];
[FP+-56] = R0;
[FP+-52] = R1;
R0 = -1 (X);
R1 = [FP+-56];
R1 = R1 & R0;
[FP+-56] = R1;
R0 = 15 (X);
R2 = [FP+-52];
R2 = R2 & R0;
R0 = R3;
R0 <<= 2;
R0 += 3;
R0 += 3;
R0 >>= 2;
P1 = R0;
P2 = P1 << 2;
SP -= P2;
[FP+-48] = SP;
R0 = [FP+-48];
R0 += 3;
R0 >>= 2;
R0 <<= 2;
[FP+-48] = R0;
R0 = [FP+-48];
[FP+-12] = R0;
R0 = [FP+48];
R0 += 1;
R3 = R0 << 1;
R2 = R3;
R0 = R2;
R1 = 0 (X);
R2 = -1 (X);
R0 = R0 & R2;
R2 = 15 (X);
R1 = R1 & R2;
R2 = R0 >> 27;
R7 = R1 << 5;
[FP+-40] = R7;
R7 = [FP+-40];
R7 = R2 | R7;
[FP+-40] = R7;
R2 = R0 << 5;
[FP+-44] = R2;
R0 = [FP+-44];
R1 = [FP+-40];
[FP+-44] = R0;
[FP+-40] = R1;
R0 = -1 (X);
R1 = [FP+-44];
R1 = R1 & R0;
[FP+-44] = R1;
R0 = 15 (X);
R2 = [FP+-40];
R2 = R2 & R0;
R2 = R3;
R0 = R2;
R1 = 0 (X);
R2 = -1 (X);
R0 = R0 & R2;
R2 = 15 (X);
R1 = R1 & R2;
R2 = R0 >> 27;
R7 = R1 << 5;
[FP+-32] = R7;
R7 = [FP+-32];
R7 = R2 | R7;
[FP+-32] = R7;
R2 = R0 << 5;
[FP+-36] = R2;
R0 = [FP+-36];
R1 = [FP+-32];
[FP+-36] = R0;
[FP+-32] = R1;
R0 = -1 (X);
R1 = [FP+-36];
R1 = R1 & R0;
[FP+-36] = R1;
R0 = 15 (X);
R2 = [FP+-32];
R2 = R2 & R0;
R0 = R3;
R0 <<= 2;
R0 += 3;
R0 += 3;
R0 >>= 2;
P1 = R0;
P2 = P1 << 2;
SP -= P2;
[FP+-28] = SP;
R0 = [FP+-28];
R0 += 3;
R0 >>= 2;
R0 <<= 2;
[FP+-28] = R0;
R0 = [FP+-28];
[FP+-8] = R0;
R0 = [FP+-12];
R0 += 4;
[FP+-16] = R0;
R0 = [FP+-8];
[FP+-20] = R0;
R0 = 0 (X);
[FP+-4] = R0;
jump.s .L6;
.L7:
R1 = [FP+-20];
R0 = [FP+-4];
R0 <<= 2;
R1 = R1 + R0;
P1 = R1;
R0 = [FP+-4];
R1 = R0 << 1;
R0 = [FP+32];
R0 = R0 + R1;
P2 = R0;
R0 = W [P2] (Z);
W [P1] = R0;
R1 = [FP+-20];
R0 = [FP+-4];
R0 <<= 1;
R0 += 1;
R0 <<= 1;
R1 = R1 + R0;
P1 = R1;
R0 = [FP+-4];
R1 = R0 << 1;
R0 = [FP+36];
R0 = R0 + R1;
P2 = R0;
R0 = W [P2] (Z);
W [P1] = R0;
R0 = [FP+-4];
R0 += 1;
[FP+-4] = R0;
.L6:
R1 = [FP+48];
R0 = [FP+-4];
cc =R0<R1;
if cc jump .L7;
R0 = [FP+48];
P0 = [FP+-20];
I0 = P0;
B0 = P0;
L0 = 0;
P2 = [FP+-16];
I2 = P2;
L2 = 0;
P4 = [FP+52];
P0 = [FP+28];
P1 = [FP+40];
R1 = [P4++];
R1 <<= 3;
R1.L = R1 (RND);
R2 = W[P0++];
R1.L = R1.L + R2.L;
W[P1++] = R1;
R2 = PACK(R1.L, R2.L);
[P2] = R2;
R0 += -1;
R3 = 0;
LC0 = R0;
LOOP filter_start256 LC0;
LOOP_BEGIN filter_start256;
R3 += 1;
LC1 = R3;
R1 = [P4++];
A1 = R1;
A0 = 0;
I0 = B0;
I2 = P2;
P2 += 4;
R4 = [I0++] || R5 = [I2--];
LOOP filter_start_inner256 LC1;
LOOP_BEGIN filter_start_inner256;
A1 -= R4.H*R5.H, A0 += R4.L*R5.L (IS) || R4 = [I0++] || R5 = [I2--];
LOOP_END filter_start_inner256;
A0 += A1;
R4 = A0;
R4 <<= 3;
R4.L = R4 (RND);
R2 = W[P0++];
R4.L = R4.L + R2.L;
W[P1++] = R4;
R2 = PACK(R4.L, R2.L);
[P2] = R2;
LOOP_END filter_start256;
R0 = [FP+48];
R0 <<= 1;
I0 = B0;
R0 <<= 1;
L0 = R0;
R0 = [FP+48];
R2 = [FP+44];
R2 = R2 - R0;
R4 = [I0++];
LC0 = R2;
P3 = R0;
R0 <<= 2;
R0 += 8;
I2 = P2;
M0 = R0;
A1 = A0 = 0;
R5 = [I2--];
LOOP filter_mid256 LC0;
LOOP_BEGIN filter_mid256;
LOOP filter_mid_inner256 LC1=P3;
LOOP_BEGIN filter_mid_inner256;
A1 -= R4.H*R5.H, A0 += R4.L*R5.L (IS) || R4 = [I0++] || R5 = [I2--];
LOOP_END filter_mid_inner256;
R0 = (A0 += A1) || I2 += M0;
R0 = R0 << 3 || R5 = W[P0++];
R0.L = R0 (RND);
R0.L = R0.L + R5.L;
R5 = PACK(R0.L, R5.L) || W[P1++] = R0;
A1 = A0 = 0 || [I2--] = R5
LOOP_END filter_mid256;
I2 += 4;
P2 = I2;
P4 = [FP+52];
R0 = [FP+48];
LC0 = R0;
P0 = B0;
A1 = A0 = 0;
LOOP mem_update256 LC0;
LOOP_BEGIN mem_update256;
I2 = P2;
I0 = P0;
P0 += 4;
R0 = LC0;
LC1 = R0;
R5 = [I2--] || R4 = [I0++];
LOOP mem_accum256 LC1;
LOOP_BEGIN mem_accum256;
A1 -= R4.H*R5.H, A0 += R4.L*R5.L (IS) || R4 = [I0++] || R5 = [I2--];
LOOP_END mem_accum256;
R0 = (A0 += A1);
A1 = A0 = 0 || [P4++] = R0;
LOOP_END mem_update256;
L0 = 0;
SP = [FP+-24];
SP = [FP+-68];
UNLINK;
P4 = [SP++];
P3 = [SP++];
R5 = [SP++];
R4 = [SP++];
( r7:7 ) = [sp++];
rts;
.size _filter_mem16, .-_filter_mem16
|
tactcomplabs/xbgas-binutils-gdb
| 3,271
|
gas/testsuite/gas/bfin/vector.s
|
.text
.global add_on_sign
add_on_sign:
r4.h = r4.l = Sign (R1.h) * R5.h + Sign(r1.L) * R5.L;
.text
.global vit_max
vit_max:
R7 = Vit_Max (R5, r2) (ASL);
r0 = VIT_MAX (r0, r6) (asr);
r5.l = vit_max (R3) (asL);
r2.L = VIT_Max (r2) (Asr);
.text
.global vector_abs
vector_abs:
R5 = ABS R5 (V);
r2 = abs r0 (v);
.text
.global vector_add_sub
vector_add_sub:
R5 = r3 +|+ R2;
r5 = r3 +|+ r2 (Sco);
r7 = R0 -|+ r6;
r2 = R1 -|+ R3 (S);
R4 = R0 +|- R2;
R5 = r1 +|- r2 (CO);
r6 = r3 -|- R4;
r7 = R5 -|- R6 (co);
r5 = r4 +|+ r3, R7 = r4 -|- r3 (Sco, ASR);
R0 = R3 +|+ r6, R1 = R3 -|- R6 (ASL);
R7 = R1 +|- R2, R6 = R1 -|+ R2 (S);
r1 = r2 +|- r3, r5 = r2 -|+ r3;
R5 = R0 + R1, R6 = R0 - R1;
r0 = r7 + r1, r3 = r7 - r1 (s);
r7 = A1 + A0, r5 = A1 - A0;
r3 = a0 + a1, r6 = a0 - a1 (s);
.text
.global vector_ashift
vector_ashift:
R1 = R3 >>> 15 (V);
r4 = r0 >>> 4 (v);
r5 = r0 << 0 (v,s);
r2 = r2 << 12 (v, S);
R7 = ASHIFT R5 BY R2.L (V);
r0 = Ashift r2 by r0.L (v, s);
.text
.global vector_lshift
vector_lshift:
R5 = r2 >> 15 (V);
r0 = R1 << 2 (v);
R4 = lshift r1 by r2.L (v);
.text
.global vector_max
vector_max:
R6 = MAX (R0, R1) (V);
.text
.global vector_min
vector_min:
r0 = min (r2, r7) (v);
.text
.global vector_mul
vector_mul:
r2.h = r7.l * r6.h, r2.l = r7.h * r6.h;
R4.L = R1.L * R0.L, R4.H = R1.H * R0.H;
R0.h = R3.H * r2.l, r0.l=r3.l * r2.l;
r5.h = r3.h * r2.h (M), r5.l = r3.L * r2.L (fu);
R0 = r4.l * r7.l, r1 = r4.h * r7.h (s2rnd);
R7 = R2.l * r5.l, r6 = r2.h * r5.h;
R0.L = R7.L * R6.L, R0.H = R7.H * R6.H (ISS2);
r3.h = r0.h * r1.h, r3.l = r0.l * r1.l (is);
a1 = r2.l * r3.h, a0 = r2.h * R3.H;
A0 = R1.l * R0.L, A1 += R1.h * R0.h;
A1 = R5.h * R7.H, A0 += r5.L * r7.l (w32);
a1 += r0.H * r1.H, A0 = R0.L * R1.l (is);
a1 = r3.h * r4.h (m), a0 += r3.l * R4.L (FU);
A1 += r4.H * R4.L, a0 -= r4.h * r4.h;
r0.l = (a0 += R7.l * R6.L), R0.H = (A1 += R7.H * R6.H) (Iss2);
r2.H = A1, r2.l = (a0 += r0.L * r1.L) (s2rnd);
r7.h = (a1 = r2.h * r1.h), a0 += r2.l * r1.l;
R2.H = (A1 = R7.L * R6.H), R2.L = (A0 = R7.H * R6.h);
r6.L = (A0 = R3.L * r2.L), R6.H = (A1 += R3.H * R2.H);
R7.h = (a1 += r6.h * r5.l), r7.l = (a0=r6.h * r5.h);
r0.h = (A1 = r7.h * R4.l) (M), R0.l = (a0 += r7.l * r4.l);
R5.H = (a1 = r3.h * r2.h) (m), r5.l= (a0 += r3.l * r2.l) (fu);
r0.h = (A1 += R3.h * R2.h), R0.L = ( A0 = R3.L * R2.L) (is);
R3 = (A1 = R6.H * R7.H) (M), A0 -= R6.L * R7.L;
r1 = (a1 = r7.l * r4.l) (m), r0 = (a0 += r7.h * r4.h);
R0 = (a0 += r7.l * r6.l), r1 = (a1+= r7.h * r6.h) (ISS2);
r4 = (a0 = r6.l * r7.l), r5 = (a1 += r6.h * r7.h);
R7 = (A1 += r3.h * r5.H), R6 = (A0 -= r3.l * r5.l);
r5 = (a1 -= r6.h * r7.h), a0 += r6.l * r7.l;
R3 = (A1 = r6.h * R7.h), R2 = (A0 = R6.l * r7.l);
R5 = (A1 = r3.h * r7.h) (M), r4 = (A0 += R3.l * r7.l) (fu);
R3 = a1, r2 = (a0 += r0.l *r1.l) (s2rnd);
r1 = (a1 += r3.h * r2.h), r0 = (a0 = r3.l * r2.l) (is);
.text
.global vector_negate
vector_negate:
R0 = - R1 (V);
r7 = - r2 (v);
.text
.global vector_pack
vector_pack:
R7 = Pack (r0.h, r1.l);
r6 = PACK (r1.H, r6.H);
R5 = pack (R2.L, R2.H);
.text
.global vector_search
vector_search:
(R0, R1) = search R2 (lt);
(r6, r7) = Search r0 (LE);
(r3, r6) = SEARCH r1 (Gt);
(r4, R5) = sEARch r3 (gE);
|
tactcomplabs/xbgas-binutils-gdb
| 2,311
|
gas/testsuite/gas/bfin/load.s
|
.extern f001
.extern F002
.text
.global load_immediate
load_immediate:
/* Half-Word Load. */
M3.l = 0xffff;
b2.l = 0xfffe;
Sp.l = 0;
FP.L = 0xfedc;
r0.h = 2;
p5.H = 32;
I2.h = 0xf204;
b1.H = 64;
l0.h = 0xffff;
R5.h = load_data1;
B2.H = F002;
/* Zero Extended. */
fp = 0xff20 (Z);
l2 = 32 (z);
R5 = foo2 (Z);
A0 = 0;
A1 = 0;
a1 = a0 = 0;
/* Sign Extended. */
r2 = -64 (x);
R0 = 0x7f (X);
P2 = 0 (x);
sp = -32 (x);
fp = 44 (X);
l3 = 0x800 (x);
m2 = 0x7fff (X);
R1 = 16 (X);
L0 = foo1;
r7 = load_data2;
/* Test constant folding. */
r0.l = (a + 5) - 2;
r1.l = (a + 5) + 10;
.text
.global load_pointer_register
load_pointer_register:
Sp = [ fp];
FP = [ p0++ ];
p1 = [sp--];
SP = [P2 +56];
p3 = [fp + 0];
P4 = [FP + 0x0001FFFC];
sp = [fp-0x0001fffc];
sp = [p4-0];
P5 = [FP-128];
.text
.global load_data_register
load_data_register:
R7 = [p0];
r6 = [p5++];
r5 = [P4 --];
R4 = [Fp + 40];
r3 = [sp+131068];
r2 = [sp-0];
r1 = [fp - 0x0001fffc];
R0 = [sp ++ p0];
R5 = [Fp-128];
r2 = [i0];
r1 = [I1++];
R3 = [I2--];
R4 = [i3 ++ M0];
.text
.global load_half_word_zero_extend
load_half_word_zero_extend:
r7 = w [sp] (z);
R6 = W [FP ++] (Z);
R5 = W [P0 --] (z);
R4 = w [p1 + 30] (Z);
r3 = w [sp + 0xfffc] (z);
r2 = w [fp - 0xfffc] (Z);
R0 = W [ P0 ++ P5] (z);
.text
.global load_half_word_sign_extend
load_half_word_sign_extend:
r7 = w [sp] (x);
R6 = W [FP ++] (X);
R5 = W [P0 --] (X);
r5 = w [p1 + 24] (x);
R3 = w [sp + 0xfffc] (X);
r7 = w [fp - 0xfffc] (x);
R1 = W [ P1 ++ P2] (X);
.text
.global load_high_data_register_half
load_high_data_register_half:
r0.h = w [i0];
R1.H = W [I1 ++];
R2.h = w [I2 --];
r3.H = W [sp];
R4.h = W [Fp ++ p0];
.text
.global load_low_data_register_half
load_low_data_register_half:
r7.l = w [i3];
R6.L = W [I2++];
R5.l = w [i1 --];
r4.L = w [P0];
r3.l = W [p2 ++ p3];
.text
.global load_byte_zero_extend
load_byte_zero_extend:
r5 = b [p0] (z);
R4 = B [P1++] (Z);
r0 = b [p2--] (z);
R3 = B [sp + 0x7fff] (Z);
r7 = b [SP - 32767] (z);
.text
.global load_byte_sign_extend
load_byte_sign_extend:
r5 = b [ P0 ] (X);
r2 = B [ p1++ ] (x);
R3 = b [ FP--] (x);
r7 = B [ sp+0] (x);
r6 = b [fp-0x7fff] (X);
.data
.global load_data
load_data1: .byte 0
load_data2: .word 16
|
tactcomplabs/xbgas-binutils-gdb
| 1,042
|
gas/testsuite/gas/bfin/logical2.s
|
.EXTERN MY_LABEL2;
.section .text;
//
//7 LOGICAL OPERATIONS
//
//Dreg = Dreg & Dreg ; /* (a) */
R7 = R7 & R7;
R7 = R7 & R0;
r7 = R7 & R1;
R1 = R7 & R7;
R2 = R7 & R0;
r3 = R7 & R1;
//Dreg = ~ Dreg ; /* (a)*/
R7 = ~R7;
R7 = ~R0;
R0 = ~R7;
R0 = ~R2;
//Dreg = Dreg | Dreg ; /* (a) */
R7 = R7 | R7;
R7 = R7 | R1;
R7 = R7 | R0;
R1 = R7 | R7;
R2 = R7 | R1;
R3 = R7 | R0;
//Dreg = Dreg ^ Dreg ; /* (a) */
R7 = R7 ^ R7;
R7 = R7 ^ R1;
R7 = R7 ^ R0;
R1 = R7 ^ R7;
R2 = R7 ^ R1;
R3 = R7 ^ R0;
//Dreg_lo = CC = BXORSHIFT ( A0, Dreg ) ; /* (b) */
R0.L = CC = BXORSHIFT(A0, R0);
R0.L = CC = BXORSHIFT(A0, R1);
R3.L = CC = BXORSHIFT(A0, R0);
R3.L = CC = BXORSHIFT(A0, R1);
//Dreg_lo = CC = BXOR ( A0, Dreg ) ; /* (b) */
R0.L = CC = BXOR(A0, R0);
R0.L = CC = BXOR(A0, R1);
R3.L = CC = BXOR(A0, R0);
R3.L = CC = BXOR(A0, R1);
//Dreg_lo = CC = BXOR ( A0, A1, CC ) ; /* (b) */
R0.L = CC = BXOR(A0, A1, CC);
R0.L = CC = BXOR(A0, A1, CC);
R3.L = CC = BXOR(A0, A1, CC);
R3.L = CC = BXOR(A0, A1, CC);
A0 = BXORSHIFT ( A0, A1, CC ) ; /* (b) */
|
tactcomplabs/xbgas-binutils-gdb
| 4,594
|
gas/testsuite/gas/bfin/control_code2.s
|
.EXTERN MY_LABEL2;
.section .text;
//
//6 CONTROL CODE BIT MANAGEMENT
//
//CC = Dreg == Dreg ; /* equal, register, signed (a) */
CC = R7 == R0;
CC = R6 == R1;
CC = R0 == R7;
//CC = Dreg == imm3 ; /* equal, immediate, signed (a) */
CC = R7 == -4;
CC = R7 == 3;
CC = R0 == -4;
CC = R0 == 3;
//CC = Dreg < Dreg ; /* less than, register, signed (a) */
CC = R7 < R0;
CC = R6 < R0;
CC = R7 < R1;
CC = R1 < R7;
CC = R0 < R6;
//CC = Dreg < imm3 ; /* less than, immediate, signed (a) */
CC = R7 < -4;
CC = R6 < -4;
CC = R7 < 3;
CC = R1 < 3;
//CC = Dreg <= Dreg ; /* less than or equal, register, signed (a) */
CC = R7 <= R0;
CC = R6 <= R0;
CC = R7 <= R1;
CC = R1 <= R7;
CC = R0 <= R6;
//CC = Dreg <= imm3 ; /* less than or equal, immediate, signed (a) */
CC = R7 <= -4;
CC = R6 <= -4;
CC = R7 <= 3;
CC = R1 <= 3;
//CC = Dreg < Dreg (IU) ; /* less than, register, unsigned (a) */
CC = R7 < R0(IU);
CC = R6 < R0(IU);
CC = R7 < R1(IU);
CC = R1 < R7(IU);
CC = R0 < R6(IU);
//CC = Dreg < uimm3 (IU) ; /* less than, immediate, unsigned (a) */
CC = R7 < 0(IU);
CC = R6 < 0(IU);
CC = R7 < 7(IU);
CC = R1 < 7(IU);
//CC = Dreg <= Dreg (IU) ; /* less than or equal, register, unsigned (a) */
CC = R7 <= R0(IU);
CC = R6 <= R0(IU);
CC = R7 <= R1(IU);
CC = R1 <= R7(IU);
CC = R0 <= R6(IU);
//CC = Dreg <= uimm3 (IU) ; /* less than or equal, immediate unsigned (a) */
CC = R7 <= 0(IU);
CC = R6 <= 0(IU);
CC = R7 <= 7(IU);
CC = R1 <= 7(IU);
//CC = Preg == Preg ; /* equal, register, signed (a) */
CC = P5 == P0;
CC = P5 == P1;
CC = P0 == P2;
CC = P3 == P5;
//CC = Preg == imm3 ; /* equal, immediate, signed (a) */
CC = P5 == -4;
CC = P5 == 0;
CC = P5 == 3;
CC = P2 == -4;
CC = P2 == 0;
CC = P2 == 3;
//CC = Preg < Preg ; /* less than, register, signed (a) */
CC = P5 < P0;
CC = P5 < P1;
CC = P0 < P2;
CC = P3 < P5;
//CC = Preg < imm3 ; /* less than, immediate, signed (a) */
CC = P5 < -4;
CC = P5 < 0;
CC = P5 < 3;
CC = P2 < -4;
CC = P2 < 0;
CC = P2 < 3;
//CC = Preg <= Preg ; /* less than or equal, register, signed (a) */
CC = P5 <= P0;
CC = P5 <= P1;
CC = P0 <= P2;
CC = P3 <= P5;
//CC = Preg <= imm3 ; /* less than or equal, immediate, signed (a) */
CC = P5 <= -4;
CC = P5 <= 0;
CC = P5 <= 3;
CC = P2 <= -4;
CC = P2 <= 0;
CC = P2 <= 3;
//CC = Preg < Preg (IU) ; /* less than, register, unsigned (a) */
CC = P5 < P0(IU);
CC = P5 < P1(IU);
CC = P0 < P2(IU);
CC = P3 < P5(IU);
//CC = Preg < uimm3 (IU) ; /* less than, immediate, unsigned (a) */
CC = P5 < 0(IU);
CC = P5 < 7(IU);
CC = P2 < 0(IU);
CC = P2 < 7(IU);
//CC = Preg <= Preg (IU) ; /* less than or equal, register, unsigned (a) */
CC = P5 <= P0(IU);
CC = P5 <= P1(IU);
CC = P0 <= P2(IU);
CC = P3 <= P5(IU);
//CC = Preg <= uimm3 (IU) ; /* less than or equal, immediate unsigned (a) */
CC = P5 <= 0(IU);
CC = P5 <= 7(IU);
CC = P2 <= 0(IU);
CC = P2 <= 7(IU);
CC = A0 == A1 ; /* equal, signed (a) */
CC = A0 < A1 ; /* less than, Accumulator, signed (a) */
CC = A0 <= A1 ; /* less than or equal, Accumulator, signed (a) */
//Dreg = CC ; /* CC into 32-bit data register, zero-extended (a) */
R7 = CC;
R0 = CC;
//statbit = CC ; /* status bit equals CC (a) */
AZ = CC;
AN = CC;
AC0= CC;
AC1= CC;
//V = CC;
VS = CC;
AV0= CC;
AV0S= CC;
AV1 = CC;
AV1S= CC;
AQ = CC;
//statbit |= CC ; /* status bit equals status bit OR CC (a) */
AZ |= CC;
AN |= CC;
AC0|= CC;
AC1|= CC;
//V |= CC;
VS |= CC;
AV0|= CC;
AV0S|= CC;
AV1 |= CC;
AV1S|= CC;
AQ |= CC;
//statbit &= CC ; /* status bit equals status bit AND CC (a) */
AZ &= CC;
AN &= CC;
AC0&= CC;
AC1&= CC;
//V &= CC;
VS &= CC;
AV0&= CC;
AV0S&= CC;
AV1 &= CC;
AV1S&= CC;
AQ &= CC;
//statbit ^= CC ; /* status bit equals status bit XOR CC (a) */
AZ ^= CC;
AN ^= CC;
AC0^= CC;
AC1^= CC;
//V ^= CC;
VS ^= CC;
AV0^= CC;
AV0S^= CC;
AV1 ^= CC;
AV1S^= CC;
AQ ^= CC;
//CC = Dreg ; /* CC set if the register is non-zero (a) */
CC = R7;
CC = R6;
CC = R1;
CC = R0;
//CC = statbit ; /* CC equals status bit (a) */
CC = AZ;
CC = AN;
CC = AC0;
CC = AC1;
//CC = V;
CC = VS;
CC = AV0;
CC = AV0S;
CC = AV1;
CC = AV1S;
CC = AQ;
//CC |= statbit ; /* CC equals CC OR status bit (a) */
CC |= AZ;
CC |= AN;
CC |= AC0;
CC |= AC1;
//CC |= V;
CC |= VS;
CC |= AV0;
CC |= AV0S;
CC |= AV1;
CC |= AV1S;
CC |= AQ;
//CC &= statbit ; /* CC equals CC AND status bit (a) */
CC &= AZ;
CC &= AN;
CC &= AC0;
CC &= AC1;
//CC &= V;
CC &= VS;
CC &= AV0;
CC &= AV0S;
CC &= AV1;
CC &= AV1S;
CC &= AQ;
//CC ^= statbit ; /* CC equals CC XOR status bit (a) */
CC ^= AZ;
CC ^= AN;
CC ^= AC0;
CC ^= AC1;
//CC ^= V;
CC ^= VS;
CC ^= AV0;
CC ^= AV0S;
CC ^= AV1;
CC ^= AV1S;
CC ^= AQ;
CC = ! CC ; /* (a) */
|
tactcomplabs/xbgas-binutils-gdb
| 1,601
|
gas/testsuite/gas/bfin/arith_mode.s
|
.text
// Accumulator to Half D-register Moves
R0.L = A0;
R0.L = A0 (FU);
R0.L = A0 (IS);
R0.L = A0 (IU);
R0.L = A0 (T);
R0.L = A0 (TFU); // Not documented
R0.L = A0 (S2RND);
R0.L = A0 (ISS2);
R0.L = A0 (IH);
// Accumulator to D-register Moves
R0 = A0;
R0 = A0 (FU);
R0 = A0 (IS); // Not documented
R0 = A0 (IU); // Not documented
R0 = A0 (S2RND);
R0 = A0 (ISS2);
// Multiply 16-Bit Operands to Half Dreg
R0.H = R1.L * R2.H;
R0.H = R1.L * R2.H (FU);
R0.H = R1.L * R2.H (IS);
R0.H = R1.L * R2.H (IU);
R0.H = R1.L * R2.H (T);
R0.H = R1.L * R2.H (TFU);
R0.H = R1.L * R2.H (S2RND);
R0.H = R1.L * R2.H (ISS2);
R0.H = R1.L * R2.H (IH);
// Multiply 16-Bit Operands to Dreg
R0 = R1.L * R2.H;
R0 = R1.L * R2.H (FU);
R0 = R1.L * R2.H (IS);
R0 = R1.L * R2.H (S2RND); // Not documented
R0 = R1.L * R2.H (ISS2);
// Multiply and Multiply-Accumulate to Accumulator
A0 = R1.L * R2.H;
A0 = R1.L * R2.H (FU);
A0 = R1.L * R2.H (IS);
A0 = R1.L * R2.H (W32);
// Multiply and Multiply-Accumulate to Half-Register
R0.L = (A0 = R1.L * R2.H);
R0.L = (A0 = R1.L * R2.H) (FU);
R0.L = (A0 = R1.L * R2.H) (IS);
R0.L = (A0 = R1.L * R2.H) (IU);
R0.L = (A0 = R1.L * R2.H) (T);
R0.L = (A0 = R1.L * R2.H) (TFU);
R0.L = (A0 = R1.L * R2.H) (S2RND);
R0.L = (A0 = R1.L * R2.H) (ISS2);
R0.L = (A0 = R1.L * R2.H) (IH);
// Multiply and Multiply-Accumulate to Data Register
R0 = (A0 = R1.L * R2.H);
R0 = (A0 = R1.L * R2.H) (FU);
R0 = (A0 = R1.L * R2.H) (IS);
R0 = (A0 = R1.L * R2.H) (IU); // Not documented
R0 = (A0 = R1.L * R2.H) (S2RND);
R0 = (A0 = R1.L * R2.H) (ISS2);
|
tactcomplabs/xbgas-binutils-gdb
| 1,354
|
gas/testsuite/gas/bfin/move.s
|
.text
.global move_register
move_register:
r7 = A0.X;
Fp = B3;
l2 = R5;
M2 = i2;
a1.w = usp;
r0 = astat;
r1 = sEQstat;
R2 = SYScfg;
R3 = reti;
R4 = RETX;
r5 = reTN;
r6 = rETe;
R7 = RETS;
R5 = lc0;
r4 = Lc1;
r3 = Lt0;
r2 = LT1;
r1 = Lb0;
r0 = LB1;
R2 = Cycles;
R3 = Cycles2;
r1 = emudat;
Rets = Fp;
Lt1 = USP;
ASTAT = P2;
A0 = A1;
a1 = a0;
a0 = R0;
A1 = r1;
R4 = A0 (fu);
r5 = A1 (ISS2);
R6 = a0;
R7 = A1;
R6 = A0, R7 = a1;
r1 = a1, r0 = a0 (fu);
.text
.global move_conditional
move_conditional:
if cc R5 = P2;
if !cc Sp = R0;
.text
.global move_half_to_full_zero_extend
move_half_to_full_zero_extend:
R2 = r7.L (Z);
r0 = R1.L (z);
.text
.global move_half_to_full_sign_extend
move_half_to_full_sign_extend:
R5 = R1.L (x);
r3 = r2.L (X);
.text
.global move_register_half
move_register_half:
A0.X = r5.l;
a1.X = r2.L;
r0.l = a0.x;
R7.l = A1.X;
A0.L = r3.l;
a1.l = r4.l;
A0.h = r6.H;
A1.H = r5.h;
r0.l = A0 (iu);
R1.H = A1 (s2rnd);
r1.h = a1;
R2.l = A0, r2.H = A1 (IH);
R2.l = A0, r2.H = A1;
r0.H = A1, R0.L = a0 (t);
r0.H = A1, R0.L = a0 (fu);
r0.H = A1, R0.L = a0 (is);
r0.H = A1, R0.L = a0;
.text
.global move_byte_zero_extend
move_byte_zero_extend:
R7 = r2.b (z);
r0 = R1.B (Z);
.text
.global move_byte_sign_extend
move_byte_sign_extend:
r6 = r1.b (Z);
R5 = R4.B (z);
|
tactcomplabs/xbgas-binutils-gdb
| 2,282
|
gas/testsuite/gas/bfin/bit2.s
|
.EXTERN MY_LABEL2;
.section .text;
//
//8 BIT OPERATIONS
//
//BITCLR ( Dreg , uimm5 ) ; /* (a) */
BITCLR ( R7 , 0 ) ;
BITCLR ( R7 , 31 ) ;
BITCLR ( R7 , 15 ) ;
BITCLR ( R1 , 0 ) ;
BITCLR ( R2 , 1 ) ;
BITCLR ( R3 , 19 ) ;
//BITSET ( Dreg , uimm5 ) ; /* (a) */
BITSET ( R7 , 0 ) ;
BITSET ( R7 , 31 ) ;
BITSET ( R7 , 15 ) ;
BITSET ( R1 , 0 ) ;
BITSET ( R2 , 1 ) ;
BITSET ( R3 , 19 ) ;
//BITTGL ( Dreg , uimm5 ) ; /* (a) */
BITTGL ( R7 , 0 ) ;
BITTGL ( R7 , 31 ) ;
BITTGL ( R7 , 15 ) ;
BITTGL ( R1 , 0 ) ;
BITTGL ( R2 , 1 ) ;
BITTGL ( R3 , 19 ) ;
//CC = BITTST ( Dreg , uimm5 ) ; /* set CC if bit = 1 (a)*/
CC = BITTST ( R7 , 0 ) ;
CC = BITTST ( R7 , 31 ) ;
CC = BITTST ( R7 , 15 ) ;
CC = BITTST ( R1 , 0 ) ;
CC = BITTST ( R2 , 1 ) ;
CC = BITTST ( R3 , 19 ) ;
//CC = ! BITTST ( Dreg , uimm5 ) ; /* set CC if bit = 0 (a)*/
CC = !BITTST ( R7 , 0 ) ;
CC = !BITTST ( R7 , 31 ) ;
CC = !BITTST ( R7 , 15 ) ;
CC = !BITTST ( R1 , 0 ) ;
CC = !BITTST ( R2 , 1 ) ;
CC = !BITTST ( R3 , 19 ) ;
//Dreg = DEPOSIT ( Dreg, Dreg ) ; /* no extension (b) */
R7 = DEPOSIT(R0, R1);
R7 = DEPOSIT(R7, R1);
R7 = DEPOSIT(R7, R7);
R1 = DEPOSIT(R0, R1);
R2 = DEPOSIT(R7, R1);
R3 = DEPOSIT(R7, R7);
//Dreg = DEPOSIT ( Dreg, Dreg ) (X) ; /* sign-extended (b) */
R7 = DEPOSIT(R0, R1)(X);
R7 = DEPOSIT(R7, R1)(X);
R7 = DEPOSIT(R7, R7)(X);
R1 = DEPOSIT(R0, R1)(X);
R2 = DEPOSIT(R7, R1)(X);
R3 = DEPOSIT(R7, R7)(X);
//Dreg = EXTRACT ( Dreg, Dreg_lo ) (Z) ; /* zero-extended (b)*/
R7 = EXTRACT(R0, R1.L)(Z);
R7 = EXTRACT(R7, R1.L)(Z);
R7 = EXTRACT(R7, R7.L)(Z);
R1 = EXTRACT(R0, R1.L)(Z);
R2 = EXTRACT(R7, R1.L)(Z);
R3 = EXTRACT(R7, R7.L)(Z);
//Dreg = EXTRACT ( Dreg, Dreg_lo ) (X) ; /* sign-extended (b)*/
R7 = EXTRACT(R0, R1.L)(X);
R7 = EXTRACT(R7, R1.L)(X);
R7 = EXTRACT(R7, R7.L)(X);
R1 = EXTRACT(R0, R1.L)(X);
R2 = EXTRACT(R7, R1.L)(X);
R3 = EXTRACT(R7, R7.L)(X);
//BITMUX ( Dreg , Dreg , A0 ) (ASR) ; /* shift right, LSB is shifted out (b) */
BITMUX(R0, R1, A0)(ASR);
BITMUX(R0, R2, A0)(ASR);
BITMUX(R1, R3, A0)(ASR);
//BITMUX(R0, R0, A0)(ASR);
//BITMUX ( Dreg , Dreg , A0 ) (ASL) ; /* shift left, MSB is shifted out (b) */
//BITMUX(R0, R0, A0)(ASL);
BITMUX(R0, R1, A0)(ASL);
BITMUX(R1, R2, A0)(ASL);
//Dreg_lo = ONES Dreg ; /* (b) */
R0.L = ONES R0;
R0.L = ONES R1;
R1.L = ONES R6;
R2.L = ONES R7;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.