repo_id
stringlengths 5
115
| size
int64 590
5.01M
| file_path
stringlengths 4
212
| content
stringlengths 590
5.01M
|
|---|---|---|---|
tactcomplabs/xbgas-binutils-gdb
| 5,447
|
gas/testsuite/gas/i386/x86-64-avx512f-opts.s
|
# Check 64bit AVX512F instructions
.allow_index_reg
.text
_start:
vmovapd.s %zmm29, %zmm30 # AVX512F
vmovapd %zmm29, %zmm30 # AVX512F
vmovapd.s %zmm29, %zmm30{%k7} # AVX512F
vmovapd %zmm29, %zmm30{%k7} # AVX512F
vmovapd.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovapd %zmm29, %zmm30{%k7}{z} # AVX512F
vmovaps.s %zmm29, %zmm30 # AVX512F
vmovaps %zmm29, %zmm30 # AVX512F
vmovaps.s %zmm29, %zmm30{%k7} # AVX512F
vmovaps %zmm29, %zmm30{%k7} # AVX512F
vmovaps.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovaps %zmm29, %zmm30{%k7}{z} # AVX512F
vmovd.s %xmm30, %eax # AVX512F
vmovd %xmm30, %eax # AVX512F
vmovd.s %xmm30, %ebp # AVX512F
vmovd %xmm30, %ebp # AVX512F
vmovd.s %xmm30, %r13d # AVX512F
vmovd %xmm30, %r13d # AVX512F
vmovdqa32.s %zmm29, %zmm30 # AVX512F
vmovdqa32 %zmm29, %zmm30 # AVX512F
vmovdqa32.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqa32 %zmm29, %zmm30{%k7} # AVX512F
vmovdqa32.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa32 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa64.s %zmm29, %zmm30 # AVX512F
vmovdqa64 %zmm29, %zmm30 # AVX512F
vmovdqa64.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqa64 %zmm29, %zmm30{%k7} # AVX512F
vmovdqa64.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqa64 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu32.s %zmm29, %zmm30 # AVX512F
vmovdqu32 %zmm29, %zmm30 # AVX512F
vmovdqu32.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqu32 %zmm29, %zmm30{%k7} # AVX512F
vmovdqu32.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu32 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu64.s %zmm29, %zmm30 # AVX512F
vmovdqu64 %zmm29, %zmm30 # AVX512F
vmovdqu64.s %zmm29, %zmm30{%k7} # AVX512F
vmovdqu64 %zmm29, %zmm30{%k7} # AVX512F
vmovdqu64.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovdqu64 %zmm29, %zmm30{%k7}{z} # AVX512F
vmovq.s %xmm30, %rax # AVX512F
vmovq %xmm30, %rax # AVX512F
vmovq.s %xmm30, %r8 # AVX512F
vmovq %xmm30, %r8 # AVX512F
vmovq.s %xmm29, %xmm30 # AVX512F
vmovq %xmm29, %xmm30 # AVX512F
vmovsd.s %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovsd %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovsd.s %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovss.s %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovss %xmm28, %xmm29, %xmm30{%k7} # AVX512F
vmovss.s %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512F
vmovupd.s %zmm29, %zmm30 # AVX512F
vmovupd %zmm29, %zmm30 # AVX512F
vmovupd.s %zmm29, %zmm30{%k7} # AVX512F
vmovupd %zmm29, %zmm30{%k7} # AVX512F
vmovupd.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovupd %zmm29, %zmm30{%k7}{z} # AVX512F
vmovups.s %zmm29, %zmm30 # AVX512F
vmovups %zmm29, %zmm30 # AVX512F
vmovups.s %zmm29, %zmm30{%k7} # AVX512F
vmovups %zmm29, %zmm30{%k7} # AVX512F
vmovups.s %zmm29, %zmm30{%k7}{z} # AVX512F
vmovups %zmm29, %zmm30{%k7}{z} # AVX512F
.intel_syntax noprefix
vmovapd.s zmm30, zmm29 # AVX512F
vmovapd zmm30, zmm29 # AVX512F
vmovapd.s zmm30{k7}, zmm29 # AVX512F
vmovapd zmm30{k7}, zmm29 # AVX512F
vmovapd.s zmm30{k7}{z}, zmm29 # AVX512F
vmovapd zmm30{k7}{z}, zmm29 # AVX512F
vmovaps.s zmm30, zmm29 # AVX512F
vmovaps zmm30, zmm29 # AVX512F
vmovaps.s zmm30{k7}, zmm29 # AVX512F
vmovaps zmm30{k7}, zmm29 # AVX512F
vmovaps.s zmm30{k7}{z}, zmm29 # AVX512F
vmovaps zmm30{k7}{z}, zmm29 # AVX512F
vmovd.s eax, xmm30 # AVX512F
vmovd eax, xmm30 # AVX512F
vmovd.s ebp, xmm30 # AVX512F
vmovd ebp, xmm30 # AVX512F
vmovd.s r13d, xmm30 # AVX512F
vmovd r13d, xmm30 # AVX512F
vmovdqa32.s zmm30, zmm29 # AVX512F
vmovdqa32 zmm30, zmm29 # AVX512F
vmovdqa32.s zmm30{k7}, zmm29 # AVX512F
vmovdqa32 zmm30{k7}, zmm29 # AVX512F
vmovdqa32.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqa32 zmm30{k7}{z}, zmm29 # AVX512F
vmovdqa64.s zmm30, zmm29 # AVX512F
vmovdqa64 zmm30, zmm29 # AVX512F
vmovdqa64.s zmm30{k7}, zmm29 # AVX512F
vmovdqa64 zmm30{k7}, zmm29 # AVX512F
vmovdqa64.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqa64 zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu32.s zmm30, zmm29 # AVX512F
vmovdqu32 zmm30, zmm29 # AVX512F
vmovdqu32.s zmm30{k7}, zmm29 # AVX512F
vmovdqu32 zmm30{k7}, zmm29 # AVX512F
vmovdqu32.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu32 zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu64.s zmm30, zmm29 # AVX512F
vmovdqu64 zmm30, zmm29 # AVX512F
vmovdqu64.s zmm30{k7}, zmm29 # AVX512F
vmovdqu64 zmm30{k7}, zmm29 # AVX512F
vmovdqu64.s zmm30{k7}{z}, zmm29 # AVX512F
vmovdqu64 zmm30{k7}{z}, zmm29 # AVX512F
vmovq.s rax, xmm30 # AVX512F
vmovq rax, xmm30 # AVX512F
vmovq.s r8, xmm30 # AVX512F
vmovq r8, xmm30 # AVX512F
vmovq.s xmm30, xmm29 # AVX512F
vmovq xmm30, xmm29 # AVX512F
vmovsd.s xmm30{k7}, xmm29, xmm28 # AVX512F
vmovsd xmm30{k7}, xmm29, xmm28 # AVX512F
vmovsd.s xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovsd xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovss.s xmm30{k7}, xmm29, xmm28 # AVX512F
vmovss xmm30{k7}, xmm29, xmm28 # AVX512F
vmovss.s xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovss xmm30{k7}{z}, xmm29, xmm28 # AVX512F
vmovupd.s zmm30, zmm29 # AVX512F
vmovupd zmm30, zmm29 # AVX512F
vmovupd.s zmm30{k7}, zmm29 # AVX512F
vmovupd zmm30{k7}, zmm29 # AVX512F
vmovupd.s zmm30{k7}{z}, zmm29 # AVX512F
vmovupd zmm30{k7}{z}, zmm29 # AVX512F
vmovups.s zmm30, zmm29 # AVX512F
vmovups zmm30, zmm29 # AVX512F
vmovups.s zmm30{k7}, zmm29 # AVX512F
vmovups zmm30{k7}, zmm29 # AVX512F
vmovups.s zmm30{k7}{z}, zmm29 # AVX512F
vmovups zmm30{k7}{z}, zmm29 # AVX512F
|
tactcomplabs/xbgas-binutils-gdb
| 4,802
|
gas/testsuite/gas/i386/x86-64-mpx.s
|
# MPX instructions
.allow_index_reg
.text
start:
### bndmk
bndmk (%r11), %bnd1
bndmk (%rax), %bnd1
bndmk (0x399), %bnd1
bndmk 0x3(%r9), %bnd1
bndmk 0x3(%rax), %bnd1
bndmk 0x3(,%r12,1), %bnd1
bndmk (%rax,%rcx), %bnd1
bndmk 0x3(%r11,%rax,2), %bnd1
bndmk 0x3(%rbx,%r9,1), %bnd1
### bndmov
bndmov (%r11), %bnd1
bndmov (%rax), %bnd1
bndmov (0x399), %bnd1
bndmov 0x3(%r9), %bnd2
bndmov 0x3(%rax), %bnd2
bndmov 0x3333(%rip), %bnd2
bndmov 0x3(,%r12,1), %bnd0
bndmov (%rax,%rdx), %bnd2
bndmov 0x3(%r11,%rax,2), %bnd1
bndmov 0x3(%rbx,%r9,1), %bnd1
bndmov %bnd2, %bnd0
bndmov %bnd1, (%r11)
bndmov %bnd1, (%rax)
bndmov %bnd1, (0x399)
bndmov %bnd2, 0x3(%r9)
bndmov %bnd2, 0x3(%rax)
bndmov %bnd2, 0x3333(%rip)
bndmov %bnd0, 0x3(,%r12,1)
bndmov %bnd2, (%rax,%rdx)
bndmov %bnd1, 0x3(%r11,%rax,2)
bndmov %bnd1, 0x3(%rbx,%r9,1)
bndmov %bnd0, %bnd2
### bndcl
bndcl (%r11), %bnd1
bndcl (%rax), %bnd1
bndcl %r11, %bnd1
bndcl %rcx, %bnd1
bndcl (0x399), %bnd1
bndcl 0x3(%r9), %bnd2
bndcl 0x3(%rax), %bnd2
bndcl 0x3333(%rip), %bnd2
bndcl 0x3(,%r12,1), %bnd0
bndcl (%rax,%rdx), %bnd2
bndcl 0x3(%r11,%rax,2), %bnd1
bndcl 0x3(%rbx,%r9,1), %bnd1
### bndcu
bndcu (%r11), %bnd1
bndcu (%rax), %bnd1
bndcu %r11, %bnd1
bndcu %rcx, %bnd1
bndcu (0x399), %bnd1
bndcu 0x3(%r9), %bnd2
bndcu 0x3(%rax), %bnd2
bndcu 0x3333(%rip), %bnd2
bndcu 0x3(,%r12,1), %bnd0
bndcu (%rax,%rdx), %bnd2
bndcu 0x3(%r11,%rax,2), %bnd1
bndcu 0x3(%rbx,%r9,1), %bnd1
### bndcn
bndcn (%r11), %bnd1
bndcn (%rax), %bnd1
bndcn %r11, %bnd1
bndcn %rcx, %bnd1
bndcn (0x399), %bnd1
bndcn 0x3(%r9), %bnd2
bndcn 0x3(%rax), %bnd2
bndcn 0x3333(%rip), %bnd2
bndcn 0x3(,%r12,1), %bnd0
bndcn (%rax,%rdx), %bnd2
bndcn 0x3(%r11,%rax,2), %bnd1
bndcn 0x3(%rbx,%r9,1), %bnd1
### bndstx
bndstx %bnd0, 0x3(%rax,%rbx,1)
bndstx %bnd2, 3(%rbx,%rdx)
bndstx %bnd3, 0x399(%r12)
bndstx %bnd1, 0x1234(%r11)
bndstx %bnd2, 0x1234(%rbx)
bndstx %bnd2, 3(,%rbx,1)
bndstx %bnd2, 3(,%r12,1)
bndstx %bnd1, (%rdx)
### bndldx
bndldx 0x3(%rax,%rbx,1), %bnd0
bndldx 3(%rbx,%rdx), %bnd2
bndldx 0x399(%r12), %bnd3
bndldx 0x1234(%r11), %bnd1
bndldx 0x1234(%rbx), %bnd2
bndldx 3(,%rbx,1), %bnd2
bndldx 3(,%r12,1), %bnd2
bndldx (%rdx), %bnd1
### bnd
bnd call foo
bnd call *(%rax)
bnd call *(%r11)
bnd je foo
bnd jmp foo
bnd jmp *(%rcx)
bnd jmp *(%r12)
bnd ret
.intel_syntax noprefix
bndmk bnd1, [r11]
bndmk bnd1, [rax]
bndmk bnd1, [0x399]
bndmk bnd1, [r9+0x3]
bndmk bnd1, [rax+0x3]
bndmk bnd1, [1*r12+0x3]
bndmk bnd1, [rax+rcx]
bndmk bnd1, [r11+1*rax+0x3]
bndmk bnd1, [rbx+1*r9+0x3]
### bndmov
bndmov bnd1, [r11]
bndmov bnd1, [rax]
bndmov bnd1, [0x399]
bndmov bnd2, [r9+0x3]
bndmov bnd2, [rax+0x3]
bndmov bnd0, [1*r12+0x3]
bndmov bnd2, [rax+rdx]
bndmov bnd1, [r11+1*rax+0x3]
bndmov bnd1, [rbx+1*r9+0x3]
bndmov bnd0, bnd2
bndmov [r11], bnd1
bndmov [rax], bnd1
bndmov [0x399], bnd1
bndmov [r9+0x3], bnd2
bndmov [rax+0x3], bnd2
bndmov [1*r12+0x3], bnd0
bndmov [rax+rdx], bnd2
bndmov [r11+1*rax+0x3], bnd1
bndmov [rbx+1*r9+0x3], bnd1
bndmov bnd2, bnd0
### bndcl
bndcl bnd1, [r11]
bndcl bnd1, [rax]
bndcl bnd1, r11
bndcl bnd1, rcx
bndcl bnd1, [0x399]
bndcl bnd1, [r9+0x3]
bndcl bnd1, [rax+0x3]
bndcl bnd1, [1*r12+0x3]
bndcl bnd1, [rax+rcx]
bndcl bnd1, [r11+1*rax+0x3]
bndcl bnd1, [rbx+1*r9+0x3]
### bndcu
bndcu bnd1, [r11]
bndcu bnd1, [rax]
bndcu bnd1, r11
bndcu bnd1, rcx
bndcu bnd1, [0x399]
bndcu bnd1, [r9+0x3]
bndcu bnd1, [rax+0x3]
bndcu bnd1, [1*r12+0x3]
bndcu bnd1, [rax+rcx]
bndcu bnd1, [r11+1*rax+0x3]
bndcu bnd1, [rbx+1*r9+0x3]
### bndcn
bndcn bnd1, [r11]
bndcn bnd1, [rax]
bndcn bnd1, r11
bndcn bnd1, rcx
bndcn bnd1, [0x399]
bndcn bnd1, [r9+0x3]
bndcn bnd1, [rax+0x3]
bndcn bnd1, [1*r9+0x3]
bndcn bnd1, [rax+rcx]
bndcn bnd1, [r11+1*rax+0x3]
bndcn bnd1, [rbx+1*r9+0x3]
### bndstx
bndstx [rax+rbx*1+0x3], bnd0
bndstx [rbx+rdx+3], bnd2
bndstx [r12+0x399], bnd3
bndstx [r11+0x1234], bnd1
bndstx [rbx+0x1234], bnd2
bndstx [1*rbx+3], bnd2
bndstx [1*r12+3], bnd2
bndstx [rdx], bnd1
### bndldx
bndldx bnd0, [rax+rbx*1+0x3]
bndldx bnd2, [rbx+rdx+3]
bndldx bnd3, [r12+0x399]
bndldx bnd1, [r11+0x1234]
bndldx bnd2, [rbx+0x1234]
bndldx bnd2, [1*rbx+3]
bndldx bnd2, [1*r12+3]
bndldx bnd1, [rdx]
### bnd
bnd call foo
bnd call rax
bnd call r11
bnd je foo
bnd jmp foo
bnd jmp rcx
bnd jmp r12
bnd ret
foo: bnd ret
bad:
# bndldx (%eax),(bad)
.byte 0x0f
.byte 0x1a
.byte 0x30
# bndmov (bad),%bnd0
.byte 0x66
.byte 0x0f
.byte 0x1a
.byte 0xc4
# bndmov with REX.B set
.byte 0x66
.byte 0x41
.byte 0x0f
.byte 0x1a
.byte 0xc0
# bndmov with REX.R set
.byte 0x66
.byte 0x44
.byte 0x0f
.byte 0x1a
.byte 0xc0
# bndmk (bad),%bnd0
.byte 0xf3
.byte 0x0f
.byte 0x1b
.byte 0x05
.long 0x90909090
|
tactcomplabs/xbgas-binutils-gdb
| 3,423
|
gas/testsuite/gas/i386/x86-64-avx512ifma.s
|
# Check 64bit AVX512IFMA instructions
.allow_index_reg
.text
_start:
vpmadd52luq %zmm28, %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq %zmm28, %zmm29, %zmm30{%k7} # AVX512IFMA
vpmadd52luq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512IFMA
vpmadd52luq (%rcx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq (%rcx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq 8128(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq 8192(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq -8192(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq -8256(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq %zmm28, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq %zmm28, %zmm29, %zmm30{%k7} # AVX512IFMA
vpmadd52huq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512IFMA
vpmadd52huq (%rcx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq (%rcx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq 8128(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq 8192(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq -8192(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq -8256(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
.intel_syntax noprefix
vpmadd52luq zmm30, zmm29, zmm28 # AVX512IFMA
vpmadd52luq zmm30{k7}, zmm29, zmm28 # AVX512IFMA
vpmadd52luq zmm30{k7}{z}, zmm29, zmm28 # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512IFMA
vpmadd52luq zmm30, zmm29, [rcx]{1to8} # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512IFMA
vpmadd52luq zmm30, zmm29, [rdx+1016]{1to8} # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, [rdx+1024]{1to8} # AVX512IFMA
vpmadd52luq zmm30, zmm29, [rdx-1024]{1to8} # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, [rdx-1032]{1to8} # AVX512IFMA
vpmadd52huq zmm30, zmm29, zmm28 # AVX512IFMA
vpmadd52huq zmm30{k7}, zmm29, zmm28 # AVX512IFMA
vpmadd52huq zmm30{k7}{z}, zmm29, zmm28 # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512IFMA
vpmadd52huq zmm30, zmm29, [rcx]{1to8} # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512IFMA
vpmadd52huq zmm30, zmm29, [rdx+1016]{1to8} # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, [rdx+1024]{1to8} # AVX512IFMA
vpmadd52huq zmm30, zmm29, [rdx-1024]{1to8} # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, [rdx-1032]{1to8} # AVX512IFMA
|
tactcomplabs/xbgas-binutils-gdb
| 2,881
|
gas/testsuite/gas/i386/x86-64-avx2-wig.s
|
# Check AVX2 WIG instructions
.allow_index_reg
.text
_start:
vmovntdqa (%rcx),%ymm4
vmpsadbw $7,%ymm4,%ymm6,%ymm2
vpabsb %ymm4,%ymm6
vpabsd %ymm4,%ymm6
vpabsw %ymm4,%ymm6
vpackssdw %ymm4,%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
vpaddb %ymm4,%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
vpalignr $7,%ymm4,%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
vphaddd %ymm4,%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
vpmovmskb %ymm4,%ecx
vpmovsxbd %xmm4,%ymm6
vpmovsxbq %xmm4,%ymm4
vpmovsxbw %xmm4,%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovzxbd %xmm4,%ymm6
vpmovzxbq %xmm4,%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwq %xmm4,%ymm6
vpmuldq %ymm4,%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
vpor %ymm4,%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufd $7,%ymm6,%ymm2
vpshufhw $7,%ymm6,%ymm2
vpshuflw $7,%ymm6,%ymm2
vpsignb %ymm4,%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
vpsubb %ymm4,%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
|
tactcomplabs/xbgas-binutils-gdb
| 4,435
|
gas/testsuite/gas/i386/optimize-2.s
|
# Check instructions with optimized encoding
.allow_index_reg
.text
_start:
testl $0x7f, %eax
testw $0x7f, %ax
testb $0x7f, %al
test $0x7f, %ebx
test $0x7f, %bx
test $0x7f, %bl
test $0x7f, %edi
test $0x7f, %di
and %cl, %cl
and %dx, %dx
and %ebx, %ebx
or %ah, %ah
or %bp, %bp
or %esi, %esi
vandnpd %zmm1, %zmm1, %zmm5
vmovdqa32 %xmm1, %xmm2
vmovdqa64 %xmm1, %xmm2
vmovdqu8 %xmm1, %xmm2
vmovdqu16 %xmm1, %xmm2
vmovdqu32 %xmm1, %xmm2
vmovdqu64 %xmm1, %xmm2
vmovdqa32 127(%eax), %xmm2
vmovdqa64 127(%eax), %xmm2
vmovdqu8 127(%eax), %xmm2
vmovdqu16 127(%eax), %xmm2
vmovdqu32 127(%eax), %xmm2
vmovdqu64 127(%eax), %xmm2
vmovdqa32 %xmm1, 128(%eax)
vmovdqa64 %xmm1, 128(%eax)
vmovdqu8 %xmm1, 128(%eax)
vmovdqu16 %xmm1, 128(%eax)
vmovdqu32 %xmm1, 128(%eax)
vmovdqu64 %xmm1, 128(%eax)
vmovdqa32 %ymm1, %ymm2
vmovdqa64 %ymm1, %ymm2
vmovdqu8 %ymm1, %ymm2
vmovdqu16 %ymm1, %ymm2
vmovdqu32 %ymm1, %ymm2
vmovdqu64 %ymm1, %ymm2
vmovdqa32 127(%eax), %ymm2
vmovdqa64 127(%eax), %ymm2
vmovdqu8 127(%eax), %ymm2
vmovdqu16 127(%eax), %ymm2
vmovdqu32 127(%eax), %ymm2
vmovdqu64 127(%eax), %ymm2
vmovdqa32 %ymm1, 128(%eax)
vmovdqa64 %ymm1, 128(%eax)
vmovdqu8 %ymm1, 128(%eax)
vmovdqu16 %ymm1, 128(%eax)
vmovdqu32 %ymm1, 128(%eax)
vmovdqu64 %ymm1, 128(%eax)
vmovdqa32 %zmm1, %zmm2
vmovdqa64 %zmm1, %zmm2
vmovdqu8 %zmm1, %zmm2
vmovdqu16 %zmm1, %zmm2
vmovdqu32 %zmm1, %zmm2
vmovdqu64 %zmm1, %zmm2
{evex} vmovdqa32 %ymm1, %ymm2
{evex} vmovdqa64 %ymm1, %ymm2
{evex} vmovdqu8 %xmm1, %xmm2
{evex} vmovdqu16 %xmm1, %xmm2
{evex} vmovdqu32 %xmm1, %xmm2
{evex} vmovdqu64 %xmm1, %xmm2
vmovdqa32 %ymm1, %ymm2{%k1}
vmovdqa64 %ymm1, %ymm2{%k1}
vmovdqu8 %xmm1, %xmm2{%k1}
vmovdqu16 %xmm1, %xmm2{%k1}
vmovdqu32 %xmm1, %xmm2{%k1}
vmovdqu64 %xmm1, %xmm2{%k1}
vmovdqa32 (%eax), %ymm2{%k1}
vmovdqa64 (%eax), %ymm2{%k1}
vmovdqu8 (%eax), %xmm2{%k1}
vmovdqu16 (%eax), %xmm2{%k1}
vmovdqu32 (%eax), %xmm2{%k1}
vmovdqu64 (%eax), %xmm2{%k1}
vmovdqa32 %ymm1, (%eax){%k1}
vmovdqa64 %ymm1, (%eax){%k1}
vmovdqu8 %xmm1, (%eax){%k1}
vmovdqu16 %xmm1, (%eax){%k1}
vmovdqu32 %xmm1, (%eax){%k1}
vmovdqu64 %xmm1, (%eax){%k1}
vmovdqa32 %xmm1, %xmm2{%k1}{z}
vmovdqa64 %xmm1, %xmm2{%k1}{z}
vmovdqu8 %xmm1, %xmm2{%k1}{z}
vmovdqu16 %xmm1, %xmm2{%k1}{z}
vmovdqu32 %xmm1, %xmm2{%k1}{z}
vmovdqu64 %xmm1, %xmm2{%k1}{z}
vpandd %xmm2, %xmm3, %xmm4
vpandq %xmm2, %xmm3, %xmm4
vpandnd %xmm2, %xmm3, %xmm4
vpandnq %xmm2, %xmm3, %xmm4
vpord %xmm2, %xmm3, %xmm4
vporq %xmm2, %xmm3, %xmm4
vpxord %xmm2, %xmm3, %xmm4
vpxorq %xmm2, %xmm3, %xmm4
vpandd %ymm2, %ymm3, %ymm4
vpandq %ymm2, %ymm3, %ymm4
vpandnd %ymm2, %ymm3, %ymm4
vpandnq %ymm2, %ymm3, %ymm4
vpord %ymm2, %ymm3, %ymm4
vporq %ymm2, %ymm3, %ymm4
vpxord %ymm2, %ymm3, %ymm4
vpxorq %ymm2, %ymm3, %ymm4
vpandd 112(%eax), %xmm2, %xmm3
vpandq 112(%eax), %xmm2, %xmm3
vpandnd 112(%eax), %xmm2, %xmm3
vpandnq 112(%eax), %xmm2, %xmm3
vpord 112(%eax), %xmm2, %xmm3
vporq 112(%eax), %xmm2, %xmm3
vpxord 112(%eax), %xmm2, %xmm3
vpxorq 112(%eax), %xmm2, %xmm3
vpandd 128(%eax), %xmm2, %xmm3
vpandq 128(%eax), %xmm2, %xmm3
vpandnd 128(%eax), %xmm2, %xmm3
vpandnq 128(%eax), %xmm2, %xmm3
vpord 128(%eax), %xmm2, %xmm3
vporq 128(%eax), %xmm2, %xmm3
vpxord 128(%eax), %xmm2, %xmm3
vpxorq 128(%eax), %xmm2, %xmm3
vpandd 96(%eax), %ymm2, %ymm3
vpandq 96(%eax), %ymm2, %ymm3
vpandnd 96(%eax), %ymm2, %ymm3
vpandnq 96(%eax), %ymm2, %ymm3
vpord 96(%eax), %ymm2, %ymm3
vporq 96(%eax), %ymm2, %ymm3
vpxord 96(%eax), %ymm2, %ymm3
vpxorq 96(%eax), %ymm2, %ymm3
vpandd 128(%eax), %ymm2, %ymm3
vpandq 128(%eax), %ymm2, %ymm3
vpandnd 128(%eax), %ymm2, %ymm3
vpandnq 128(%eax), %ymm2, %ymm3
vpord 128(%eax), %ymm2, %ymm3
vporq 128(%eax), %ymm2, %ymm3
vpxord 128(%eax), %ymm2, %ymm3
vpxorq 128(%eax), %ymm2, %ymm3
vpandd %xmm2, %xmm3, %xmm4{%k5}
vpandq %ymm2, %ymm3, %ymm4{%k5}
vpandnd %ymm2, %ymm3, %ymm4{%k5}
vpandnq %xmm2, %xmm3, %xmm4{%k5}
vpord %xmm2, %xmm3, %xmm4{%k5}
vporq %ymm2, %ymm3, %ymm4{%k5}
vpxord %ymm2, %ymm3, %ymm4{%k5}
vpxorq %xmm2, %xmm3, %xmm4{%k5}
vpandd (%eax){1to8}, %ymm2, %ymm3
vpandq (%eax){1to2}, %xmm2, %xmm3
vpandnd (%eax){1to4}, %xmm2, %xmm3
vpandnq (%eax){1to4}, %ymm2, %ymm3
vpord (%eax){1to8}, %ymm2, %ymm3
vporq (%eax){1to2}, %xmm2, %xmm3
vpxord (%eax){1to4}, %xmm2, %xmm3
vpxorq (%eax){1to4}, %ymm2, %ymm3
|
tactcomplabs/xbgas-binutils-gdb
| 1,281
|
gas/testsuite/gas/i386/x86-64-sse-noavx.s
|
# Check 64bit SSE instructions without AVX equivalent
.text
_start:
cmpxchg16b (%rax)
crc32 %cl,%ebx
cvtpd2pi %xmm3,%mm2
cvtpi2pd %mm3,%xmm2
cvtpi2ps %mm3,%xmm2
cvtps2pi %xmm7,%mm6
cvttpd2pi %xmm4,%mm3
cvttps2pi %xmm4,%mm3
fisttps (%rax)
fisttpl (%rax)
fisttpll (%rax)
lfence
maskmovq %mm7,%mm0
mfence
monitor
movdq2q %xmm0, %mm1
movnti %eax, (%rax)
movntq %mm2,(%rax)
movq2dq %mm0, %xmm1
mwait
pabsb %mm1,%mm0
pabsd %mm1,%mm0
pabsw %mm1,%mm0
paddq %mm1,%mm0
palignr $0x2,%mm1,%mm0
pavgb %mm1,%mm0
pavgw %mm3,%mm2
pextrw $0x0,%mm1,%eax
phaddd %mm1,%mm0
phaddsw %mm1,%mm0
phaddw %mm1,%mm0
phsubd %mm1,%mm0
phsubsw %mm1,%mm0
phsubw %mm1,%mm0
pinsrw $0x2,%edx,%mm2
pmaddubsw %mm1,%mm0
pmaxsw %mm1,%mm0
pmaxub %mm2,%mm2
pminsw %mm5,%mm4
pminub %mm7,%mm6
pmovmskb %mm5,%eax
pmulhrsw %mm1,%mm0
pmulhuw %mm5,%mm4
pmuludq %mm0, %mm1
popcnt %ebx,%ecx
prefetchnta (%rax)
prefetcht0 (%rax)
prefetcht1 (%rax)
prefetcht2 (%rax)
psadbw %mm7,%mm6
pshufb %mm1,%mm0
pshufw $0x1,%mm2,%mm3
psignb %mm1,%mm0
psignd %mm1,%mm0
psignw %mm1,%mm0
psubq %mm1,%mm0
sfence
|
tactcomplabs/xbgas-binutils-gdb
| 4,226
|
gas/testsuite/gas/i386/x86-64-avx512_4fmaps.s
|
# Check 64bit AVX512_4FMAPS instructions
.allow_index_reg
.text
_start:
v4fmaddps (%rcx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddps (%rcx), %zmm8, %zmm1{%k7} # AVX512_4FMAPS
v4fmaddps (%rcx), %zmm8, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddps -123456(%rax,%r14,8), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddps 0x7f0(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps 0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddps -0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps -0x810(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%rcx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%rcx), %zmm8, %zmm1{%k7} # AVX512_4FMAPS
v4fnmaddps (%rcx), %zmm8, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddps -123456(%rax,%r14,8), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps 0x7f0(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps 0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fnmaddps -0x800(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps -0x810(%rdx), %zmm8, %zmm1 # AVX512_4FMAPS
v4fmaddss (%rcx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fmaddss (%rcx), %xmm8, %xmm1{%k7} # AVX512_4FMAPS
v4fmaddss (%rcx), %xmm8, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddss -123456(%rax,%r14,8), %xmm8, %xmm1 # AVX512_4FMAPS
v4fmaddss 0x7f0(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss 0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fmaddss -0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss -0x810(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%rcx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%rcx), %xmm8, %xmm1{%k7} # AVX512_4FMAPS
v4fnmaddss (%rcx), %xmm8, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddss -123456(%rax,%r14,8), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss 0x7f0(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss 0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
v4fnmaddss -0x800(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss -0x810(%rdx), %xmm8, %xmm1 # AVX512_4FMAPS
.intel_syntax noprefix
v4fmaddps zmm1, zmm8, [rcx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}{z}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}{z}, zmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, [rcx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}{z}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}{z}, xmm8, XMMWORD PTR [rcx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rax+r14*8-123456] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx+0x800] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm8, XMMWORD PTR [rdx-0x810] # AVX512_4FMAPS
|
tactcomplabs/xbgas-binutils-gdb
| 2,258
|
gas/testsuite/gas/i386/avx512vl_vpclmulqdq.s
|
# Check 32bit AVX512VL,VPCLMULQDQ instructions
.allow_index_reg
.text
_start:
vpclmulqdq $0xab, %xmm2, %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, -123456(%esp,%esi,8), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%edx), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm1, %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, -123456(%esp,%esi,8), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%edx), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %xmm2, %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, -123456(%esp,%esi,8), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%edx), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm1, %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, -123456(%esp,%esi,8), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%edx), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulhqhqdq %xmm2, %xmm3, %xmm4
{evex} vpclmulhqlqdq %xmm3, %xmm4, %xmm5
{evex} vpclmullqhqdq %xmm4, %xmm5, %xmm6
{evex} vpclmullqlqdq %xmm5, %xmm6, %xmm7
{evex} vpclmulhqhqdq %ymm1, %ymm2, %ymm3
{evex} vpclmulhqlqdq %ymm2, %ymm3, %ymm4
{evex} vpclmullqhqdq %ymm3, %ymm4, %ymm5
{evex} vpclmullqlqdq %ymm4, %ymm5, %ymm6
.intel_syntax noprefix
vpclmulqdq xmm3, xmm5, xmm3, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm3, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm3, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm2, ymm2, ymm2, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm2, ymm2, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm2, ymm2, YMMWORD PTR [edx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm3, xmm5, xmm3, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm3, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm3, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm2, ymm2, ymm2, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm2, ymm2, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm2, ymm2, YMMWORD PTR [edx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 1,032
|
gas/testsuite/gas/i386/x86-64-avx512er-rcig.s
|
# Check 64bit AVX512ER-RCIG instructions
.allow_index_reg
.text
_start:
vexp2ps {sae}, %zmm29, %zmm30 # AVX512ER
vexp2pd {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28ps {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28pd {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28ss {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
vrcp28sd {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
vrsqrt28ps {sae}, %zmm29, %zmm30 # AVX512ER
vrsqrt28pd {sae}, %zmm29, %zmm30 # AVX512ER
vrsqrt28ss {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
vrsqrt28sd {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
.intel_syntax noprefix
vexp2ps zmm30, zmm29, {sae} # AVX512ER
vexp2pd zmm30, zmm29, {sae} # AVX512ER
vrcp28ps zmm30, zmm29, {sae} # AVX512ER
vrcp28pd zmm30, zmm29, {sae} # AVX512ER
vrcp28ss xmm30, xmm29, xmm28, {sae} # AVX512ER
vrcp28sd xmm30, xmm29, xmm28, {sae} # AVX512ER
vrsqrt28ps zmm30, zmm29, {sae} # AVX512ER
vrsqrt28pd zmm30, zmm29, {sae} # AVX512ER
vrsqrt28ss xmm30, xmm29, xmm28, {sae} # AVX512ER
vrsqrt28sd xmm30, xmm29, xmm28, {sae} # AVX512ER
|
tactcomplabs/xbgas-binutils-gdb
| 1,067
|
gas/testsuite/gas/i386/secidx.s
|
.text
.ascii ">>>>"
pre04: .ascii "<<<<"
.ascii ">>>>>"
pre0d: .ascii "<<<"
.ascii ">>>>>>"
pre16: .ascii "<<"
.ascii ">>>>>>>"
pre1f: .ascii "<"
.data
.ascii ">>>>"
sam04: .ascii "<<<<"
.ascii ">>>>>"
sam0d: .ascii "<<<"
.ascii ">>>>>>"
sam16: .ascii "<<"
.ascii ">>>>>>>"
sam1f: .ascii "<"
.ascii ">>>>"
.secidx pre04
.byte 0x11
.secidx pre0d
.byte 0x11
.secidx pre16
.byte 0x11
.secidx pre1f
.byte 0x11
.ascii "<<<<<<<<"
.ascii ">>>>"
.secidx sam04
.byte 0x11
.secidx sam0d
.byte 0x11
.secidx sam16
.byte 0x11
.secidx sam1f
.byte 0x11
.ascii "<<<<<<<<"
.ascii ">>>>"
.secidx nex04
.byte 0x11
.secidx nex0d
.byte 0x11
.secidx nex16
.byte 0x11
.secidx nex1f
.byte 0x11
.ascii "<<<<<<<<"
.ascii ">>>>"
.secidx ext24
.byte 0x11
.secidx ext2d
.byte 0x11
.secidx ext36
.byte 0x11
.secidx ext3f
.byte 0x11
.ascii "<<<<<<<<"
.section .rdata
.ascii ">>>>"
nex04: .ascii "<<<<"
.ascii ">>>>>"
nex0d: .ascii "<<<"
.ascii ">>>>>>"
nex16: .ascii "<<"
.ascii ">>>>>>>"
nex1f: .ascii "<"
.ascii ">>>>"
.p2align 4,0
|
tactcomplabs/xbgas-binutils-gdb
| 95,745
|
gas/testsuite/gas/i386/avx512dq_vl.s
|
# Check 32bit AVX512{DQ,VL} instructions
.allow_index_reg
.text
_start:
vbroadcastf64x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 (%ecx), %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcastf64x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf64x2 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf64x2 -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 (%ecx), %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcasti64x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti64x2 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti64x2 -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 %xmm7, %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 %xmm7, %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcastf32x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 1016(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf32x2 1024(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 -1024(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf32x2 -1032(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2uqq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2uqq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2qq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 508(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -516(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2qq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 508(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -516(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2uqq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 508(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -516(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2uqq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 508(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -516(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2pd (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2pd (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2psx (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2psy (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy 4064(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy 4096(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy -4096(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy -4128(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2pd (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2pd (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2psx (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2psy (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy 4064(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy 4096(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy -4096(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy -4128(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vfpclasspd $0xab, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, (%eax){1to2}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, 2032(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, 2048(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, -2048(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, -2064(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, 1016(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, 1024(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, -1024(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, -1032(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $0xab, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, (%eax){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, 4064(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, 4096(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, -4096(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, -4128(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, 1016(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, 1024(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, -1024(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, -1032(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $0xab, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, (%eax){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, 2032(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, 2048(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, -2048(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, -2064(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, 508(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, 512(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, -512(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, -516(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $0xab, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, (%eax){1to8}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, 4064(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, 4096(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, -4096(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, -4128(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, 508(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, 512(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, -512(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, -516(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL}
vinsertf64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vinsertf64x2 $123, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinsertf64x2 $123, 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinsertf64x2 $123, -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vinserti64x2 $123, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinserti64x2 $123, 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinserti64x2 $123, -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %xmm6{%k7}{z} # AVX512{DQ,VL}
vbroadcasti32x2 (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcasti32x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 1016(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 1024(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -1024(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 -1032(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vpmullq %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vpmullq (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vpmullq (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vrangepd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vrangepd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vrangeps $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vrangeps $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandnpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandnpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandnps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandnps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vorpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vorpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vorps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vorps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
# --- continuation of the vorps {1toN} broadcast/Disp8 boundary group (start of
#     group precedes this chunk).  Lines marked "Disp8" use a displacement that
#     fits the EVEX compressed-disp8 encoding; the following line exceeds it by
#     one element and must fall back to a full disp32.
	vorps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vorps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vorps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vorps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vorps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vorps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vxorpd: xmm form (16-byte memory, 8-byte {1to2} broadcast), then ymm form
#     (32-byte memory, 8-byte {1to4} broadcast).  Each sub-group exercises
#     register/masked/zero-masked forms, plain and indexed memory operands, and
#     the positive/negative Disp8 compression boundaries.
	vxorpd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vxorpd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	(%eax){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	1016(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	-1024(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	-1032(%edx){1to2}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vxorpd	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	(%eax){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	1016(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorpd	-1024(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorpd	-1032(%edx){1to4}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vxorps: same pattern, single-precision, so broadcast element is 4 bytes
#     ({1to4} for xmm, {1to8} for ymm) and the broadcast Disp8 boundary is
#     508/512 instead of 1016/1024.
	vxorps	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vxorps	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	(%eax){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	-2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	-2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	508(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	-512(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	-516(%edx){1to4}, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vxorps	%ymm4, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	%ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vxorps	(%ecx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	-123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	(%eax){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	-4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	-4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	508(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vxorps	-512(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vxorps	-516(%edx){1to8}, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vreducepd/vreduceps: immediate-operand forms.  $0xab and $123 exercise
#     both a high (sign-bit set as a byte) and a plain decimal immediate; the
#     rest of each group repeats the standard memory/broadcast/Disp8-boundary
#     matrix for xmm (16-byte) and ymm (32-byte) operand sizes.
	vreducepd	$0xab, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vreducepd	$123, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, (%ecx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, (%eax){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, 2032(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, 2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, -2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, -2064(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, 1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, 1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, -1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, -1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$0xab, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vreducepd	$123, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, (%ecx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, (%eax){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, 4064(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, 4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, -4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, -4128(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, 1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, 1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreducepd	$123, -1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreducepd	$123, -1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vreduceps: 4-byte element broadcast, so {1to4}/{1to8} with 508/512
#     Disp8 broadcast boundaries.
	vreduceps	$0xab, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vreduceps	$123, %xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, (%ecx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, (%eax){1to4}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, 2032(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, 2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, -2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, -2064(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, 508(%edx){1to4}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, 512(%edx){1to4}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, -512(%edx){1to4}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, -516(%edx){1to4}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$0xab, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vreduceps	$123, %ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, (%ecx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, (%eax){1to8}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, 4064(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, 4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, -4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, -4128(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, 508(%edx){1to8}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, 512(%edx){1to8}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vreduceps	$123, -512(%edx){1to8}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vreduceps	$123, -516(%edx){1to8}, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vextractf64x2/vextracti64x2 to memory: extract a 128-bit lane from a ymm
#     register into a masked 16-byte memory destination.  Disp8 scale is 16,
#     hence the 2032/2048 and -2048/-2064 boundary pairs.
	vextractf64x2	$0xab, %ymm5, (%ecx){%k7}	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm5, (%ecx){%k7}	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm5, -123456(%esp,%esi,8){%k7}	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm5, 2032(%edx){%k7}	 # AVX512{DQ,VL} Disp8
	vextractf64x2	$123, %ymm5, 2048(%edx){%k7}	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm5, -2048(%edx){%k7}	 # AVX512{DQ,VL} Disp8
	vextractf64x2	$123, %ymm5, -2064(%edx){%k7}	 # AVX512{DQ,VL}
	vextracti64x2	$0xab, %ymm5, (%ecx){%k7}	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm5, (%ecx){%k7}	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm5, -123456(%esp,%esi,8){%k7}	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm5, 2032(%edx){%k7}	 # AVX512{DQ,VL} Disp8
	vextracti64x2	$123, %ymm5, 2048(%edx){%k7}	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm5, -2048(%edx){%k7}	 # AVX512{DQ,VL} Disp8
	vextracti64x2	$123, %ymm5, -2064(%edx){%k7}	 # AVX512{DQ,VL}
# --- vcvttpd2qq / vcvttpd2uqq: truncating double -> (un)signed qword
#     conversion.  Source and destination element counts match, so xmm forms
#     read 16 bytes and ymm forms read 32; broadcast element is 8 bytes.
	vcvttpd2qq	%xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	%xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttpd2qq	(%ecx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	(%eax){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	2032(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	-2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	-2064(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	-1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	-1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	%ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	%ymm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttpd2qq	(%ecx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	(%eax){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	4064(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	-4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	-4128(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2qq	-1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2qq	-1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	%xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	%xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttpd2uqq	(%ecx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	(%eax){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	2032(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	-2048(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	-2064(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	-1024(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	-1032(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	%ymm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	%ymm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttpd2uqq	(%ecx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	(%eax){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	4064(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	-4096(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	-4128(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttpd2uqq	-1024(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttpd2uqq	-1032(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vcvttps2qq / vcvttps2uqq: truncating float -> (un)signed qword.
#     Destination elements are twice the source width, so the memory operand is
#     half the destination register size: 8 bytes for an xmm destination
#     (1016/1024 Disp8 boundary) and 16 bytes for a ymm destination
#     (2032/2048).  Broadcast element is 4 bytes (508/512 boundary).
	vcvttps2qq	%xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	%xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttps2qq	(%ecx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	(%eax){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	1016(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	1024(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	-1024(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	-1032(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	508(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	512(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	-512(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	-516(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	%xmm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	%xmm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttps2qq	(%ecx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	(%eax){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	2032(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	2048(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	-2048(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	-2064(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	508(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	512(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2qq	-512(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2qq	-516(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	%xmm5, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	%xmm5, %xmm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttps2uqq	(%ecx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	(%eax){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	1016(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	1024(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	-1024(%edx), %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	-1032(%edx), %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	508(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	512(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	-512(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	-516(%edx){1to2}, %xmm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	%xmm5, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	%xmm5, %ymm6{%k7}{z}	 # AVX512{DQ,VL}
	vcvttps2uqq	(%ecx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	(%eax){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	2032(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	2048(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	-2048(%edx), %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	-2064(%edx), %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	508(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	512(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
	vcvttps2uqq	-512(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL} Disp8
	vcvttps2uqq	-516(%edx){1to4}, %ymm6{%k7}	 # AVX512{DQ,VL}
# --- vector <-> mask-register moves: vpmov{d,q}2m packs element sign bits
#     into a k-register; vpmovm2{d,q} expands mask bits back to all-ones/zeros
#     vector elements.  Register-only forms, xmm and ymm widths.
	vpmovd2m	%xmm6, %k5	 # AVX512{DQ,VL}
	vpmovd2m	%ymm6, %k5	 # AVX512{DQ,VL}
	vpmovq2m	%xmm6, %k5	 # AVX512{DQ,VL}
	vpmovq2m	%ymm6, %k5	 # AVX512{DQ,VL}
	vpmovm2d	%k5, %xmm6	 # AVX512{DQ,VL}
	vpmovm2d	%k5, %ymm6	 # AVX512{DQ,VL}
	vpmovm2q	%k5, %xmm6	 # AVX512{DQ,VL}
	vpmovm2q	%k5, %ymm6	 # AVX512{DQ,VL}
# --- switch to Intel syntax: from here on operand order is dest-first, size
#     keywords (XMMWORD/QWORD PTR) replace AT&T suffixes, and masks/broadcasts
#     use {k7}/{z}/{1toN} without the % prefix.
	.intel_syntax noprefix
# --- vbroadcastf64x2/vbroadcasti64x2: replicate a 128-bit memory tuple into a
#     ymm destination; Disp8 scale 16 (2032/2048 boundary).
	vbroadcastf64x2	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
# --- vbroadcastf32x2: replicate a 64-bit (2 x float) tuple from register or
#     8-byte memory; Disp8 scale 8 (1016/1024 boundary).
	vbroadcastf32x2	ymm6{k7}, xmm7	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm6{k7}{z}, xmm7	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{DQ,VL}
# --- vcvtpd2qq / vcvtpd2uqq (Intel syntax): rounding double -> (un)signed
#     qword conversion.  Broadcast operands are written bare ([eax]{1to2})
#     with the element size implied by the instruction.
	vcvtpd2qq	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm6{k7}, [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2qq	xmm6{k7}, [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	xmm6{k7}, [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, ymm5	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}{z}, ymm5	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm6{k7}, [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2qq	ymm6{k7}, [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq	ymm6{k7}, [edx-1032]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm6{k7}, [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2uqq	xmm6{k7}, [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	xmm6{k7}, [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, ymm5	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}{z}, ymm5	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm6{k7}, [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtpd2uqq	ymm6{k7}, [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq	ymm6{k7}, [edx-1032]{1to4}	 # AVX512{DQ,VL}
# --- vcvtps2qq / vcvtps2uqq (Intel syntax): float -> (un)signed qword.
#     Memory operand is half the destination width (QWORD for xmm, XMMWORD for
#     ymm).  The final line of each sub-group uses the "DWORD BCST" keyword
#     spelling, the explicit-element-size alternative to the {1toN} suffix.
	vcvtps2qq	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, [edx+508]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm6{k7}, [edx+512]{1to2}	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, [edx-512]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	xmm6{k7}, [edx-516]{1to2}	 # AVX512{DQ,VL}
	vcvtps2qq	xmm6{k7}, DWORD BCST [edx+508]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, [edx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm6{k7}, [edx+512]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, [edx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm6{k7}, [edx-516]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm6{k7}, DWORD BCST [edx+508]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, [edx+508]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm6{k7}, [edx+512]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, [edx-512]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm6{k7}, [edx-516]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm6{k7}, DWORD BCST [edx+508]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, [edx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm6{k7}, [edx+512]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, [edx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm6{k7}, [edx-516]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm6{k7}, DWORD BCST [edx+508]	 # AVX512{DQ,VL} Disp8
# --- vcvtqq2pd: signed qword -> double, same element count in and out, so
#     memory operand matches the destination register width.
	vcvtqq2pd	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm6{k7}, [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm6{k7}, [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm6{k7}, [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, ymm5	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}{z}, ymm5	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm6{k7}, [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm6{k7}, [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm6{k7}, [edx-1032]{1to4}	 # AVX512{DQ,VL}
# --- vcvtqq2ps: narrowing qword -> float; the destination is always xmm, so
#     the register source disambiguates the operand size and broadcast forms
#     need the explicit "QWORD BCST" element-size keyword.
	vcvtqq2ps	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, ymm5	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}{z}, ymm5	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm6{k7}, QWORD BCST [edx-1032]{1to4}	 # AVX512{DQ,VL}
# --- vcvtuqq2pd / vcvtuqq2ps: unsigned variants of the qq->pd/ps conversions
#     above; same operand-size and broadcast rules apply.
	vcvtuqq2pd	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm6{k7}, [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm6{k7}, [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm6{k7}, [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, ymm5	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}{z}, ymm5	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm6{k7}, [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm6{k7}, [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm6{k7}, [edx-1032]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, xmm5	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}{z}, xmm5	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, [eax]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, ymm5	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}{z}, ymm5	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, [eax]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm6{k7}, QWORD BCST [edx-1032]{1to4}	 # AVX512{DQ,VL}
# --- vextractf64x2/vextracti64x2 register forms (Intel syntax): extract a
#     128-bit lane from ymm into a masked xmm destination.
	vextractf64x2	xmm6{k7}, ymm5, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm6{k7}{z}, ymm5, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm6{k7}, ymm5, 123	 # AVX512{DQ,VL}
	vextracti64x2	xmm6{k7}, ymm5, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm6{k7}{z}, ymm5, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm6{k7}, ymm5, 123	 # AVX512{DQ,VL}
# --- vfpclasspd/vfpclassps: classify FP values into a mask register.  The
#     destination is always a k-register, so memory-only forms need the
#     explicit size keyword (XMMWORD/YMMWORD PTR, or QWORD/DWORD BCST for
#     broadcasts) to select the vector length.
	vfpclasspd	k5{k7}, xmm6, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, xmm6, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, XMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, [eax]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, QWORD BCST [edx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, QWORD BCST [edx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, QWORD BCST [edx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, QWORD BCST [edx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, ymm6, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, ymm6, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, YMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, [eax]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, QWORD BCST [edx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, QWORD BCST [edx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, QWORD BCST [edx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5{k7}, QWORD BCST [edx-1032]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, xmm6, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, xmm6, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, XMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, [eax]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, XMMWORD PTR [edx+2032], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, XMMWORD PTR [edx+2048], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, XMMWORD PTR [edx-2048], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, XMMWORD PTR [edx-2064], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, DWORD BCST [edx+508]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, DWORD BCST [edx+512]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, DWORD BCST [edx-512]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, DWORD BCST [edx-516]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, ymm6, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, ymm6, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, YMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, [eax]{1to8}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, YMMWORD PTR [edx+4064], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, YMMWORD PTR [edx+4096], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, YMMWORD PTR [edx-4096], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, YMMWORD PTR [edx-4128], 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, DWORD BCST [edx+508]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, DWORD BCST [edx+512]{1to8}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, DWORD BCST [edx-512]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5{k7}, DWORD BCST [edx-516]{1to8}, 123	 # AVX512{DQ,VL}
# --- vinsertf64x2/vinserti64x2: insert a 128-bit lane (register or 16-byte
#     memory) into a ymm destination; no broadcast form exists for these.
	vinsertf64x2	ymm6{k7}, ymm5, xmm4, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm6{k7}{z}, ymm5, xmm4, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm6{k7}, ymm5, xmm4, 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123	 # AVX512{DQ,VL} Disp8
	vinsertf64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123	 # AVX512{DQ,VL} Disp8
	vinsertf64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}, ymm5, xmm4, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}{z}, ymm5, xmm4, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}, ymm5, xmm4, 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123	 # AVX512{DQ,VL} Disp8
	vinserti64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123	 # AVX512{DQ,VL} Disp8
	vinserti64x2	ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123	 # AVX512{DQ,VL}
# --- vbroadcasti32x2: replicate a 64-bit (2 x dword) tuple from register or
#     8-byte memory into xmm or ymm; Disp8 scale 8.
	vbroadcasti32x2	xmm6{k7}, xmm7	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm6{k7}{z}, xmm7	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm6{k7}, QWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm6{k7}, xmm7	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm6{k7}{z}, xmm7	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm6{k7}, QWORD PTR [ecx]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm6{k7}, QWORD PTR [edx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	ymm6{k7}, QWORD PTR [edx+1024]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm6{k7}, QWORD PTR [edx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	ymm6{k7}, QWORD PTR [edx-1032]	 # AVX512{DQ,VL}
# --- vpmullq: packed 64-bit multiply (low halves); three-operand form with
#     the full memory/broadcast/Disp8 matrix for xmm and ymm widths.
	vpmullq	xmm6{k7}, xmm5, xmm4	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}{z}, xmm5, xmm4	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, [eax]{1to2}	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, [edx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm6{k7}, xmm5, [edx+1024]{1to2}	 # AVX512{DQ,VL}
	vpmullq	xmm6{k7}, xmm5, [edx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm6{k7}, xmm5, [edx-1032]{1to2}	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, ymm4	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}{z}, ymm5, ymm4	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, [eax]{1to4}	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, [edx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm6{k7}, ymm5, [edx+1024]{1to4}	 # AVX512{DQ,VL}
	vpmullq	ymm6{k7}, ymm5, [edx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm6{k7}, ymm5, [edx-1032]{1to4}	 # AVX512{DQ,VL}
# --- vrangepd: range operation with immediate control byte; three vector
#     operands plus immediate, with the same broadcast/Disp8 matrix.
	vrangepd	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, xmm4, 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, [eax]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, [edx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm6{k7}, xmm5, [edx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	xmm6{k7}, xmm5, [edx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm6{k7}, xmm5, [edx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, ymm4, 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, [edx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm6{k7}, ymm5, [edx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm6{k7}, ymm5, [edx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm6{k7}, ymm5, [edx-1032]{1to4}, 123	 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, xmm4, 0xab # AVX512{DQ,VL}
vrangeps xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, xmm4, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, [eax]{1to4}, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, [edx+512]{1to4}, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, [edx-512]{1to4}, 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, [edx-516]{1to4}, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, ymm4, 0xab # AVX512{DQ,VL}
vrangeps ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, ymm4, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, [eax]{1to8}, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, [edx+512]{1to8}, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, [edx-512]{1to8}, 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, [edx-516]{1to8}, 123 # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandnpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandnpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandnps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandnps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vorpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vorpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vorps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vorps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vxorpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vxorpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vxorps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vxorps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vreducepd xmm6{k7}, xmm5, 0xab # AVX512{DQ,VL}
vreducepd xmm6{k7}{z}, xmm5, 0xab # AVX512{DQ,VL}
vreducepd xmm6{k7}, xmm5, 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, [eax]{1to2}, 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, ymm5, 0xab # AVX512{DQ,VL}
vreducepd ymm6{k7}{z}, ymm5, 0xab # AVX512{DQ,VL}
vreducepd ymm6{k7}, ymm5, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, [eax]{1to4}, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, xmm5, 0xab # AVX512{DQ,VL}
vreduceps xmm6{k7}{z}, xmm5, 0xab # AVX512{DQ,VL}
vreduceps xmm6{k7}, xmm5, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, [eax]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, ymm5, 0xab # AVX512{DQ,VL}
vreduceps ymm6{k7}{z}, ymm5, 0xab # AVX512{DQ,VL}
vreduceps ymm6{k7}, ymm5, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, [eax]{1to8}, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextractf64x2 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextractf64x2 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, [edx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, [edx+512]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, [edx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, [edx-516]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, [edx+512]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, [edx-516]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, [edx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, [edx+512]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, [edx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, [edx-516]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, [edx+512]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, [edx-516]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vpmovd2m k5, xmm6 # AVX512{DQ,VL}
vpmovd2m k5, ymm6 # AVX512{DQ,VL}
vpmovq2m k5, xmm6 # AVX512{DQ,VL}
vpmovq2m k5, ymm6 # AVX512{DQ,VL}
vpmovm2d xmm6, k5 # AVX512{DQ,VL}
vpmovm2d ymm6, k5 # AVX512{DQ,VL}
vpmovm2q xmm6, k5 # AVX512{DQ,VL}
vpmovm2q ymm6, k5 # AVX512{DQ,VL}
# ----------------------------------------------------------------------
# File boundary (extraction artifact, rewritten as a comment):
#   repo: tactcomplabs/xbgas-binutils-gdb   size: 3504 bytes
#   path: gas/testsuite/gas/i386/reloc32.s
# ----------------------------------------------------------------------
.macro bad args:vararg
.ifdef _bad_
\args
.endif
.endm
.macro ill args:vararg
# This is used to mark entries that aren't handled consistently,
# and thus shouldn't currently be checked for.
# \args
.endm
.text
_start:
mov $xtrn, %eax
mov $xtrn, %ax
mov $xtrn, %al
mov xtrn(%ebx), %eax
mov xtrn(%bx), %eax
mov $(xtrn - .), %eax
mov $(xtrn - .), %ax
mov $(xtrn - .), %al
mov xtrn - .(%ebx), %eax
mov xtrn - .(%bx), %eax
call xtrn
jecxz xtrn
mov $xtrn@got, %eax
bad mov $xtrn@got, %ax
bad mov $xtrn@got, %al
mov xtrn@got(%ebx), %eax
bad mov xtrn@got(%bx), %eax
bad call xtrn@got
mov $xtrn@gotoff, %eax
bad mov $xtrn@gotoff, %ax
bad mov $xtrn@gotoff, %al
mov xtrn@gotoff(%ebx), %eax
bad mov xtrn@gotoff(%bx), %eax
bad call xtrn@gotoff
add $_GLOBAL_OFFSET_TABLE_, %eax
ill add $_GLOBAL_OFFSET_TABLE_, %ax
ill add $_GLOBAL_OFFSET_TABLE_, %al
add $(_GLOBAL_OFFSET_TABLE_ - .), %eax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %ax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %al
mov $xtrn@plt, %eax
bad mov $xtrn@plt, %ax
bad mov $xtrn@plt, %al
mov xtrn@plt(%ebx), %eax
bad mov xtrn@plt(%bx), %eax
call xtrn@plt
bad jecxz xtrn@plt
mov $xtrn@tlsgd, %eax
bad mov $xtrn@tlsgd, %ax
bad mov $xtrn@tlsgd, %al
mov xtrn@tlsgd(%ebx), %eax
bad mov xtrn@tlsgd(%bx), %eax
bad call xtrn@tlsgd
mov $xtrn@gotntpoff, %eax
bad mov $xtrn@gotntpoff, %ax
bad mov $xtrn@gotntpoff, %al
mov xtrn@gotntpoff(%ebx), %eax
bad mov xtrn@gotntpoff(%bx), %eax
bad call xtrn@gotntpoff
mov $xtrn@indntpoff, %eax
bad mov $xtrn@indntpoff, %ax
bad mov $xtrn@indntpoff, %al
mov xtrn@indntpoff(%ebx), %eax
bad mov xtrn@indntpoff(%bx), %eax
bad call xtrn@indntpoff
mov $xtrn@gottpoff, %eax
bad mov $xtrn@gottpoff, %ax
bad mov $xtrn@gottpoff, %al
mov xtrn@gottpoff(%ebx), %eax
bad mov xtrn@gottpoff(%bx), %eax
bad call xtrn@gottpoff
mov $xtrn@tlsldm, %eax
bad mov $xtrn@tlsldm, %ax
bad mov $xtrn@tlsldm, %al
mov xtrn@tlsldm(%ebx), %eax
bad mov xtrn@tlsldm(%bx), %eax
bad call xtrn@tlsldm
mov $xtrn@dtpoff, %eax
bad mov $xtrn@dtpoff, %ax
bad mov $xtrn@dtpoff, %al
mov xtrn@dtpoff(%ebx), %eax
bad mov xtrn@dtpoff(%bx), %eax
bad call xtrn@dtpoff
mov $xtrn@ntpoff, %eax
bad mov $xtrn@ntpoff, %ax
bad mov $xtrn@ntpoff, %al
mov xtrn@ntpoff(%ebx), %eax
bad mov xtrn@ntpoff(%bx), %eax
bad call xtrn@ntpoff
mov $xtrn@tpoff, %eax
bad mov $xtrn@tpoff, %ax
bad mov $xtrn@tpoff, %al
mov xtrn@tpoff(%ebx), %eax
bad mov xtrn@tpoff(%bx), %eax
bad call xtrn@tpoff
.data
.long xtrn
.long xtrn - .
.long xtrn@got
.long xtrn@gotoff
.long _GLOBAL_OFFSET_TABLE_
.long _GLOBAL_OFFSET_TABLE_ - .
.long xtrn@plt
.long xtrn@tlsgd
.long xtrn@gotntpoff
.long xtrn@indntpoff
.long xtrn@gottpoff
.long xtrn@tlsldm
.long xtrn@dtpoff
.long xtrn@ntpoff
.long xtrn@tpoff
.word xtrn
.word xtrn - .
bad .word xtrn@got
bad .word xtrn@gotoff
ill .word _GLOBAL_OFFSET_TABLE_
ill .word _GLOBAL_OFFSET_TABLE_ - .
bad .word xtrn@plt
bad .word xtrn@tlsgd
bad .word xtrn@gotntpoff
bad .word xtrn@indntpoff
bad .word xtrn@gottpoff
bad .word xtrn@tlsldm
bad .word xtrn@dtpoff
bad .word xtrn@ntpoff
bad .word xtrn@tpoff
.byte xtrn
.byte xtrn - .
bad .byte xtrn@got
bad .byte xtrn@gotoff
ill .byte _GLOBAL_OFFSET_TABLE_
ill .byte _GLOBAL_OFFSET_TABLE_ - .
bad .byte xtrn@plt
bad .byte xtrn@tlsgd
bad .byte xtrn@gotntpoff
bad .byte xtrn@indntpoff
bad .byte xtrn@gottpoff
bad .byte xtrn@tlsldm
bad .byte xtrn@dtpoff
bad .byte xtrn@ntpoff
bad .byte xtrn@tpoff
.long xtrn@got + 4
.long xtrn@got - 4
bad .long xtrn@plt - .
# ----------------------------------------------------------------------
# File boundary (extraction artifact, rewritten as a comment):
#   repo: tactcomplabs/xbgas-binutils-gdb   size: 7793 bytes
#   path: gas/testsuite/gas/i386/x86-64-lwp.s
# ----------------------------------------------------------------------
# Check 64bit LWP instructions
.allow_index_reg
.text
_start:
llwpcb %eax
llwpcb %ecx
llwpcb %edx
llwpcb %ebx
llwpcb %esp
llwpcb %ebp
llwpcb %esi
llwpcb %edi
llwpcb %r8d
llwpcb %r9d
llwpcb %r10d
llwpcb %r11d
llwpcb %r12d
llwpcb %r13d
llwpcb %r14d
llwpcb %r15d
llwpcb %rax
llwpcb %rcx
llwpcb %rdx
llwpcb %rbx
llwpcb %rsp
llwpcb %rbp
llwpcb %rsi
llwpcb %rdi
llwpcb %r8
llwpcb %r9
llwpcb %r10
llwpcb %r11
llwpcb %r12
llwpcb %r13
llwpcb %r14
llwpcb %r15
slwpcb %r15
slwpcb %r14
slwpcb %r13
slwpcb %r12
slwpcb %r11
slwpcb %r10
slwpcb %r9
slwpcb %r8
slwpcb %rdi
slwpcb %rsi
slwpcb %rbp
slwpcb %rsp
slwpcb %rbx
slwpcb %rdx
slwpcb %rcx
slwpcb %rax
slwpcb %r15d
slwpcb %r14d
slwpcb %r13d
slwpcb %r12d
slwpcb %r11d
slwpcb %r10d
slwpcb %r9d
slwpcb %r8d
slwpcb %edi
slwpcb %esi
slwpcb %ebp
slwpcb %esp
slwpcb %ebx
slwpcb %edx
slwpcb %ecx
slwpcb %eax
lwpins $0x12345678, %r15d, %eax
lwpins $0x12345678, %r14d, %ecx
lwpins $0x12345678, %r13d, %edx
lwpins $0x12345678, %r12d, %ebx
lwpins $0x12345678, %r11d, %esp
lwpins $0x12345678, %r10d, %ebp
lwpins $0x12345678, %r9d, %esi
lwpins $0x12345678, %r8d, %edi
lwpins $0x12345678, %edi, %r8d
lwpins $0x12345678, %esi, %r9d
lwpins $0x12345678, %ebp, %r10d
lwpins $0x12345678, %esp, %r11d
lwpins $0x12345678, %ebx, %r12d
lwpins $0x12345678, %edx, %r13d
lwpins $0x12345678, %ecx, %r14d
lwpins $0x12345678, %eax, %r15d
lwpins $0x12345678, %r15d, %rax
lwpins $0x12345678, %r14d, %rcx
lwpins $0x12345678, %r13d, %rdx
lwpins $0x12345678, %r12d, %rbx
lwpins $0x12345678, %r11d, %rsp
lwpins $0x12345678, %r10d, %rbp
lwpins $0x12345678, %r9d, %rsi
lwpins $0x12345678, %r8d, %rdi
lwpins $0x12345678, %eax, %r8
lwpins $0x12345678, %ecx, %r9
lwpins $0x12345678, %edx, %r10
lwpins $0x12345678, %ebx, %r11
lwpins $0x12345678, %esp, %r12
lwpins $0x12345678, %ebp, %r13
lwpins $0x12345678, %esi, %r14
lwpins $0x12345678, %edi, %r15
lwpval $0x12345678, %r15d, %eax
lwpval $0x12345678, %r14d, %ecx
lwpval $0x12345678, %r13d, %edx
lwpval $0x12345678, %r12d, %ebx
lwpval $0x12345678, %r11d, %esp
lwpval $0x12345678, %r10d, %ebp
lwpval $0x12345678, %r9d, %esi
lwpval $0x12345678, %r8d, %edi
lwpval $0x12345678, %edi, %r8d
lwpval $0x12345678, %esi, %r9d
lwpval $0x12345678, %ebp, %r10d
lwpval $0x12345678, %esp, %r11d
lwpval $0x12345678, %ebx, %r12d
lwpval $0x12345678, %edx, %r13d
lwpval $0x12345678, %ecx, %r14d
lwpval $0x12345678, %eax, %r15d
lwpval $0x12345678, %r15d, %rax
lwpval $0x12345678, %r14d, %rcx
lwpval $0x12345678, %r13d, %rdx
lwpval $0x12345678, %r12d, %rbx
lwpval $0x12345678, %r11d, %rsp
lwpval $0x12345678, %r10d, %rbp
lwpval $0x12345678, %r9d, %rsi
lwpval $0x12345678, %r8d, %rdi
lwpval $0x12345678, %eax, %r8
lwpval $0x12345678, %ecx, %r9
lwpval $0x12345678, %edx, %r10
lwpval $0x12345678, %ebx, %r11
lwpval $0x12345678, %esp, %r12
lwpval $0x12345678, %ebp, %r13
lwpval $0x12345678, %esi, %r14
lwpval $0x12345678, %edi, %r15
lwpins $0x12345678, (%r15d), %eax
lwpins $0x12345678, (%r14d), %ecx
lwpins $0x12345678, (%r13d), %edx
lwpins $0x12345678, (%r12d), %ebx
lwpins $0x12345678, (%r11d), %esp
lwpins $0x12345678, (%r10d), %ebp
lwpins $0x12345678, (%r9d), %esi
lwpins $0x12345678, (%r8d), %edi
lwpins $0x12345678, (%edi), %r8d
lwpins $0x12345678, (%esi), %r9d
lwpins $0x12345678, (%ebp), %r10d
lwpins $0x12345678, (%esp), %r11d
lwpins $0x12345678, (%ebx), %r12d
lwpins $0x12345678, (%edx), %r13d
lwpins $0x12345678, (%ecx), %r14d
lwpins $0x12345678, (%eax), %r15d
lwpins $0x12345678, (%r15d), %rax
lwpins $0x12345678, (%r14d), %rcx
lwpins $0x12345678, (%r13d), %rdx
lwpins $0x12345678, (%r12d), %rbx
lwpins $0x12345678, (%r11d), %rsp
lwpins $0x12345678, (%r10d), %rbp
lwpins $0x12345678, (%r9d), %rsi
lwpins $0x12345678, (%r8d), %rdi
lwpins $0x12345678, (%eax), %r8
lwpins $0x12345678, (%ecx), %r9
lwpins $0x12345678, (%edx), %r10
lwpins $0x12345678, (%ebx), %r11
lwpins $0x12345678, (%esp), %r12
lwpins $0x12345678, (%ebp), %r13
lwpins $0x12345678, (%esi), %r14
lwpins $0x12345678, (%edi), %r15
lwpval $0x12345678, (%r15d), %eax
lwpval $0x12345678, (%r14d), %ecx
lwpval $0x12345678, (%r13d), %edx
lwpval $0x12345678, (%r12d), %ebx
lwpval $0x12345678, (%r11d), %esp
lwpval $0x12345678, (%r10d), %ebp
lwpval $0x12345678, (%r9d), %esi
lwpval $0x12345678, (%r8d), %edi
lwpval $0x12345678, (%edi), %r8d
lwpval $0x12345678, (%esi), %r9d
lwpval $0x12345678, (%ebp), %r10d
lwpval $0x12345678, (%esp), %r11d
lwpval $0x12345678, (%ebx), %r12d
lwpval $0x12345678, (%edx), %r13d
lwpval $0x12345678, (%ecx), %r14d
lwpval $0x12345678, (%eax), %r15d
lwpval $0x12345678, (%r15d), %rax
lwpval $0x12345678, (%r14d), %rcx
lwpval $0x12345678, (%r13d), %rdx
lwpval $0x12345678, (%r12d), %rbx
lwpval $0x12345678, (%r11d), %rsp
lwpval $0x12345678, (%r10d), %rbp
lwpval $0x12345678, (%r9d), %rsi
lwpval $0x12345678, (%r8d), %rdi
lwpval $0x12345678, (%eax), %r8
lwpval $0x12345678, (%ecx), %r9
lwpval $0x12345678, (%edx), %r10
lwpval $0x12345678, (%ebx), %r11
lwpval $0x12345678, (%esp), %r12
lwpval $0x12345678, (%ebp), %r13
lwpval $0x12345678, (%esi), %r14
lwpval $0x12345678, (%edi), %r15
lwpins $0x12345678, 0xcafe(%r15d), %eax
lwpins $0x12345678, 0xcafe(%r14d), %ecx
lwpins $0x12345678, 0xcafe(%r13d), %edx
lwpins $0x12345678, 0xcafe(%r12d), %ebx
lwpins $0x12345678, 0xcafe(%r11d), %esp
lwpins $0x12345678, 0xcafe(%r10d), %ebp
lwpins $0x12345678, 0xcafe(%r9d), %esi
lwpins $0x12345678, 0xcafe(%r8d), %edi
lwpins $0x12345678, 0xcafe(%edi), %r8d
lwpins $0x12345678, 0xcafe(%esi), %r9d
lwpins $0x12345678, 0xcafe(%ebp), %r10d
lwpins $0x12345678, 0xcafe(%esp), %r11d
lwpins $0x12345678, 0xcafe(%ebx), %r12d
lwpins $0x12345678, 0xcafe(%edx), %r13d
lwpins $0x12345678, 0xcafe(%ecx), %r14d
lwpins $0x12345678, 0xcafe(%eax), %r15d
lwpins $0x12345678, 0xcafe(%r15d), %rax
lwpins $0x12345678, 0xcafe(%r14d), %rcx
lwpins $0x12345678, 0xcafe(%r13d), %rdx
lwpins $0x12345678, 0xcafe(%r12d), %rbx
lwpins $0x12345678, 0xcafe(%r11d), %rsp
lwpins $0x12345678, 0xcafe(%r10d), %rbp
lwpins $0x12345678, 0xcafe(%r9d), %rsi
lwpins $0x12345678, 0xcafe(%r8d), %rdi
lwpins $0x12345678, 0xcafe(%eax), %r8
lwpins $0x12345678, 0xcafe(%ecx), %r9
lwpins $0x12345678, 0xcafe(%edx), %r10
lwpins $0x12345678, 0xcafe(%ebx), %r11
lwpins $0x12345678, 0xcafe(%esp), %r12
lwpins $0x12345678, 0xcafe(%ebp), %r13
lwpins $0x12345678, 0xcafe(%esi), %r14
lwpins $0x12345678, 0xcafe(%edi), %r15
lwpval $0x12345678, 0xcafe(%r15d), %eax
lwpval $0x12345678, 0xcafe(%r14d), %ecx
lwpval $0x12345678, 0xcafe(%r13d), %edx
lwpval $0x12345678, 0xcafe(%r12d), %ebx
lwpval $0x12345678, 0xcafe(%r11d), %esp
lwpval $0x12345678, 0xcafe(%r10d), %ebp
lwpval $0x12345678, 0xcafe(%r9d), %esi
lwpval $0x12345678, 0xcafe(%r8d), %edi
lwpval $0x12345678, 0xcafe(%edi), %r8d
lwpval $0x12345678, 0xcafe(%esi), %r9d
lwpval $0x12345678, 0xcafe(%ebp), %r10d
lwpval $0x12345678, 0xcafe(%esp), %r11d
lwpval $0x12345678, 0xcafe(%ebx), %r12d
lwpval $0x12345678, 0xcafe(%edx), %r13d
lwpval $0x12345678, 0xcafe(%ecx), %r14d
lwpval $0x12345678, 0xcafe(%eax), %r15d
lwpval $0x12345678, 0xcafe(%r15d), %rax
lwpval $0x12345678, 0xcafe(%r14d), %rcx
lwpval $0x12345678, 0xcafe(%r13d), %rdx
lwpval $0x12345678, 0xcafe(%r12d), %rbx
lwpval $0x12345678, 0xcafe(%r11d), %rsp
lwpval $0x12345678, 0xcafe(%r10d), %rbp
lwpval $0x12345678, 0xcafe(%r9d), %rsi
lwpval $0x12345678, 0xcafe(%r8d), %rdi
lwpval $0x12345678, 0xcafe(%eax), %r8
lwpval $0x12345678, 0xcafe(%ecx), %r9
lwpval $0x12345678, 0xcafe(%edx), %r10
lwpval $0x12345678, 0xcafe(%ebx), %r11
lwpval $0x12345678, 0xcafe(%esp), %r12
lwpval $0x12345678, 0xcafe(%ebp), %r13
lwpval $0x12345678, 0xcafe(%esi), %r14
lwpval $0x12345678, 0xcafe(%edi), %r15
|
tactcomplabs/xbgas-binutils-gdb
| 1,501
|
gas/testsuite/gas/i386/avx512f_vaes.s
|
# Check 32bit AVX512F,VAES instructions
# Test fixture: VAES (vector AES) with 512-bit operands, in both AT&T and
# Intel syntax. "Disp8" cases pick displacements representable with the
# EVEX compressed 8-bit displacement (8128 = 127 * 64).
# NOTE(review): paired with an expected-disassembly .d file — do not
# reorder or reformat the instruction lines.
.allow_index_reg
.text
_start:
vaesdec %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesdec -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesdec 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesdeclast %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesdeclast -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesdeclast 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenc %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesenc -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesenc 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenclast %zmm4, %zmm5, %zmm6 # AVX512F,VAES
vaesenclast -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,VAES
vaesenclast 8128(%edx), %zmm5, %zmm6 # AVX512F,VAES Disp8
# Same matrix in Intel syntax (operand order reversed, explicit sizes).
.intel_syntax noprefix
vaesdec zmm6, zmm5, zmm4 # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
vaesdeclast zmm6, zmm5, zmm4 # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
vaesenc zmm6, zmm5, zmm4 # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
vaesenclast zmm6, zmm5, zmm4 # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,VAES Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 1,697
|
gas/testsuite/gas/i386/inval.s
|
# Negative test fixture: every instruction below is intentionally invalid
# (wrong operand sizes, bogus suffixes, impossible register combinations).
# The assembler is expected to REJECT each one; do not "fix" any line.
.text
.allow_index_reg
# All the following should be illegal
mov (%dx),%al
mov (%eax,%esp,2),%al
setae %eax
pushb %ds
popb %ds
pushb %al
popb %al
pushb %ah
popb %ah
pushb %ax
popb %ax
pushb %eax
popb %eax
movb %ds,%ax
movb %ds,%eax
movb %ax,%ds
movb %eax,%ds
movdb %eax,%mm0
movqb 0,%mm0
ldsb 0,%eax
setnew 0
movdw %eax,%mm0
movqw 0,%mm0
div %cx,%al
div %cl,%ax
div %ecx,%al
imul 10,%bx,%ecx
imul 10,%bx,%al
popab
stil
aaab
cwdel
cwdw
callww 0
foo: jaw foo
jcxzw foo
jecxzl foo
loopb foo
xlatw %es:%bx
xlatl %es:%bx
intl 2
int3b
hltb
fstb %st(0)
fcompll 28(%ebp)
fldlw (%eax)
movl $%ebx,%eax
insertq $4,$2,%xmm2,%ebx
cvtsi2ssq (%eax),%xmm1
cvtsi2sdq (%eax),%xmm1
fnstsw %eax
fnstsw %al
fstsw %eax
fstsw %al
movnti %ax, (%eax)
movntiw %ax, (%eax)
add (%si,%esi), %eax
add (%esi,%si), %eax
add (%eiz), %eax
add (%eax), %eiz
mov %cr0, %di
mov %ax, %cr7
mov %cr0, %bh
mov %al, %cr7
# Same idea in Intel syntax: size mismatches and missing PTR sizes.
.intel_syntax noprefix
cvtsi2ss xmm1,QWORD PTR [eax]
cvtsi2sd xmm1,QWORD PTR [eax]
cvtsi2ssq xmm1,QWORD PTR [eax]
cvtsi2sdq xmm1,QWORD PTR [eax]
movq xmm1, XMMWORD PTR [esp]
movq xmm1, DWORD PTR [esp]
movq xmm1, WORD PTR [esp]
movq xmm1, BYTE PTR [esp]
movq XMMWORD PTR [esp],xmm1
movq DWORD PTR [esp],xmm1
movq WORD PTR [esp],xmm1
movq BYTE PTR [esp],xmm1
fnstsw eax
fnstsw al
fstsw eax
fstsw al
movsx ax, [eax]
movsx eax, [eax]
movzx ax, [eax]
movzx eax, [eax]
movnti word ptr [eax], ax
shl [eax], 1
rol [ecx], 2
rcl [edx], cl
# Back to AT&T syntax for register-size mismatch cases.
.att_syntax prefix
movsd (%esi), %ss:(%edi), %ss:(%eax)
movl %ds, %ax
movl %ax, %ds
movl %ax, %bx
movw %ds, %eax
movw %eax, %ds
movw %eax, %ebx
inb %dx, %ax
outb %ax, %dx
movb %ax, %bx
|
tactcomplabs/xbgas-binutils-gdb
| 10,956
|
gas/testsuite/gas/i386/hlebad.s
|
# Check 32bit unsupported HLE instructions
# Negative test fixture: xacquire/xrelease prefixes applied where HLE is
# NOT architecturally supported (register destinations, missing lock, or
# non-lockable instructions). The assembler is expected to warn/reject;
# do not "correct" any line.
.allow_index_reg
.text
_start:
# Tests for op imm8 al
xacquire adc $100,%al
xacquire lock adc $100,%al
lock xacquire adc $100,%al
xrelease adc $100,%al
xrelease lock adc $100,%al
lock xrelease adc $100,%al
# Tests for op imm16 ax
xacquire adc $1000,%ax
xacquire lock adc $1000,%ax
lock xacquire adc $1000,%ax
xrelease adc $1000,%ax
xrelease lock adc $1000,%ax
lock xrelease adc $1000,%ax
# Tests for op imm32 eax
xacquire adc $10000000,%eax
xacquire lock adc $10000000,%eax
lock xacquire adc $10000000,%eax
xrelease adc $10000000,%eax
xrelease lock adc $10000000,%eax
lock xrelease adc $10000000,%eax
# Tests for op imm8 regb/m8
xacquire adcb $100,%cl
xacquire lock adcb $100,%cl
lock xacquire adcb $100,%cl
xrelease adcb $100,%cl
xrelease lock adcb $100,%cl
lock xrelease adcb $100,%cl
xacquire adcb $100,(%ecx)
xrelease adcb $100,(%ecx)
# Tests for op imm16 regs/m16
xacquire adcw $1000,%cx
xacquire lock adcw $1000,%cx
lock xacquire adcw $1000,%cx
xrelease adcw $1000,%cx
xrelease lock adcw $1000,%cx
lock xrelease adcw $1000,%cx
xacquire adcw $1000,(%ecx)
xrelease adcw $1000,(%ecx)
# Tests for op imm32 regl/m32
xacquire adcl $10000000,%ecx
xacquire lock adcl $10000000,%ecx
lock xacquire adcl $10000000,%ecx
xrelease adcl $10000000,%ecx
xrelease lock adcl $10000000,%ecx
lock xrelease adcl $10000000,%ecx
xacquire adcl $10000000,(%ecx)
xrelease adcl $10000000,(%ecx)
# Tests for op imm8 regs/m16
xacquire adcw $100,%cx
xacquire lock adcw $100,%cx
lock xacquire adcw $100,%cx
xrelease adcw $100,%cx
xrelease lock adcw $100,%cx
lock xrelease adcw $100,%cx
xacquire adcw $100,(%ecx)
xrelease adcw $100,(%ecx)
# Tests for op imm8 regl/m32
xacquire adcl $100,%ecx
xacquire lock adcl $100,%ecx
lock xacquire adcl $100,%ecx
xrelease adcl $100,%ecx
xrelease lock adcl $100,%ecx
lock xrelease adcl $100,%ecx
xacquire adcl $100,(%ecx)
xrelease adcl $100,(%ecx)
# Tests for op imm8 regb/m8
xacquire adcb $100,%cl
xacquire lock adcb $100,%cl
lock xacquire adcb $100,%cl
xrelease adcb $100,%cl
xrelease lock adcb $100,%cl
lock xrelease adcb $100,%cl
xacquire adcb $100,(%ecx)
xrelease adcb $100,(%ecx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adcb %al,%cl
xacquire lock adcb %al,%cl
lock xacquire adcb %al,%cl
xrelease adcb %al,%cl
xrelease lock adcb %al,%cl
lock xrelease adcb %al,%cl
xacquire adcb %al,(%ecx)
xrelease adcb %al,(%ecx)
xacquire adcb %cl,%al
xacquire lock adcb %cl,%al
lock xacquire adcb %cl,%al
xrelease adcb %cl,%al
xrelease lock adcb %cl,%al
lock xrelease adcb %cl,%al
xacquire adcb (%ecx),%al
xacquire lock adcb (%ecx),%al
lock xacquire adcb (%ecx),%al
xrelease adcb (%ecx),%al
xrelease lock adcb (%ecx),%al
lock xrelease adcb (%ecx),%al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adcw %ax,%cx
xacquire lock adcw %ax,%cx
lock xacquire adcw %ax,%cx
xrelease adcw %ax,%cx
xrelease lock adcw %ax,%cx
lock xrelease adcw %ax,%cx
xacquire adcw %ax,(%ecx)
xrelease adcw %ax,(%ecx)
xacquire adcw %cx,%ax
xacquire lock adcw %cx,%ax
lock xacquire adcw %cx,%ax
xrelease adcw %cx,%ax
xrelease lock adcw %cx,%ax
lock xrelease adcw %cx,%ax
xacquire adcw (%ecx),%ax
xacquire lock adcw (%ecx),%ax
lock xacquire adcw (%ecx),%ax
xrelease adcw (%ecx),%ax
xrelease lock adcw (%ecx),%ax
lock xrelease adcw (%ecx),%ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adcl %eax,%ecx
xacquire lock adcl %eax,%ecx
lock xacquire adcl %eax,%ecx
xrelease adcl %eax,%ecx
xrelease lock adcl %eax,%ecx
lock xrelease adcl %eax,%ecx
xacquire adcl %eax,(%ecx)
xrelease adcl %eax,(%ecx)
xacquire adcl %ecx,%eax
xacquire lock adcl %ecx,%eax
lock xacquire adcl %ecx,%eax
xrelease adcl %ecx,%eax
xrelease lock adcl %ecx,%eax
lock xrelease adcl %ecx,%eax
xacquire adcl (%ecx),%eax
xacquire lock adcl (%ecx),%eax
lock xacquire adcl (%ecx),%eax
xrelease adcl (%ecx),%eax
xrelease lock adcl (%ecx),%eax
lock xrelease adcl (%ecx),%eax
# Tests for op regs, regs/m16
xacquire btcw %ax,%cx
xacquire lock btcw %ax,%cx
lock xacquire btcw %ax,%cx
xrelease btcw %ax,%cx
xrelease lock btcw %ax,%cx
lock xrelease btcw %ax,%cx
xacquire btcw %ax,(%ecx)
xrelease btcw %ax,(%ecx)
# Tests for op regl regl/m32
xacquire btcl %eax,%ecx
xacquire lock btcl %eax,%ecx
lock xacquire btcl %eax,%ecx
xrelease btcl %eax,%ecx
xrelease lock btcl %eax,%ecx
lock xrelease btcl %eax,%ecx
xacquire btcl %eax,(%ecx)
xrelease btcl %eax,(%ecx)
# Tests for op regb/m8
xacquire decb %cl
xacquire lock decb %cl
lock xacquire decb %cl
xrelease decb %cl
xrelease lock decb %cl
lock xrelease decb %cl
xacquire decb (%ecx)
xrelease decb (%ecx)
# Tests for op regs/m16
xacquire decw %cx
xacquire lock decw %cx
lock xacquire decw %cx
xrelease decw %cx
xrelease lock decw %cx
lock xrelease decw %cx
xacquire decw (%ecx)
xrelease decw (%ecx)
# Tests for op regl/m32
xacquire decl %ecx
xacquire lock decl %ecx
lock xacquire decl %ecx
xrelease decl %ecx
xrelease lock decl %ecx
lock xrelease decl %ecx
xacquire decl (%ecx)
xrelease decl (%ecx)
# Tests for op m64
xacquire cmpxchg8bq (%ecx)
xrelease cmpxchg8bq (%ecx)
# Tests for op regb, regb/m8
xacquire cmpxchgb %cl,%al
xacquire lock cmpxchgb %cl,%al
lock xacquire cmpxchgb %cl,%al
xrelease cmpxchgb %cl,%al
xrelease lock cmpxchgb %cl,%al
lock xrelease cmpxchgb %cl,%al
xacquire cmpxchgb %cl,(%ecx)
xrelease cmpxchgb %cl,(%ecx)
# Same matrix repeated in Intel syntax.
.intel_syntax noprefix
# Tests for op imm8 al
xacquire adc al,100
xacquire lock adc al,100
lock xacquire adc al,100
xrelease adc al,100
xrelease lock adc al,100
lock xrelease adc al,100
# Tests for op imm16 ax
xacquire adc ax,1000
xacquire lock adc ax,1000
lock xacquire adc ax,1000
xrelease adc ax,1000
xrelease lock adc ax,1000
lock xrelease adc ax,1000
# Tests for op imm32 eax
xacquire adc eax,10000000
xacquire lock adc eax,10000000
lock xacquire adc eax,10000000
xrelease adc eax,10000000
xrelease lock adc eax,10000000
lock xrelease adc eax,10000000
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [ecx],100
xrelease adc BYTE PTR [ecx],100
# Tests for op imm16 regs/m16
xacquire adc cx,1000
xacquire lock adc cx,1000
lock xacquire adc cx,1000
xrelease adc cx,1000
xrelease lock adc cx,1000
lock xrelease adc cx,1000
xacquire adc WORD PTR [ecx],1000
xrelease adc WORD PTR [ecx],1000
# Tests for op imm32 regl/m32
xacquire adc ecx,10000000
xacquire lock adc ecx,10000000
lock xacquire adc ecx,10000000
xrelease adc ecx,10000000
xrelease lock adc ecx,10000000
lock xrelease adc ecx,10000000
xacquire adc DWORD PTR [ecx],10000000
xrelease adc DWORD PTR [ecx],10000000
# Tests for op imm8 regs/m16
xacquire adc cx,100
xacquire lock adc cx,100
lock xacquire adc cx,100
xrelease adc cx,100
xrelease lock adc cx,100
lock xrelease adc cx,100
xacquire adc WORD PTR [ecx],100
xrelease adc WORD PTR [ecx],100
# Tests for op imm8 regl/m32
xacquire adc ecx,100
xacquire lock adc ecx,100
lock xacquire adc ecx,100
xrelease adc ecx,100
xrelease lock adc ecx,100
lock xrelease adc ecx,100
xacquire adc DWORD PTR [ecx],100
xrelease adc DWORD PTR [ecx],100
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [ecx],100
xrelease adc BYTE PTR [ecx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adc cl,al
xacquire lock adc cl,al
lock xacquire adc cl,al
xrelease adc cl,al
xrelease lock adc cl,al
lock xrelease adc cl,al
xacquire adc BYTE PTR [ecx],al
xrelease adc BYTE PTR [ecx],al
xacquire adc al,cl
xacquire lock adc al,cl
lock xacquire adc al,cl
xrelease adc al,cl
xrelease lock adc al,cl
lock xrelease adc al,cl
xacquire adc al,BYTE PTR [ecx]
xacquire lock adc al,BYTE PTR [ecx]
lock xacquire adc al,BYTE PTR [ecx]
xrelease adc al,BYTE PTR [ecx]
xrelease lock adc al,BYTE PTR [ecx]
lock xrelease adc al,BYTE PTR [ecx]
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adc cx,ax
xacquire lock adc cx,ax
lock xacquire adc cx,ax
xrelease adc cx,ax
xrelease lock adc cx,ax
lock xrelease adc cx,ax
xacquire adc WORD PTR [ecx],ax
xrelease adc WORD PTR [ecx],ax
xacquire adc ax,cx
xacquire lock adc ax,cx
lock xacquire adc ax,cx
xrelease adc ax,cx
xrelease lock adc ax,cx
lock xrelease adc ax,cx
xacquire adc ax,WORD PTR [ecx]
xacquire lock adc ax,WORD PTR [ecx]
lock xacquire adc ax,WORD PTR [ecx]
xrelease adc ax,WORD PTR [ecx]
xrelease lock adc ax,WORD PTR [ecx]
lock xrelease adc ax,WORD PTR [ecx]
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adc ecx,eax
xacquire lock adc ecx,eax
lock xacquire adc ecx,eax
xrelease adc ecx,eax
xrelease lock adc ecx,eax
lock xrelease adc ecx,eax
xacquire adc DWORD PTR [ecx],eax
xrelease adc DWORD PTR [ecx],eax
xacquire adc eax,ecx
xacquire lock adc eax,ecx
lock xacquire adc eax,ecx
xrelease adc eax,ecx
xrelease lock adc eax,ecx
lock xrelease adc eax,ecx
xacquire adc eax,DWORD PTR [ecx]
xacquire lock adc eax,DWORD PTR [ecx]
lock xacquire adc eax,DWORD PTR [ecx]
xrelease adc eax,DWORD PTR [ecx]
xrelease lock adc eax,DWORD PTR [ecx]
lock xrelease adc eax,DWORD PTR [ecx]
# Tests for op regs, regs/m16
xacquire btc cx,ax
xacquire lock btc cx,ax
lock xacquire btc cx,ax
xrelease btc cx,ax
xrelease lock btc cx,ax
lock xrelease btc cx,ax
xacquire btc WORD PTR [ecx],ax
xrelease btc WORD PTR [ecx],ax
# Tests for op regl regl/m32
xacquire btc ecx,eax
xacquire lock btc ecx,eax
lock xacquire btc ecx,eax
xrelease btc ecx,eax
xrelease lock btc ecx,eax
lock xrelease btc ecx,eax
xacquire btc DWORD PTR [ecx],eax
xrelease btc DWORD PTR [ecx],eax
# Tests for op regb/m8
xacquire dec cl
xacquire lock dec cl
lock xacquire dec cl
xrelease dec cl
xrelease lock dec cl
lock xrelease dec cl
xacquire dec BYTE PTR [ecx]
xrelease dec BYTE PTR [ecx]
# Tests for op regs/m16
xacquire dec cx
xacquire lock dec cx
lock xacquire dec cx
xrelease dec cx
xrelease lock dec cx
lock xrelease dec cx
xacquire dec WORD PTR [ecx]
xrelease dec WORD PTR [ecx]
# Tests for op regl/m32
xacquire dec ecx
xacquire lock dec ecx
lock xacquire dec ecx
xrelease dec ecx
xrelease lock dec ecx
lock xrelease dec ecx
xacquire dec DWORD PTR [ecx]
xrelease dec DWORD PTR [ecx]
# Tests for op m64
xacquire cmpxchg8b QWORD PTR [ecx]
xrelease cmpxchg8b QWORD PTR [ecx]
# Tests for op regb, regb/m8
xacquire cmpxchg al,cl
xacquire lock cmpxchg al,cl
lock xacquire cmpxchg al,cl
xrelease cmpxchg al,cl
xrelease lock cmpxchg al,cl
lock xrelease cmpxchg al,cl
xacquire cmpxchg BYTE PTR [ecx],cl
xrelease cmpxchg BYTE PTR [ecx],cl
|
tactcomplabs/xbgas-binutils-gdb
| 2,147
|
gas/testsuite/gas/i386/avx512_4vnniw.s
|
# Check 32bit AVX512_4VNNIW instructions
# Test fixture: vp4dpwssd/vp4dpwssds read a block of four sequential source
# registers plus a 128-bit memory operand — hence the XMMWORD PTR sizes
# below are intentional even though the destination is a zmm register.
# Displacements straddle the EVEX Disp8*N limit (N=16): +/-4064..4128.
# NOTE(review): paired with an expected-disassembly .d file — do not
# reorder or reformat the instruction lines.
.allow_index_reg
.text
_start:
vp4dpwssd (%ecx), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssd (%ecx), %zmm4, %zmm1{%k7} # AVX512_4VNNIW
vp4dpwssd (%ecx), %zmm4, %zmm1{%k7}{z} # AVX512_4VNNIW
vp4dpwssd -123456(%esp,%esi,8), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssd 4064(%edx), %zmm4, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssd 4096(%edx), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssd -4096(%edx), %zmm4, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssd -4128(%edx), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssds (%ecx), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssds (%ecx), %zmm4, %zmm1{%k7} # AVX512_4VNNIW
vp4dpwssds (%ecx), %zmm4, %zmm1{%k7}{z} # AVX512_4VNNIW
vp4dpwssds -123456(%esp,%esi,8), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssds 4064(%edx), %zmm4, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssds 4096(%edx), %zmm4, %zmm1 # AVX512_4VNNIW
vp4dpwssds -4096(%edx), %zmm4, %zmm1 # AVX512_4VNNIW Disp8
vp4dpwssds -4128(%edx), %zmm4, %zmm1 # AVX512_4VNNIW
# Same matrix in Intel syntax.
.intel_syntax noprefix
vp4dpwssd zmm1, zmm4, [ecx] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm4, XMMWORD PTR [ecx] # AVX512_4VNNIW
vp4dpwssd zmm1{k7}, zmm4, XMMWORD PTR [ecx] # AVX512_4VNNIW
vp4dpwssd zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx+4064] # AVX512_4VNNIW Disp8
vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx+4096] # AVX512_4VNNIW
vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx-4096] # AVX512_4VNNIW Disp8
vp4dpwssd zmm1, zmm4, XMMWORD PTR [edx-4128] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm4, [ecx] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm4, XMMWORD PTR [ecx] # AVX512_4VNNIW
vp4dpwssds zmm1{k7}, zmm4, XMMWORD PTR [ecx] # AVX512_4VNNIW
vp4dpwssds zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx+4064] # AVX512_4VNNIW Disp8
vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx+4096] # AVX512_4VNNIW
vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx-4096] # AVX512_4VNNIW Disp8
vp4dpwssds zmm1, zmm4, XMMWORD PTR [edx-4128] # AVX512_4VNNIW
|
tactcomplabs/xbgas-binutils-gdb
| 2,454
|
gas/testsuite/gas/i386/intel-movs.s
|
# Test fixture for the string-move instructions in Intel syntax.
# NOTE: "movsd" here is the string move (movs dword), NOT the SSE2 scalar
# double move — this file checks that gas resolves the ambiguity from the
# memory-operand forms. adi/asi alias the address register appropriate to
# the selected mode (di/si in 16-bit, rdi/rsi in 64-bit).
.text
.intel_syntax noprefix
.ifdef x86_16
.code16
.endif
.ifdef x86_64
.equ adi, rdi
.equ asi, rsi
.else
.equ adi, di
.equ asi, si
.endif
movs:
# Byte-size forms: implicit, explicit es:/fs: overrides, and ptr sizes.
movsb
movsb es:[edi], [esi]
movsb es:[edi], fs:[esi]
movsb [edi], [esi]
movsb byte ptr es:[edi], [esi]
movsb es:[edi], byte ptr [esi]
movsb byte ptr es:[edi], byte ptr [esi]
movs byte ptr es:[edi], [esi]
movs es:[edi], byte ptr [esi]
movs byte ptr es:[edi], byte ptr [esi]
movsb es:[adi], [asi]
movsb es:[adi], fs:[asi]
movsb [adi], [asi]
movsb byte ptr es:[adi], [asi]
movsb es:[adi], byte ptr [asi]
movsb byte ptr es:[adi], byte ptr [asi]
movs byte ptr es:[adi], [asi]
movs es:[adi], byte ptr [asi]
movs byte ptr es:[adi], byte ptr [asi]
# Word-size forms.
movsw
movsw es:[edi], [esi]
movsw es:[edi], fs:[esi]
movsw [edi], [esi]
movsw word ptr es:[edi], [esi]
movsw es:[edi], word ptr [esi]
movsw word ptr es:[edi], word ptr [esi]
movs word ptr es:[edi], [esi]
movs es:[edi], word ptr [esi]
movs word ptr es:[edi], word ptr [esi]
movsw es:[adi], [asi]
movsw es:[adi], fs:[asi]
movsw [adi], [asi]
movsw word ptr es:[adi], [asi]
movsw es:[adi], word ptr [asi]
movsw word ptr es:[adi], word ptr [asi]
movs word ptr es:[adi], [asi]
movs es:[adi], word ptr [asi]
movs word ptr es:[adi], word ptr [asi]
# Dword-size forms (string movsd, disambiguated from SSE2 movsd).
movsd
movsd es:[edi], [esi]
movsd es:[edi], fs:[esi]
movsd [edi], [esi]
movsd dword ptr es:[edi], [esi]
movsd es:[edi], dword ptr [esi]
movsd dword ptr es:[edi], dword ptr [esi]
movs dword ptr es:[edi], [esi]
movs es:[edi], dword ptr [esi]
movs dword ptr es:[edi], dword ptr [esi]
movsd es:[adi], [asi]
movsd es:[adi], fs:[asi]
movsd [adi], [asi]
movsd dword ptr es:[adi], [asi]
movsd es:[adi], dword ptr [asi]
movsd dword ptr es:[adi], dword ptr [asi]
movs dword ptr es:[adi], [asi]
movs es:[adi], dword ptr [asi]
movs dword ptr es:[adi], dword ptr [asi]
# Qword-size forms, 64-bit mode only (with 32-bit address-size variants).
.ifdef x86_64
movsq
movsq es:[rdi], [rsi]
movsq es:[rdi], fs:[rsi]
movsq [rdi], [rsi]
movsq qword ptr es:[rdi], [rsi]
movsq es:[rdi], qword ptr [rsi]
movsq qword ptr es:[rdi], qword ptr [rsi]
movs qword ptr es:[rdi], [rsi]
movs es:[rdi], qword ptr [rsi]
movs qword ptr es:[rdi], qword ptr [rsi]
movsq es:[edi], [esi]
movsq es:[edi], fs:[esi]
movsq [edi], [esi]
movsq qword ptr es:[edi], [esi]
movsq es:[edi], qword ptr [esi]
movsq qword ptr es:[edi], qword ptr [esi]
movs qword ptr es:[edi], [esi]
movs es:[edi], qword ptr [esi]
movs qword ptr es:[edi], qword ptr [esi]
.endif
|
tactcomplabs/xbgas-binutils-gdb
| 4,496
|
gas/testsuite/gas/i386/tbm.s
|
# Check 32bit TBM (AMD Trailing Bit Manipulation) instructions:
# BEXTR (immediate form), BLCFILL, BLCI, BLCIC, BLCMSK, BLCS,
# BLSFILL, BLSIC, T1MSKC, TZMSK — register and memory operands with
# varied addressing modes, displacements and immediates.
# NOTE(review): paired with an expected-disassembly .d file — do not
# reorder or reformat the instruction lines.
.allow_index_reg
.text
_start:
BEXTR $0x67,(%edx,%esi,8),%ebx
BEXTR $0x0,%esi,%eax
BEXTR $0x7FFFFFFF,%eax,%edi
BEXTR $0x35B2,(%esi),%esp
BEXTR $0x9C86,%edi,%ebp
BEXTR $0x3,%ecx,%ecx
BEXTR $0xEE,-0x3(%ebx,%eax,2),%esi
BEXTR $0x55,(%ebx),%esp
BEXTR $0x4EE8,(%edx),%edx
BEXTR $0x0,%ebx,%edi
BEXTR $0xDC,%esp,%esi
BEXTR $0xA9,(%eax),%eax
BEXTR $0x189,%edx,%ebp
BEXTR $0x84,0x0(%ecx,%eax,2),%ecx
BEXTR $0xCAFE,(%ecx,%eax),%eax
BEXTR $0xDEAD,0x7109(%esi,%edi),%edi
BLCFILL (%ecx),%eax
BLCFILL %esi,%edi
BLCFILL %eax,%ecx
BLCFILL %edi,%esi
BLCFILL (%esi),%esp
BLCFILL (%ebx),%ebp
BLCFILL 0x1A95(%ebx,%eax),%edx
BLCFILL (%edx),%edi
BLCFILL %ebx,%edi
BLCFILL 0xCE(%eax,%esi),%eax
BLCFILL -0xCAFE(,%ebx,1),%eax
BLCFILL -0xAE5F(,%eax),%ebx
BLCFILL %ecx,%edi
BLCFILL %esp,%eax
BLCFILL %ebp,%edi
BLCFILL (%esi,%ecx,2),%eax
BLCI %eax,%ecx
BLCI %ecx,%ebx
BLCI 0x12B0(,%eax,2),%eax
BLCI (%eax),%edi
BLCI %edi,%esi
BLCI %esp,%edx
BLCI %esi,%ebp
BLCI %edx,%eax
BLCI -0x72A9(%ebx,%eax,4),%esp
BLCI (%esi),%ebx
BLCI (%ebx,%esi,2),%eax
BLCI (%ebx),%edx
BLCI %ebx,%eax
BLCI 0xE0A2(%ebx,%edx,4),%ecx
BLCI (%edi),%edi
BLCI 0x3FFFFFFF(,%eax,2),%eax
BLCIC %edi,%ecx
BLCIC %eax,%edi
BLCIC (%eax),%ebx
BLCIC %ecx,%edx
BLCIC %esi,%esp
BLCIC -0xCAFE(,%ebx),%ebp
BLCIC %ebp,%eax
BLCIC (%esi),%esi
BLCIC %esp,%ebx
BLCIC 0x0(%edi,%edi,1),%esi
BLCIC -0x3FFFFFFF(,%esi),%ebp
BLCIC (%ebx),%edi
BLCIC 0x8(%edi,%eax,8),%eax
BLCIC 0x3A574AD1(%ecx),%edi
BLCIC %esp,%edi
BLCIC %edx,%edi
BLCMSK 0xC(%eax),%edi
BLCMSK (%esi,%edx),%ebp
BLCMSK -0x1DC2DE00(%edi),%ecx
BLCMSK %eax,%esp
BLCMSK 0x0(%edi,%edx,2),%eax
BLCMSK (%ebx),%edx
BLCMSK (%edx),%edi
BLCMSK %esi,%esi
BLCMSK %esp,%edi
BLCMSK %edi,%esp
BLCMSK -0x0(%ebx,%eax,8),%ebx
BLCMSK (%edi),%eax
BLCMSK %edx,%eax
BLCMSK 0x67(%ebx,%edi),%edi
BLCMSK -0x55ED2760(,%eax),%edi
BLCMSK 0x1(,%eax),%eax
BLCS %edx,%esi
BLCS (%ebx),%eax
BLCS %eax,%edi
BLCS 0xCAFE(%ecx,%eax),%esp
BLCS %edi,%ebp
BLCS (%edx),%ecx
BLCS (%edi),%edi
BLCS -0xCAFE(%ebx),%ebx
BLCS %esp,%ecx
BLCS %esi,%edx
BLCS (%eax),%edi
BLCS 0x1(,%ecx,1),%edi
BLCS %ecx,%eax
BLCS (%ebx,%edx),%edi
BLCS -0xDEAD(%eax,%eax),%eax
BLCS 0x0(%ebx,%edx),%edi
BLSFILL %eax,%eax
BLSFILL %ecx,%esi
BLSFILL (%eax),%edi
BLSFILL %ebx,%esp
BLSFILL %edx,%edx
BLSFILL (%ecx),%ecx
BLSFILL %edi,%edi
BLSFILL %ebp,%ebp
BLSFILL (%edi),%edi
BLSFILL (%ebx),%ebx
BLSFILL (%esi),%eax
BLSFILL (%eax,%eax,4),%eax
BLSFILL %esi,%edi
BLSFILL 0xA221(%eax,%ebx),%edi
BLSFILL (%eax,%eax,1),%eax
BLSFILL -0x8(,%ebx,2),%ecx
BLSIC %eax,%edi
BLSIC (%esi),%ebx
BLSIC (,%ebx,2),%ebp
BLSIC (%ecx,%eax,2),%eax
BLSIC (%edi),%esp
BLSIC (%ebx),%eax
BLSIC %edi,%ecx
BLSIC 0x51(%eax,%ebx,1),%edi
BLSIC %esp,%edx
BLSIC -0x67(%esi,%edi),%edx
BLSIC (%ecx),%edi
BLSIC 0x67(%esi,%ecx,4),%esi
BLSIC 0x81(%ebx,%edx,8),%edi
BLSIC 0xE(%ecx,%edx),%edi
BLSIC 0x3B(%eax),%esp
BLSIC %ecx,%edi
T1MSKC %eax,%eax
T1MSKC %edi,%edi
T1MSKC (%ecx),%ecx
T1MSKC (%ebx,%esi,1),%esi
T1MSKC %edx,%ebp
T1MSKC 0x0(,%ecx,1),%edx
T1MSKC (,%esi,4),%esp
T1MSKC %ebx,%ecx
T1MSKC (%ebx),%ebx
T1MSKC %esp,%edi
T1MSKC (%eax),%edi
T1MSKC %ecx,%eax
T1MSKC 0xDEAD(%eax),%edi
T1MSKC %ecx,%edx
T1MSKC 0xDEAD(,%edx),%ebx
T1MSKC (%edx),%edi
TZMSK (%ebx),%esp
TZMSK %edi,%eax
TZMSK -0xCAFE(%edi),%esi
TZMSK (,%edi,1),%edx
TZMSK %eax,%ebp
TZMSK %ebp,%ebx
TZMSK (%esi),%edi
TZMSK (%ecx),%ecx
TZMSK (,%eax,2),%edi
TZMSK %edi,%edi
TZMSK %esp,%edx
TZMSK (%eax),%ecx
TZMSK (%edx,%edi),%eax
TZMSK (%ebx),%eax
TZMSK 0x2A2AC6D9(%ebx),%eax
TZMSK -0x16B9(%ecx,%eax,1),%ecx
|
tactcomplabs/xbgas-binutils-gdb
| 4,226
|
gas/testsuite/gas/i386/avx512_4fmaps.s
|
# Check 32bit AVX512_4FMAPS instructions
# Test fixture: v4fmaddps/v4fnmaddps (packed) and v4fmaddss/v4fnmaddss
# (scalar) read a block of four sequential source registers plus a 128-bit
# memory operand — so the XMMWORD PTR sizes below are intentional.
# Displacements straddle the EVEX Disp8*N limit (N=16): +/-0x7f0..0x810.
# NOTE(review): paired with an expected-disassembly .d file — do not
# reorder or reformat the instruction lines.
.allow_index_reg
.text
_start:
v4fmaddps (%ecx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddps (%ecx), %zmm4, %zmm1{%k7} # AVX512_4FMAPS
v4fmaddps (%ecx), %zmm4, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddps -123456(%esp,%esi,8), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddps 0x7f0(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps 0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddps -0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fmaddps -0x810(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%ecx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps (%ecx), %zmm4, %zmm1{%k7} # AVX512_4FMAPS
v4fnmaddps (%ecx), %zmm4, %zmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddps -123456(%esp,%esi,8), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps 0x7f0(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps 0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fnmaddps -0x800(%edx), %zmm4, %zmm1 # AVX512_4FMAPS Disp8
v4fnmaddps -0x810(%edx), %zmm4, %zmm1 # AVX512_4FMAPS
v4fmaddss (%ecx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fmaddss (%ecx), %xmm4, %xmm1{%k7} # AVX512_4FMAPS
v4fmaddss (%ecx), %xmm4, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fmaddss -123456(%esp,%esi,8), %xmm4, %xmm1 # AVX512_4FMAPS
v4fmaddss 0x7f0(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss 0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fmaddss -0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fmaddss -0x810(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%ecx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss (%ecx), %xmm4, %xmm1{%k7} # AVX512_4FMAPS
v4fnmaddss (%ecx), %xmm4, %xmm1{%k7}{z} # AVX512_4FMAPS
v4fnmaddss -123456(%esp,%esi,8), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss 0x7f0(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss 0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
v4fnmaddss -0x800(%edx), %xmm4, %xmm1 # AVX512_4FMAPS Disp8
v4fnmaddss -0x810(%edx), %xmm4, %xmm1 # AVX512_4FMAPS
# Same matrix in Intel syntax.
.intel_syntax noprefix
v4fmaddps zmm1, zmm4, [ecx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddps zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fmaddps zmm1, zmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1{k7}{z}, zmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddps zmm1, zmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, [ecx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddss xmm1{k7}{z}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fmaddss xmm1, xmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1{k7}{z}, xmm4, XMMWORD PTR [ecx] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [esp+esi*8-123456] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx+0x7f0] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx+0x800] # AVX512_4FMAPS
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx-0x800] # AVX512_4FMAPS Disp8
v4fnmaddss xmm1, xmm4, XMMWORD PTR [edx-0x810] # AVX512_4FMAPS
|
tactcomplabs/xbgas-binutils-gdb
| 1,774
|
gas/testsuite/gas/i386/gotpc.s
|
	# Test fixture for i386 GOTPC/GOTOFF relocation handling: references
	# _GLOBAL_OFFSET_TABLE_ as an immediate, as a displacement, as a memory
	# operand (plain and %gs-prefixed), via a .long data directive, and with
	# an explicit @GOTOFF modifier — in both AT&T and Intel syntax.
	# The [.-test] term biases the immediate by the offset from the `test'
	# label, so the PC-relative GOT distance resolves correctly per insn.
	# NOTE(review): expected relocations/encodings live in the matching .d
	# dump file; do not alter, reorder, or reformat the instruction lines.
	.text
test:
	addl	$_GLOBAL_OFFSET_TABLE_+[.-test], %eax
	addl	$_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
	addl	$_GLOBAL_OFFSET_TABLE_, %eax
	addl	$_GLOBAL_OFFSET_TABLE_, %ebx
	leal	_GLOBAL_OFFSET_TABLE_+[.-test](%eax), %ebx
	leal	_GLOBAL_OFFSET_TABLE_+[.-test](%ebx), %eax
	leal	_GLOBAL_OFFSET_TABLE_+[.-test](%eax), %eax
	leal	_GLOBAL_OFFSET_TABLE_+[.-test](%ebx), %ebx
	subl	$_GLOBAL_OFFSET_TABLE_+[.-test], %eax
	subl	$_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
	subl	$_GLOBAL_OFFSET_TABLE_, %eax
	subl	$_GLOBAL_OFFSET_TABLE_, %ebx
	orl	$_GLOBAL_OFFSET_TABLE_+[.-test], %eax
	orl	$_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
	orl	$_GLOBAL_OFFSET_TABLE_, %eax
	orl	$_GLOBAL_OFFSET_TABLE_, %ebx
	movl	$_GLOBAL_OFFSET_TABLE_+[.-test], %eax
	movl	$_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
	movl	$_GLOBAL_OFFSET_TABLE_, %eax
	movl	$_GLOBAL_OFFSET_TABLE_, %ebx
	# Immediate-to-memory forms, including a %gs segment override both as
	# an operand prefix and as a standalone `gs;' prefix statement.
	movl	$_GLOBAL_OFFSET_TABLE_+[.-test], foo
	movl	$_GLOBAL_OFFSET_TABLE_+[.-test], %gs:foo
	gs; movl	$_GLOBAL_OFFSET_TABLE_+[.-test], foo
	movl	$_GLOBAL_OFFSET_TABLE_+[.-test], _GLOBAL_OFFSET_TABLE_
	movl	_GLOBAL_OFFSET_TABLE_+[.-test], %eax
	movl	_GLOBAL_OFFSET_TABLE_+[.-test], %ebx
	movl	%eax, _GLOBAL_OFFSET_TABLE_+[.-test]
	movl	%ebx, _GLOBAL_OFFSET_TABLE_+[.-test]
	movl	%eax, %gs:_GLOBAL_OFFSET_TABLE_+[.-test]
	movl	%ebx, %gs:_GLOBAL_OFFSET_TABLE_+[.-test]
	gs; movl	%eax, _GLOBAL_OFFSET_TABLE_+[.-test]
	gs; movl	%ebx, _GLOBAL_OFFSET_TABLE_+[.-test]
	# @GOTOFF modifier forms (offset of the GOT symbol from the GOT base).
	leal	_GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %eax
	leal	_GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %ebx
	movl	_GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %eax
	movl	_GLOBAL_OFFSET_TABLE_@GOTOFF(%ebx), %ebx
	# Data-directive references rather than instruction operands.
	.long	_GLOBAL_OFFSET_TABLE_+[.-test]
	.long	_GLOBAL_OFFSET_TABLE_@GOTOFF
	# The space before the parenthesised base register is deliberate:
	# it checks tokenisation of "@GOTOFF (%ebx)" with intervening blanks.
	movl	_GLOBAL_OFFSET_TABLE_@GOTOFF (%ebx), %eax
	# Same GOT reference expressed in Intel syntax via OFFSET FLAT:.
	.intel_syntax noprefix
	add	ebx, OFFSET FLAT:_GLOBAL_OFFSET_TABLE_
|
tactcomplabs/xbgas-binutils-gdb
| 4,569
|
gas/testsuite/gas/i386/x86-64-avx512bw_vl-opts.s
|
# Check 64bit AVX512{BW,VL} swap instructions
# Test fixture: each plain vmovdqu8/vmovdqu16 register-to-register move is
# paired with its ".s" pseudo-suffix form, which requests the alternative
# (operand-swapped, store-form opcode) EVEX encoding of the same move.
# Coverage spans xmm and ymm operands, with plain, merge-masking ({%k7})
# and zero-masking ({%k7}{z}) destinations, first in AT&T syntax and then
# repeated in Intel syntax after the .intel_syntax switch below.
# High registers (29/30) force EVEX encoding; they require 64-bit mode.
# NOTE(review): expected encodings live in the matching .d dump file;
# do not reorder or reformat the instruction lines.
.allow_index_reg
	.text
_start:
	vmovdqu8	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu8.s	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu8	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8.s	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8.s	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu8.s	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu8	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8.s	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8.s	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu8.s	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu8	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8.s	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8.s	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu8.s	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu8	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8.s	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu8	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu8.s	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu16.s	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu16	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16.s	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16.s	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu16.s	%xmm29, %xmm30	 # AVX512{BW,VL}
	vmovdqu16	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16.s	%xmm29, %xmm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16.s	%xmm29, %xmm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu16.s	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu16	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16.s	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16.s	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu16.s	%ymm29, %ymm30	 # AVX512{BW,VL}
	vmovdqu16	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16.s	%ymm29, %ymm30{%k7}	 # AVX512{BW,VL}
	vmovdqu16	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	vmovdqu16.s	%ymm29, %ymm30{%k7}{z}	 # AVX512{BW,VL}
	# Same coverage repeated in Intel syntax (operand order reversed,
	# no register prefixes); encodings must match the AT&T block above.
	.intel_syntax noprefix
	vmovdqu8	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu8.s	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu8	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu8.s	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu8	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu8.s	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu8	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu8.s	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu8	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu8.s	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu8	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu8.s	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu8	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu8.s	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu8	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu8.s	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu8	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu8.s	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu8	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu8.s	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu8	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu8.s	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu8	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu8.s	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu16	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu16.s	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu16	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu16.s	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu16	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu16.s	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu16	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu16.s	xmm30, xmm29	 # AVX512{BW,VL}
	vmovdqu16	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu16.s	xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vmovdqu16	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu16.s	xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vmovdqu16	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu16.s	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu16	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu16.s	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu16	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu16.s	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu16	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu16.s	ymm30, ymm29	 # AVX512{BW,VL}
	vmovdqu16	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu16.s	ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vmovdqu16	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vmovdqu16.s	ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 83,054
|
gas/testsuite/gas/i386/avx512_fp16_pseudo_ops.s
|
# Check 32bit VCM.*{PH,SH} instructions
.allow_index_reg
.text
_start:
vcmpeq_oqph %zmm5, %zmm6, %k5
vcmpeq_oqph %zmm5, %zmm6, %k5{%k7}
vcmpeq_oqph {sae}, %zmm5, %zmm6, %k5
vcmpeq_oqph (%ecx), %zmm6, %k5
vcmpeq_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_oqph (%eax){1to32}, %zmm6, %k5
vcmpeq_oqph 8128(%edx), %zmm6, %k5
vcmpeq_oqph 8192(%edx), %zmm6, %k5
vcmpeq_oqph -8192(%edx), %zmm6, %k5
vcmpeq_oqph -8256(%edx), %zmm6, %k5
vcmpeq_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpeqph %zmm5, %zmm6, %k5
vcmpeqph %zmm5, %zmm6, %k5{%k7}
vcmpeqph {sae}, %zmm5, %zmm6, %k5
vcmpeqph (%ecx), %zmm6, %k5
vcmpeqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeqph (%eax){1to32}, %zmm6, %k5
vcmpeqph 8128(%edx), %zmm6, %k5
vcmpeqph 8192(%edx), %zmm6, %k5
vcmpeqph -8192(%edx), %zmm6, %k5
vcmpeqph -8256(%edx), %zmm6, %k5
vcmpeqph 1016(%edx){1to32}, %zmm6, %k5
vcmpeqph 1024(%edx){1to32}, %zmm6, %k5
vcmpeqph -1024(%edx){1to32}, %zmm6, %k5
vcmpeqph -1032(%edx){1to32}, %zmm6, %k5
vcmplt_osph %zmm5, %zmm6, %k5
vcmplt_osph %zmm5, %zmm6, %k5{%k7}
vcmplt_osph {sae}, %zmm5, %zmm6, %k5
vcmplt_osph (%ecx), %zmm6, %k5
vcmplt_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmplt_osph (%eax){1to32}, %zmm6, %k5
vcmplt_osph 8128(%edx), %zmm6, %k5
vcmplt_osph 8192(%edx), %zmm6, %k5
vcmplt_osph -8192(%edx), %zmm6, %k5
vcmplt_osph -8256(%edx), %zmm6, %k5
vcmplt_osph 1016(%edx){1to32}, %zmm6, %k5
vcmplt_osph 1024(%edx){1to32}, %zmm6, %k5
vcmplt_osph -1024(%edx){1to32}, %zmm6, %k5
vcmplt_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpltph %zmm5, %zmm6, %k5
vcmpltph %zmm5, %zmm6, %k5{%k7}
vcmpltph {sae}, %zmm5, %zmm6, %k5
vcmpltph (%ecx), %zmm6, %k5
vcmpltph -123456(%esp,%esi,8), %zmm6, %k5
vcmpltph (%eax){1to32}, %zmm6, %k5
vcmpltph 8128(%edx), %zmm6, %k5
vcmpltph 8192(%edx), %zmm6, %k5
vcmpltph -8192(%edx), %zmm6, %k5
vcmpltph -8256(%edx), %zmm6, %k5
vcmpltph 1016(%edx){1to32}, %zmm6, %k5
vcmpltph 1024(%edx){1to32}, %zmm6, %k5
vcmpltph -1024(%edx){1to32}, %zmm6, %k5
vcmpltph -1032(%edx){1to32}, %zmm6, %k5
vcmple_osph %zmm5, %zmm6, %k5
vcmple_osph %zmm5, %zmm6, %k5{%k7}
vcmple_osph {sae}, %zmm5, %zmm6, %k5
vcmple_osph (%ecx), %zmm6, %k5
vcmple_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmple_osph (%eax){1to32}, %zmm6, %k5
vcmple_osph 8128(%edx), %zmm6, %k5
vcmple_osph 8192(%edx), %zmm6, %k5
vcmple_osph -8192(%edx), %zmm6, %k5
vcmple_osph -8256(%edx), %zmm6, %k5
vcmple_osph 1016(%edx){1to32}, %zmm6, %k5
vcmple_osph 1024(%edx){1to32}, %zmm6, %k5
vcmple_osph -1024(%edx){1to32}, %zmm6, %k5
vcmple_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpleph %zmm5, %zmm6, %k5
vcmpleph %zmm5, %zmm6, %k5{%k7}
vcmpleph {sae}, %zmm5, %zmm6, %k5
vcmpleph (%ecx), %zmm6, %k5
vcmpleph -123456(%esp,%esi,8), %zmm6, %k5
vcmpleph (%eax){1to32}, %zmm6, %k5
vcmpleph 8128(%edx), %zmm6, %k5
vcmpleph 8192(%edx), %zmm6, %k5
vcmpleph -8192(%edx), %zmm6, %k5
vcmpleph -8256(%edx), %zmm6, %k5
vcmpleph 1016(%edx){1to32}, %zmm6, %k5
vcmpleph 1024(%edx){1to32}, %zmm6, %k5
vcmpleph -1024(%edx){1to32}, %zmm6, %k5
vcmpleph -1032(%edx){1to32}, %zmm6, %k5
vcmpunord_qph %zmm5, %zmm6, %k5
vcmpunord_qph %zmm5, %zmm6, %k5{%k7}
vcmpunord_qph {sae}, %zmm5, %zmm6, %k5
vcmpunord_qph (%ecx), %zmm6, %k5
vcmpunord_qph -123456(%esp,%esi,8), %zmm6, %k5
vcmpunord_qph (%eax){1to32}, %zmm6, %k5
vcmpunord_qph 8128(%edx), %zmm6, %k5
vcmpunord_qph 8192(%edx), %zmm6, %k5
vcmpunord_qph -8192(%edx), %zmm6, %k5
vcmpunord_qph -8256(%edx), %zmm6, %k5
vcmpunord_qph 1016(%edx){1to32}, %zmm6, %k5
vcmpunord_qph 1024(%edx){1to32}, %zmm6, %k5
vcmpunord_qph -1024(%edx){1to32}, %zmm6, %k5
vcmpunord_qph -1032(%edx){1to32}, %zmm6, %k5
vcmpunordph %zmm5, %zmm6, %k5
vcmpunordph %zmm5, %zmm6, %k5{%k7}
vcmpunordph {sae}, %zmm5, %zmm6, %k5
vcmpunordph (%ecx), %zmm6, %k5
vcmpunordph -123456(%esp,%esi,8), %zmm6, %k5
vcmpunordph (%eax){1to32}, %zmm6, %k5
vcmpunordph 8128(%edx), %zmm6, %k5
vcmpunordph 8192(%edx), %zmm6, %k5
vcmpunordph -8192(%edx), %zmm6, %k5
vcmpunordph -8256(%edx), %zmm6, %k5
vcmpunordph 1016(%edx){1to32}, %zmm6, %k5
vcmpunordph 1024(%edx){1to32}, %zmm6, %k5
vcmpunordph -1024(%edx){1to32}, %zmm6, %k5
vcmpunordph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph %zmm5, %zmm6, %k5
vcmpneq_uqph %zmm5, %zmm6, %k5{%k7}
vcmpneq_uqph {sae}, %zmm5, %zmm6, %k5
vcmpneq_uqph (%ecx), %zmm6, %k5
vcmpneq_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_uqph (%eax){1to32}, %zmm6, %k5
vcmpneq_uqph 8128(%edx), %zmm6, %k5
vcmpneq_uqph 8192(%edx), %zmm6, %k5
vcmpneq_uqph -8192(%edx), %zmm6, %k5
vcmpneq_uqph -8256(%edx), %zmm6, %k5
vcmpneq_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpneqph %zmm5, %zmm6, %k5
vcmpneqph %zmm5, %zmm6, %k5{%k7}
vcmpneqph {sae}, %zmm5, %zmm6, %k5
vcmpneqph (%ecx), %zmm6, %k5
vcmpneqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneqph (%eax){1to32}, %zmm6, %k5
vcmpneqph 8128(%edx), %zmm6, %k5
vcmpneqph 8192(%edx), %zmm6, %k5
vcmpneqph -8192(%edx), %zmm6, %k5
vcmpneqph -8256(%edx), %zmm6, %k5
vcmpneqph 1016(%edx){1to32}, %zmm6, %k5
vcmpneqph 1024(%edx){1to32}, %zmm6, %k5
vcmpneqph -1024(%edx){1to32}, %zmm6, %k5
vcmpneqph -1032(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph %zmm5, %zmm6, %k5
vcmpnlt_usph %zmm5, %zmm6, %k5{%k7}
vcmpnlt_usph {sae}, %zmm5, %zmm6, %k5
vcmpnlt_usph (%ecx), %zmm6, %k5
vcmpnlt_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnlt_usph (%eax){1to32}, %zmm6, %k5
vcmpnlt_usph 8128(%edx), %zmm6, %k5
vcmpnlt_usph 8192(%edx), %zmm6, %k5
vcmpnlt_usph -8192(%edx), %zmm6, %k5
vcmpnlt_usph -8256(%edx), %zmm6, %k5
vcmpnlt_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnltph %zmm5, %zmm6, %k5
vcmpnltph %zmm5, %zmm6, %k5{%k7}
vcmpnltph {sae}, %zmm5, %zmm6, %k5
vcmpnltph (%ecx), %zmm6, %k5
vcmpnltph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnltph (%eax){1to32}, %zmm6, %k5
vcmpnltph 8128(%edx), %zmm6, %k5
vcmpnltph 8192(%edx), %zmm6, %k5
vcmpnltph -8192(%edx), %zmm6, %k5
vcmpnltph -8256(%edx), %zmm6, %k5
vcmpnltph 1016(%edx){1to32}, %zmm6, %k5
vcmpnltph 1024(%edx){1to32}, %zmm6, %k5
vcmpnltph -1024(%edx){1to32}, %zmm6, %k5
vcmpnltph -1032(%edx){1to32}, %zmm6, %k5
vcmpnle_usph %zmm5, %zmm6, %k5
vcmpnle_usph %zmm5, %zmm6, %k5{%k7}
vcmpnle_usph {sae}, %zmm5, %zmm6, %k5
vcmpnle_usph (%ecx), %zmm6, %k5
vcmpnle_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnle_usph (%eax){1to32}, %zmm6, %k5
vcmpnle_usph 8128(%edx), %zmm6, %k5
vcmpnle_usph 8192(%edx), %zmm6, %k5
vcmpnle_usph -8192(%edx), %zmm6, %k5
vcmpnle_usph -8256(%edx), %zmm6, %k5
vcmpnle_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpnle_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpnle_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpnle_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnleph %zmm5, %zmm6, %k5
vcmpnleph %zmm5, %zmm6, %k5{%k7}
vcmpnleph {sae}, %zmm5, %zmm6, %k5
vcmpnleph (%ecx), %zmm6, %k5
vcmpnleph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnleph (%eax){1to32}, %zmm6, %k5
vcmpnleph 8128(%edx), %zmm6, %k5
vcmpnleph 8192(%edx), %zmm6, %k5
vcmpnleph -8192(%edx), %zmm6, %k5
vcmpnleph -8256(%edx), %zmm6, %k5
vcmpnleph 1016(%edx){1to32}, %zmm6, %k5
vcmpnleph 1024(%edx){1to32}, %zmm6, %k5
vcmpnleph -1024(%edx){1to32}, %zmm6, %k5
vcmpnleph -1032(%edx){1to32}, %zmm6, %k5
vcmpord_qph %zmm5, %zmm6, %k5
vcmpord_qph %zmm5, %zmm6, %k5{%k7}
vcmpord_qph {sae}, %zmm5, %zmm6, %k5
vcmpord_qph (%ecx), %zmm6, %k5
vcmpord_qph -123456(%esp,%esi,8), %zmm6, %k5
vcmpord_qph (%eax){1to32}, %zmm6, %k5
vcmpord_qph 8128(%edx), %zmm6, %k5
vcmpord_qph 8192(%edx), %zmm6, %k5
vcmpord_qph -8192(%edx), %zmm6, %k5
vcmpord_qph -8256(%edx), %zmm6, %k5
vcmpord_qph 1016(%edx){1to32}, %zmm6, %k5
vcmpord_qph 1024(%edx){1to32}, %zmm6, %k5
vcmpord_qph -1024(%edx){1to32}, %zmm6, %k5
vcmpord_qph -1032(%edx){1to32}, %zmm6, %k5
vcmpordph %zmm5, %zmm6, %k5
vcmpordph %zmm5, %zmm6, %k5{%k7}
vcmpordph {sae}, %zmm5, %zmm6, %k5
vcmpordph (%ecx), %zmm6, %k5
vcmpordph -123456(%esp,%esi,8), %zmm6, %k5
vcmpordph (%eax){1to32}, %zmm6, %k5
vcmpordph 8128(%edx), %zmm6, %k5
vcmpordph 8192(%edx), %zmm6, %k5
vcmpordph -8192(%edx), %zmm6, %k5
vcmpordph -8256(%edx), %zmm6, %k5
vcmpordph 1016(%edx){1to32}, %zmm6, %k5
vcmpordph 1024(%edx){1to32}, %zmm6, %k5
vcmpordph -1024(%edx){1to32}, %zmm6, %k5
vcmpordph -1032(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph %zmm5, %zmm6, %k5
vcmpeq_uqph %zmm5, %zmm6, %k5{%k7}
vcmpeq_uqph {sae}, %zmm5, %zmm6, %k5
vcmpeq_uqph (%ecx), %zmm6, %k5
vcmpeq_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_uqph (%eax){1to32}, %zmm6, %k5
vcmpeq_uqph 8128(%edx), %zmm6, %k5
vcmpeq_uqph 8192(%edx), %zmm6, %k5
vcmpeq_uqph -8192(%edx), %zmm6, %k5
vcmpeq_uqph -8256(%edx), %zmm6, %k5
vcmpeq_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpnge_usph %zmm5, %zmm6, %k5
vcmpnge_usph %zmm5, %zmm6, %k5{%k7}
vcmpnge_usph {sae}, %zmm5, %zmm6, %k5
vcmpnge_usph (%ecx), %zmm6, %k5
vcmpnge_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnge_usph (%eax){1to32}, %zmm6, %k5
vcmpnge_usph 8128(%edx), %zmm6, %k5
vcmpnge_usph 8192(%edx), %zmm6, %k5
vcmpnge_usph -8192(%edx), %zmm6, %k5
vcmpnge_usph -8256(%edx), %zmm6, %k5
vcmpnge_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpnge_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpnge_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpnge_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpngeph %zmm5, %zmm6, %k5
vcmpngeph %zmm5, %zmm6, %k5{%k7}
vcmpngeph {sae}, %zmm5, %zmm6, %k5
vcmpngeph (%ecx), %zmm6, %k5
vcmpngeph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngeph (%eax){1to32}, %zmm6, %k5
vcmpngeph 8128(%edx), %zmm6, %k5
vcmpngeph 8192(%edx), %zmm6, %k5
vcmpngeph -8192(%edx), %zmm6, %k5
vcmpngeph -8256(%edx), %zmm6, %k5
vcmpngeph 1016(%edx){1to32}, %zmm6, %k5
vcmpngeph 1024(%edx){1to32}, %zmm6, %k5
vcmpngeph -1024(%edx){1to32}, %zmm6, %k5
vcmpngeph -1032(%edx){1to32}, %zmm6, %k5
vcmpngt_usph %zmm5, %zmm6, %k5
vcmpngt_usph %zmm5, %zmm6, %k5{%k7}
vcmpngt_usph {sae}, %zmm5, %zmm6, %k5
vcmpngt_usph (%ecx), %zmm6, %k5
vcmpngt_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngt_usph (%eax){1to32}, %zmm6, %k5
vcmpngt_usph 8128(%edx), %zmm6, %k5
vcmpngt_usph 8192(%edx), %zmm6, %k5
vcmpngt_usph -8192(%edx), %zmm6, %k5
vcmpngt_usph -8256(%edx), %zmm6, %k5
vcmpngt_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpngt_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpngt_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpngt_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpngtph %zmm5, %zmm6, %k5
vcmpngtph %zmm5, %zmm6, %k5{%k7}
vcmpngtph {sae}, %zmm5, %zmm6, %k5
vcmpngtph (%ecx), %zmm6, %k5
vcmpngtph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngtph (%eax){1to32}, %zmm6, %k5
vcmpngtph 8128(%edx), %zmm6, %k5
vcmpngtph 8192(%edx), %zmm6, %k5
vcmpngtph -8192(%edx), %zmm6, %k5
vcmpngtph -8256(%edx), %zmm6, %k5
vcmpngtph 1016(%edx){1to32}, %zmm6, %k5
vcmpngtph 1024(%edx){1to32}, %zmm6, %k5
vcmpngtph -1024(%edx){1to32}, %zmm6, %k5
vcmpngtph -1032(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph %zmm5, %zmm6, %k5
vcmpfalse_oqph %zmm5, %zmm6, %k5{%k7}
vcmpfalse_oqph {sae}, %zmm5, %zmm6, %k5
vcmpfalse_oqph (%ecx), %zmm6, %k5
vcmpfalse_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpfalse_oqph (%eax){1to32}, %zmm6, %k5
vcmpfalse_oqph 8128(%edx), %zmm6, %k5
vcmpfalse_oqph 8192(%edx), %zmm6, %k5
vcmpfalse_oqph -8192(%edx), %zmm6, %k5
vcmpfalse_oqph -8256(%edx), %zmm6, %k5
vcmpfalse_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpfalseph %zmm5, %zmm6, %k5
vcmpfalseph %zmm5, %zmm6, %k5{%k7}
vcmpfalseph {sae}, %zmm5, %zmm6, %k5
vcmpfalseph (%ecx), %zmm6, %k5
vcmpfalseph -123456(%esp,%esi,8), %zmm6, %k5
vcmpfalseph (%eax){1to32}, %zmm6, %k5
vcmpfalseph 8128(%edx), %zmm6, %k5
vcmpfalseph 8192(%edx), %zmm6, %k5
vcmpfalseph -8192(%edx), %zmm6, %k5
vcmpfalseph -8256(%edx), %zmm6, %k5
vcmpfalseph 1016(%edx){1to32}, %zmm6, %k5
vcmpfalseph 1024(%edx){1to32}, %zmm6, %k5
vcmpfalseph -1024(%edx){1to32}, %zmm6, %k5
vcmpfalseph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph %zmm5, %zmm6, %k5
vcmpneq_oqph %zmm5, %zmm6, %k5{%k7}
vcmpneq_oqph {sae}, %zmm5, %zmm6, %k5
vcmpneq_oqph (%ecx), %zmm6, %k5
vcmpneq_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_oqph (%eax){1to32}, %zmm6, %k5
vcmpneq_oqph 8128(%edx), %zmm6, %k5
vcmpneq_oqph 8192(%edx), %zmm6, %k5
vcmpneq_oqph -8192(%edx), %zmm6, %k5
vcmpneq_oqph -8256(%edx), %zmm6, %k5
vcmpneq_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpge_osph %zmm5, %zmm6, %k5
vcmpge_osph %zmm5, %zmm6, %k5{%k7}
vcmpge_osph {sae}, %zmm5, %zmm6, %k5
vcmpge_osph (%ecx), %zmm6, %k5
vcmpge_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpge_osph (%eax){1to32}, %zmm6, %k5
vcmpge_osph 8128(%edx), %zmm6, %k5
vcmpge_osph 8192(%edx), %zmm6, %k5
vcmpge_osph -8192(%edx), %zmm6, %k5
vcmpge_osph -8256(%edx), %zmm6, %k5
vcmpge_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpge_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpge_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpge_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpgeph %zmm5, %zmm6, %k5
vcmpgeph %zmm5, %zmm6, %k5{%k7}
vcmpgeph {sae}, %zmm5, %zmm6, %k5
vcmpgeph (%ecx), %zmm6, %k5
vcmpgeph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgeph (%eax){1to32}, %zmm6, %k5
vcmpgeph 8128(%edx), %zmm6, %k5
vcmpgeph 8192(%edx), %zmm6, %k5
vcmpgeph -8192(%edx), %zmm6, %k5
vcmpgeph -8256(%edx), %zmm6, %k5
vcmpgeph 1016(%edx){1to32}, %zmm6, %k5
vcmpgeph 1024(%edx){1to32}, %zmm6, %k5
vcmpgeph -1024(%edx){1to32}, %zmm6, %k5
vcmpgeph -1032(%edx){1to32}, %zmm6, %k5
vcmpgt_osph %zmm5, %zmm6, %k5
vcmpgt_osph %zmm5, %zmm6, %k5{%k7}
vcmpgt_osph {sae}, %zmm5, %zmm6, %k5
vcmpgt_osph (%ecx), %zmm6, %k5
vcmpgt_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgt_osph (%eax){1to32}, %zmm6, %k5
vcmpgt_osph 8128(%edx), %zmm6, %k5
vcmpgt_osph 8192(%edx), %zmm6, %k5
vcmpgt_osph -8192(%edx), %zmm6, %k5
vcmpgt_osph -8256(%edx), %zmm6, %k5
vcmpgt_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpgt_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpgt_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpgt_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpgtph %zmm5, %zmm6, %k5
vcmpgtph %zmm5, %zmm6, %k5{%k7}
vcmpgtph {sae}, %zmm5, %zmm6, %k5
vcmpgtph (%ecx), %zmm6, %k5
vcmpgtph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgtph (%eax){1to32}, %zmm6, %k5
vcmpgtph 8128(%edx), %zmm6, %k5
vcmpgtph 8192(%edx), %zmm6, %k5
vcmpgtph -8192(%edx), %zmm6, %k5
vcmpgtph -8256(%edx), %zmm6, %k5
vcmpgtph 1016(%edx){1to32}, %zmm6, %k5
vcmpgtph 1024(%edx){1to32}, %zmm6, %k5
vcmpgtph -1024(%edx){1to32}, %zmm6, %k5
vcmpgtph -1032(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph %zmm5, %zmm6, %k5
vcmptrue_uqph %zmm5, %zmm6, %k5{%k7}
vcmptrue_uqph {sae}, %zmm5, %zmm6, %k5
vcmptrue_uqph (%ecx), %zmm6, %k5
vcmptrue_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmptrue_uqph (%eax){1to32}, %zmm6, %k5
vcmptrue_uqph 8128(%edx), %zmm6, %k5
vcmptrue_uqph 8192(%edx), %zmm6, %k5
vcmptrue_uqph -8192(%edx), %zmm6, %k5
vcmptrue_uqph -8256(%edx), %zmm6, %k5
vcmptrue_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmptrue_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmptrueph %zmm5, %zmm6, %k5
vcmptrueph %zmm5, %zmm6, %k5{%k7}
vcmptrueph {sae}, %zmm5, %zmm6, %k5
vcmptrueph (%ecx), %zmm6, %k5
vcmptrueph -123456(%esp,%esi,8), %zmm6, %k5
vcmptrueph (%eax){1to32}, %zmm6, %k5
vcmptrueph 8128(%edx), %zmm6, %k5
vcmptrueph 8192(%edx), %zmm6, %k5
vcmptrueph -8192(%edx), %zmm6, %k5
vcmptrueph -8256(%edx), %zmm6, %k5
vcmptrueph 1016(%edx){1to32}, %zmm6, %k5
vcmptrueph 1024(%edx){1to32}, %zmm6, %k5
vcmptrueph -1024(%edx){1to32}, %zmm6, %k5
vcmptrueph -1032(%edx){1to32}, %zmm6, %k5
vcmpeq_osph %zmm5, %zmm6, %k5
vcmpeq_osph %zmm5, %zmm6, %k5{%k7}
vcmpeq_osph {sae}, %zmm5, %zmm6, %k5
vcmpeq_osph (%ecx), %zmm6, %k5
vcmpeq_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_osph (%eax){1to32}, %zmm6, %k5
vcmpeq_osph 8128(%edx), %zmm6, %k5
vcmpeq_osph 8192(%edx), %zmm6, %k5
vcmpeq_osph -8192(%edx), %zmm6, %k5
vcmpeq_osph -8256(%edx), %zmm6, %k5
vcmpeq_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_osph -1032(%edx){1to32}, %zmm6, %k5
vcmplt_oqph %zmm5, %zmm6, %k5
vcmplt_oqph %zmm5, %zmm6, %k5{%k7}
vcmplt_oqph {sae}, %zmm5, %zmm6, %k5
vcmplt_oqph (%ecx), %zmm6, %k5
vcmplt_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmplt_oqph (%eax){1to32}, %zmm6, %k5
vcmplt_oqph 8128(%edx), %zmm6, %k5
vcmplt_oqph 8192(%edx), %zmm6, %k5
vcmplt_oqph -8192(%edx), %zmm6, %k5
vcmplt_oqph -8256(%edx), %zmm6, %k5
vcmplt_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmplt_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmplt_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmplt_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmple_oqph %zmm5, %zmm6, %k5
vcmple_oqph %zmm5, %zmm6, %k5{%k7}
vcmple_oqph {sae}, %zmm5, %zmm6, %k5
vcmple_oqph (%ecx), %zmm6, %k5
vcmple_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmple_oqph (%eax){1to32}, %zmm6, %k5
vcmple_oqph 8128(%edx), %zmm6, %k5
vcmple_oqph 8192(%edx), %zmm6, %k5
vcmple_oqph -8192(%edx), %zmm6, %k5
vcmple_oqph -8256(%edx), %zmm6, %k5
vcmple_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmple_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmple_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmple_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpunord_sph %zmm5, %zmm6, %k5
vcmpunord_sph %zmm5, %zmm6, %k5{%k7}
vcmpunord_sph {sae}, %zmm5, %zmm6, %k5
vcmpunord_sph (%ecx), %zmm6, %k5
vcmpunord_sph -123456(%esp,%esi,8), %zmm6, %k5
vcmpunord_sph (%eax){1to32}, %zmm6, %k5
vcmpunord_sph 8128(%edx), %zmm6, %k5
vcmpunord_sph 8192(%edx), %zmm6, %k5
vcmpunord_sph -8192(%edx), %zmm6, %k5
vcmpunord_sph -8256(%edx), %zmm6, %k5
vcmpunord_sph 1016(%edx){1to32}, %zmm6, %k5
vcmpunord_sph 1024(%edx){1to32}, %zmm6, %k5
vcmpunord_sph -1024(%edx){1to32}, %zmm6, %k5
vcmpunord_sph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_usph %zmm5, %zmm6, %k5
vcmpneq_usph %zmm5, %zmm6, %k5{%k7}
vcmpneq_usph {sae}, %zmm5, %zmm6, %k5
vcmpneq_usph (%ecx), %zmm6, %k5
vcmpneq_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_usph (%eax){1to32}, %zmm6, %k5
vcmpneq_usph 8128(%edx), %zmm6, %k5
vcmpneq_usph 8192(%edx), %zmm6, %k5
vcmpneq_usph -8192(%edx), %zmm6, %k5
vcmpneq_usph -8256(%edx), %zmm6, %k5
vcmpneq_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph %zmm5, %zmm6, %k5
vcmpnlt_uqph %zmm5, %zmm6, %k5{%k7}
vcmpnlt_uqph {sae}, %zmm5, %zmm6, %k5
vcmpnlt_uqph (%ecx), %zmm6, %k5
vcmpnlt_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnlt_uqph (%eax){1to32}, %zmm6, %k5
vcmpnlt_uqph 8128(%edx), %zmm6, %k5
vcmpnlt_uqph 8192(%edx), %zmm6, %k5
vcmpnlt_uqph -8192(%edx), %zmm6, %k5
vcmpnlt_uqph -8256(%edx), %zmm6, %k5
vcmpnlt_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpnlt_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph %zmm5, %zmm6, %k5
vcmpnle_uqph %zmm5, %zmm6, %k5{%k7}
vcmpnle_uqph {sae}, %zmm5, %zmm6, %k5
vcmpnle_uqph (%ecx), %zmm6, %k5
vcmpnle_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnle_uqph (%eax){1to32}, %zmm6, %k5
vcmpnle_uqph 8128(%edx), %zmm6, %k5
vcmpnle_uqph 8192(%edx), %zmm6, %k5
vcmpnle_uqph -8192(%edx), %zmm6, %k5
vcmpnle_uqph -8256(%edx), %zmm6, %k5
vcmpnle_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpnle_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpord_sph %zmm5, %zmm6, %k5
vcmpord_sph %zmm5, %zmm6, %k5{%k7}
vcmpord_sph {sae}, %zmm5, %zmm6, %k5
vcmpord_sph (%ecx), %zmm6, %k5
vcmpord_sph -123456(%esp,%esi,8), %zmm6, %k5
vcmpord_sph (%eax){1to32}, %zmm6, %k5
vcmpord_sph 8128(%edx), %zmm6, %k5
vcmpord_sph 8192(%edx), %zmm6, %k5
vcmpord_sph -8192(%edx), %zmm6, %k5
vcmpord_sph -8256(%edx), %zmm6, %k5
vcmpord_sph 1016(%edx){1to32}, %zmm6, %k5
vcmpord_sph 1024(%edx){1to32}, %zmm6, %k5
vcmpord_sph -1024(%edx){1to32}, %zmm6, %k5
vcmpord_sph -1032(%edx){1to32}, %zmm6, %k5
vcmpeq_usph %zmm5, %zmm6, %k5
vcmpeq_usph %zmm5, %zmm6, %k5{%k7}
vcmpeq_usph {sae}, %zmm5, %zmm6, %k5
vcmpeq_usph (%ecx), %zmm6, %k5
vcmpeq_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmpeq_usph (%eax){1to32}, %zmm6, %k5
vcmpeq_usph 8128(%edx), %zmm6, %k5
vcmpeq_usph 8192(%edx), %zmm6, %k5
vcmpeq_usph -8192(%edx), %zmm6, %k5
vcmpeq_usph -8256(%edx), %zmm6, %k5
vcmpeq_usph 1016(%edx){1to32}, %zmm6, %k5
vcmpeq_usph 1024(%edx){1to32}, %zmm6, %k5
vcmpeq_usph -1024(%edx){1to32}, %zmm6, %k5
vcmpeq_usph -1032(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph %zmm5, %zmm6, %k5
vcmpnge_uqph %zmm5, %zmm6, %k5{%k7}
vcmpnge_uqph {sae}, %zmm5, %zmm6, %k5
vcmpnge_uqph (%ecx), %zmm6, %k5
vcmpnge_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpnge_uqph (%eax){1to32}, %zmm6, %k5
vcmpnge_uqph 8128(%edx), %zmm6, %k5
vcmpnge_uqph 8192(%edx), %zmm6, %k5
vcmpnge_uqph -8192(%edx), %zmm6, %k5
vcmpnge_uqph -8256(%edx), %zmm6, %k5
vcmpnge_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpnge_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph %zmm5, %zmm6, %k5
vcmpngt_uqph %zmm5, %zmm6, %k5{%k7}
vcmpngt_uqph {sae}, %zmm5, %zmm6, %k5
vcmpngt_uqph (%ecx), %zmm6, %k5
vcmpngt_uqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpngt_uqph (%eax){1to32}, %zmm6, %k5
vcmpngt_uqph 8128(%edx), %zmm6, %k5
vcmpngt_uqph 8192(%edx), %zmm6, %k5
vcmpngt_uqph -8192(%edx), %zmm6, %k5
vcmpngt_uqph -8256(%edx), %zmm6, %k5
vcmpngt_uqph 1016(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph 1024(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph -1024(%edx){1to32}, %zmm6, %k5
vcmpngt_uqph -1032(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph %zmm5, %zmm6, %k5
vcmpfalse_osph %zmm5, %zmm6, %k5{%k7}
vcmpfalse_osph {sae}, %zmm5, %zmm6, %k5
vcmpfalse_osph (%ecx), %zmm6, %k5
vcmpfalse_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpfalse_osph (%eax){1to32}, %zmm6, %k5
vcmpfalse_osph 8128(%edx), %zmm6, %k5
vcmpfalse_osph 8192(%edx), %zmm6, %k5
vcmpfalse_osph -8192(%edx), %zmm6, %k5
vcmpfalse_osph -8256(%edx), %zmm6, %k5
vcmpfalse_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpfalse_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpneq_osph %zmm5, %zmm6, %k5
vcmpneq_osph %zmm5, %zmm6, %k5{%k7}
vcmpneq_osph {sae}, %zmm5, %zmm6, %k5
vcmpneq_osph (%ecx), %zmm6, %k5
vcmpneq_osph -123456(%esp,%esi,8), %zmm6, %k5
vcmpneq_osph (%eax){1to32}, %zmm6, %k5
vcmpneq_osph 8128(%edx), %zmm6, %k5
vcmpneq_osph 8192(%edx), %zmm6, %k5
vcmpneq_osph -8192(%edx), %zmm6, %k5
vcmpneq_osph -8256(%edx), %zmm6, %k5
vcmpneq_osph 1016(%edx){1to32}, %zmm6, %k5
vcmpneq_osph 1024(%edx){1to32}, %zmm6, %k5
vcmpneq_osph -1024(%edx){1to32}, %zmm6, %k5
vcmpneq_osph -1032(%edx){1to32}, %zmm6, %k5
vcmpge_oqph %zmm5, %zmm6, %k5
vcmpge_oqph %zmm5, %zmm6, %k5{%k7}
vcmpge_oqph {sae}, %zmm5, %zmm6, %k5
vcmpge_oqph (%ecx), %zmm6, %k5
vcmpge_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpge_oqph (%eax){1to32}, %zmm6, %k5
vcmpge_oqph 8128(%edx), %zmm6, %k5
vcmpge_oqph 8192(%edx), %zmm6, %k5
vcmpge_oqph -8192(%edx), %zmm6, %k5
vcmpge_oqph -8256(%edx), %zmm6, %k5
vcmpge_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpge_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpge_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpge_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph %zmm5, %zmm6, %k5
vcmpgt_oqph %zmm5, %zmm6, %k5{%k7}
vcmpgt_oqph {sae}, %zmm5, %zmm6, %k5
vcmpgt_oqph (%ecx), %zmm6, %k5
vcmpgt_oqph -123456(%esp,%esi,8), %zmm6, %k5
vcmpgt_oqph (%eax){1to32}, %zmm6, %k5
vcmpgt_oqph 8128(%edx), %zmm6, %k5
vcmpgt_oqph 8192(%edx), %zmm6, %k5
vcmpgt_oqph -8192(%edx), %zmm6, %k5
vcmpgt_oqph -8256(%edx), %zmm6, %k5
vcmpgt_oqph 1016(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph 1024(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph -1024(%edx){1to32}, %zmm6, %k5
vcmpgt_oqph -1032(%edx){1to32}, %zmm6, %k5
vcmptrue_usph %zmm5, %zmm6, %k5
vcmptrue_usph %zmm5, %zmm6, %k5{%k7}
vcmptrue_usph {sae}, %zmm5, %zmm6, %k5
vcmptrue_usph (%ecx), %zmm6, %k5
vcmptrue_usph -123456(%esp,%esi,8), %zmm6, %k5
vcmptrue_usph (%eax){1to32}, %zmm6, %k5
vcmptrue_usph 8128(%edx), %zmm6, %k5
vcmptrue_usph 8192(%edx), %zmm6, %k5
vcmptrue_usph -8192(%edx), %zmm6, %k5
vcmptrue_usph -8256(%edx), %zmm6, %k5
vcmptrue_usph 1016(%edx){1to32}, %zmm6, %k5
vcmptrue_usph 1024(%edx){1to32}, %zmm6, %k5
vcmptrue_usph -1024(%edx){1to32}, %zmm6, %k5
vcmptrue_usph -1032(%edx){1to32}, %zmm6, %k5
# AT&T-syntax scalar-FP16 compare tests (vcmp*sh): one 8-line group per
# comparison predicate (eq_oq, eq, lt_os, ... true_us — all 32 imm8
# predicates via their pseudo-op mnemonics).  Each group exercises:
#   reg,reg / {sae},reg,reg / base-only mem / SIB mem with large disp /
#   four +-disp(%edx) forms.  All use destination %k5 under mask {%k7}.
# NOTE(review): displacements 1016/1024/-1024/-1032 presumably probe the
# disp8*N vs. disp32 encoding split — verify against the .d dump, since
# Tuple1-Scalar FP16 would normally compress at +-254.
vcmpeq_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpeq_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpeq_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpeqsh %xmm4, %xmm5, %k5{%k7}
vcmpeqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeqsh (%ecx), %xmm5, %k5{%k7}
vcmpeqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpeqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpeqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpeqsh -1032(%edx), %xmm5, %k5{%k7}
vcmplt_ossh %xmm4, %xmm5, %k5{%k7}
vcmplt_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmplt_ossh (%ecx), %xmm5, %k5{%k7}
vcmplt_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmplt_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmplt_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmplt_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmplt_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpltsh %xmm4, %xmm5, %k5{%k7}
vcmpltsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpltsh (%ecx), %xmm5, %k5{%k7}
vcmpltsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpltsh 1016(%edx), %xmm5, %k5{%k7}
vcmpltsh 1024(%edx), %xmm5, %k5{%k7}
vcmpltsh -1024(%edx), %xmm5, %k5{%k7}
vcmpltsh -1032(%edx), %xmm5, %k5{%k7}
vcmple_ossh %xmm4, %xmm5, %k5{%k7}
vcmple_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmple_ossh (%ecx), %xmm5, %k5{%k7}
vcmple_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmple_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmple_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmple_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmple_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmplesh %xmm4, %xmm5, %k5{%k7}
vcmplesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmplesh (%ecx), %xmm5, %k5{%k7}
vcmplesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmplesh 1016(%edx), %xmm5, %k5{%k7}
vcmplesh 1024(%edx), %xmm5, %k5{%k7}
vcmplesh -1024(%edx), %xmm5, %k5{%k7}
vcmplesh -1032(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh %xmm4, %xmm5, %k5{%k7}
vcmpunord_qsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpunord_qsh (%ecx), %xmm5, %k5{%k7}
vcmpunord_qsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpunord_qsh 1016(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh 1024(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh -1024(%edx), %xmm5, %k5{%k7}
vcmpunord_qsh -1032(%edx), %xmm5, %k5{%k7}
vcmpunordsh %xmm4, %xmm5, %k5{%k7}
vcmpunordsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpunordsh (%ecx), %xmm5, %k5{%k7}
vcmpunordsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpunordsh 1016(%edx), %xmm5, %k5{%k7}
vcmpunordsh 1024(%edx), %xmm5, %k5{%k7}
vcmpunordsh -1024(%edx), %xmm5, %k5{%k7}
vcmpunordsh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpneq_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpneq_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpneqsh %xmm4, %xmm5, %k5{%k7}
vcmpneqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneqsh (%ecx), %xmm5, %k5{%k7}
vcmpneqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpneqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpneqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpneqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh %xmm4, %xmm5, %k5{%k7}
vcmpnlt_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnlt_ussh (%ecx), %xmm5, %k5{%k7}
vcmpnlt_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnlt_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnltsh %xmm4, %xmm5, %k5{%k7}
vcmpnltsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnltsh (%ecx), %xmm5, %k5{%k7}
vcmpnltsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnltsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnltsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnltsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnltsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh %xmm4, %xmm5, %k5{%k7}
vcmpnle_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnle_ussh (%ecx), %xmm5, %k5{%k7}
vcmpnle_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnle_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpnle_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnlesh %xmm4, %xmm5, %k5{%k7}
vcmpnlesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnlesh (%ecx), %xmm5, %k5{%k7}
vcmpnlesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnlesh 1016(%edx), %xmm5, %k5{%k7}
vcmpnlesh 1024(%edx), %xmm5, %k5{%k7}
vcmpnlesh -1024(%edx), %xmm5, %k5{%k7}
vcmpnlesh -1032(%edx), %xmm5, %k5{%k7}
vcmpord_qsh %xmm4, %xmm5, %k5{%k7}
vcmpord_qsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpord_qsh (%ecx), %xmm5, %k5{%k7}
vcmpord_qsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpord_qsh 1016(%edx), %xmm5, %k5{%k7}
vcmpord_qsh 1024(%edx), %xmm5, %k5{%k7}
vcmpord_qsh -1024(%edx), %xmm5, %k5{%k7}
vcmpord_qsh -1032(%edx), %xmm5, %k5{%k7}
vcmpordsh %xmm4, %xmm5, %k5{%k7}
vcmpordsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpordsh (%ecx), %xmm5, %k5{%k7}
vcmpordsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpordsh 1016(%edx), %xmm5, %k5{%k7}
vcmpordsh 1024(%edx), %xmm5, %k5{%k7}
vcmpordsh -1024(%edx), %xmm5, %k5{%k7}
vcmpordsh -1032(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpeq_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpeq_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh %xmm4, %xmm5, %k5{%k7}
vcmpnge_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnge_ussh (%ecx), %xmm5, %k5{%k7}
vcmpnge_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnge_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpnge_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpngesh %xmm4, %xmm5, %k5{%k7}
vcmpngesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngesh (%ecx), %xmm5, %k5{%k7}
vcmpngesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngesh 1016(%edx), %xmm5, %k5{%k7}
vcmpngesh 1024(%edx), %xmm5, %k5{%k7}
vcmpngesh -1024(%edx), %xmm5, %k5{%k7}
vcmpngesh -1032(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh %xmm4, %xmm5, %k5{%k7}
vcmpngt_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngt_ussh (%ecx), %xmm5, %k5{%k7}
vcmpngt_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngt_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpngt_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpngtsh %xmm4, %xmm5, %k5{%k7}
vcmpngtsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngtsh (%ecx), %xmm5, %k5{%k7}
vcmpngtsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngtsh 1016(%edx), %xmm5, %k5{%k7}
vcmpngtsh 1024(%edx), %xmm5, %k5{%k7}
vcmpngtsh -1024(%edx), %xmm5, %k5{%k7}
vcmpngtsh -1032(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpfalse_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpfalse_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpfalse_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpfalse_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpfalsesh %xmm4, %xmm5, %k5{%k7}
vcmpfalsesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpfalsesh (%ecx), %xmm5, %k5{%k7}
vcmpfalsesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpfalsesh 1016(%edx), %xmm5, %k5{%k7}
vcmpfalsesh 1024(%edx), %xmm5, %k5{%k7}
vcmpfalsesh -1024(%edx), %xmm5, %k5{%k7}
vcmpfalsesh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpneq_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpneq_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpge_ossh %xmm4, %xmm5, %k5{%k7}
vcmpge_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpge_ossh (%ecx), %xmm5, %k5{%k7}
vcmpge_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpge_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpge_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpge_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpge_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpgesh %xmm4, %xmm5, %k5{%k7}
vcmpgesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgesh (%ecx), %xmm5, %k5{%k7}
vcmpgesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgesh 1016(%edx), %xmm5, %k5{%k7}
vcmpgesh 1024(%edx), %xmm5, %k5{%k7}
vcmpgesh -1024(%edx), %xmm5, %k5{%k7}
vcmpgesh -1032(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh %xmm4, %xmm5, %k5{%k7}
vcmpgt_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgt_ossh (%ecx), %xmm5, %k5{%k7}
vcmpgt_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgt_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpgt_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpgtsh %xmm4, %xmm5, %k5{%k7}
vcmpgtsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgtsh (%ecx), %xmm5, %k5{%k7}
vcmpgtsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgtsh 1016(%edx), %xmm5, %k5{%k7}
vcmpgtsh 1024(%edx), %xmm5, %k5{%k7}
vcmpgtsh -1024(%edx), %xmm5, %k5{%k7}
vcmpgtsh -1032(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh %xmm4, %xmm5, %k5{%k7}
vcmptrue_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmptrue_uqsh (%ecx), %xmm5, %k5{%k7}
vcmptrue_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmptrue_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmptrue_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmptruesh %xmm4, %xmm5, %k5{%k7}
vcmptruesh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmptruesh (%ecx), %xmm5, %k5{%k7}
vcmptruesh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmptruesh 1016(%edx), %xmm5, %k5{%k7}
vcmptruesh 1024(%edx), %xmm5, %k5{%k7}
vcmptruesh -1024(%edx), %xmm5, %k5{%k7}
vcmptruesh -1032(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh %xmm4, %xmm5, %k5{%k7}
vcmpeq_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_ossh (%ecx), %xmm5, %k5{%k7}
vcmpeq_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh %xmm4, %xmm5, %k5{%k7}
vcmplt_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmplt_oqsh (%ecx), %xmm5, %k5{%k7}
vcmplt_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmplt_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmplt_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmple_oqsh %xmm4, %xmm5, %k5{%k7}
vcmple_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmple_oqsh (%ecx), %xmm5, %k5{%k7}
vcmple_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmple_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmple_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmple_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmple_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh %xmm4, %xmm5, %k5{%k7}
vcmpunord_ssh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpunord_ssh (%ecx), %xmm5, %k5{%k7}
vcmpunord_ssh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpunord_ssh 1016(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh 1024(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh -1024(%edx), %xmm5, %k5{%k7}
vcmpunord_ssh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh %xmm4, %xmm5, %k5{%k7}
vcmpneq_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_ussh (%ecx), %xmm5, %k5{%k7}
vcmpneq_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpnlt_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnlt_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpnlt_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnlt_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnlt_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpnle_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnle_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpnle_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnle_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnle_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpord_ssh %xmm4, %xmm5, %k5{%k7}
vcmpord_ssh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpord_ssh (%ecx), %xmm5, %k5{%k7}
vcmpord_ssh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpord_ssh 1016(%edx), %xmm5, %k5{%k7}
vcmpord_ssh 1024(%edx), %xmm5, %k5{%k7}
vcmpord_ssh -1024(%edx), %xmm5, %k5{%k7}
vcmpord_ssh -1032(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh %xmm4, %xmm5, %k5{%k7}
vcmpeq_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpeq_ussh (%ecx), %xmm5, %k5{%k7}
vcmpeq_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpeq_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmpeq_ussh -1032(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpnge_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpnge_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpnge_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpnge_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpnge_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh %xmm4, %xmm5, %k5{%k7}
vcmpngt_uqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpngt_uqsh (%ecx), %xmm5, %k5{%k7}
vcmpngt_uqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpngt_uqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpngt_uqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh %xmm4, %xmm5, %k5{%k7}
vcmpfalse_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpfalse_ossh (%ecx), %xmm5, %k5{%k7}
vcmpfalse_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpfalse_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpfalse_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh %xmm4, %xmm5, %k5{%k7}
vcmpneq_ossh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpneq_ossh (%ecx), %xmm5, %k5{%k7}
vcmpneq_ossh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpneq_ossh 1016(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh 1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh -1024(%edx), %xmm5, %k5{%k7}
vcmpneq_ossh -1032(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpge_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpge_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpge_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpge_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpge_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh %xmm4, %xmm5, %k5{%k7}
vcmpgt_oqsh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmpgt_oqsh (%ecx), %xmm5, %k5{%k7}
vcmpgt_oqsh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmpgt_oqsh 1016(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh 1024(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh -1024(%edx), %xmm5, %k5{%k7}
vcmpgt_oqsh -1032(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh %xmm4, %xmm5, %k5{%k7}
vcmptrue_ussh {sae}, %xmm4, %xmm5, %k5{%k7}
vcmptrue_ussh (%ecx), %xmm5, %k5{%k7}
vcmptrue_ussh -123456(%esp,%esi,8), %xmm5, %k5{%k7}
vcmptrue_ussh 1016(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh 1024(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh -1024(%edx), %xmm5, %k5{%k7}
vcmptrue_ussh -1032(%edx), %xmm5, %k5{%k7}
# Switch to Intel syntax and repeat the packed-FP16 (zmm) compare tests
# (vcmp*ph) with destination-first operand order.  One 14-line group per
# predicate mnemonic, covering: reg/reg, masked dest k5{k7}, {sae},
# ZMMWORD memory (base, SIB, four +-disp forms around 8128/8192), and
# word-broadcast [..]{1to32} forms (+-disp around 1016/1024).
# NOTE(review): the 8128/-8192 vs. 8192/-8256 pairs look like disp8*64
# compression boundaries for the full-vector forms — confirm against the
# expected-dump (.d) file.
.intel_syntax noprefix
vcmpeq_oqph k5, zmm6, zmm5
vcmpeq_oqph k5{k7}, zmm6, zmm5
vcmpeq_oqph k5, zmm6, zmm5, {sae}
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_oqph k5, zmm6, [eax]{1to32}
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_oqph k5, zmm6, [edx+1016]{1to32}
vcmpeq_oqph k5, zmm6, [edx+1024]{1to32}
vcmpeq_oqph k5, zmm6, [edx-1024]{1to32}
vcmpeq_oqph k5, zmm6, [edx-1032]{1to32}
vcmpeqph k5, zmm6, zmm5
vcmpeqph k5{k7}, zmm6, zmm5
vcmpeqph k5, zmm6, zmm5, {sae}
vcmpeqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeqph k5, zmm6, [eax]{1to32}
vcmpeqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeqph k5, zmm6, [edx+1016]{1to32}
vcmpeqph k5, zmm6, [edx+1024]{1to32}
vcmpeqph k5, zmm6, [edx-1024]{1to32}
vcmpeqph k5, zmm6, [edx-1032]{1to32}
vcmplt_osph k5, zmm6, zmm5
vcmplt_osph k5{k7}, zmm6, zmm5
vcmplt_osph k5, zmm6, zmm5, {sae}
vcmplt_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmplt_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmplt_osph k5, zmm6, [eax]{1to32}
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmplt_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmplt_osph k5, zmm6, [edx+1016]{1to32}
vcmplt_osph k5, zmm6, [edx+1024]{1to32}
vcmplt_osph k5, zmm6, [edx-1024]{1to32}
vcmplt_osph k5, zmm6, [edx-1032]{1to32}
vcmpltph k5, zmm6, zmm5
vcmpltph k5{k7}, zmm6, zmm5
vcmpltph k5, zmm6, zmm5, {sae}
vcmpltph k5, zmm6, ZMMWORD PTR [ecx]
vcmpltph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpltph k5, zmm6, [eax]{1to32}
vcmpltph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpltph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpltph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpltph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpltph k5, zmm6, [edx+1016]{1to32}
vcmpltph k5, zmm6, [edx+1024]{1to32}
vcmpltph k5, zmm6, [edx-1024]{1to32}
vcmpltph k5, zmm6, [edx-1032]{1to32}
vcmple_osph k5, zmm6, zmm5
vcmple_osph k5{k7}, zmm6, zmm5
vcmple_osph k5, zmm6, zmm5, {sae}
vcmple_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmple_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmple_osph k5, zmm6, [eax]{1to32}
vcmple_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmple_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmple_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmple_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmple_osph k5, zmm6, [edx+1016]{1to32}
vcmple_osph k5, zmm6, [edx+1024]{1to32}
vcmple_osph k5, zmm6, [edx-1024]{1to32}
vcmple_osph k5, zmm6, [edx-1032]{1to32}
vcmpleph k5, zmm6, zmm5
vcmpleph k5{k7}, zmm6, zmm5
vcmpleph k5, zmm6, zmm5, {sae}
vcmpleph k5, zmm6, ZMMWORD PTR [ecx]
vcmpleph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpleph k5, zmm6, [eax]{1to32}
vcmpleph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpleph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpleph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpleph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpleph k5, zmm6, [edx+1016]{1to32}
vcmpleph k5, zmm6, [edx+1024]{1to32}
vcmpleph k5, zmm6, [edx-1024]{1to32}
vcmpleph k5, zmm6, [edx-1032]{1to32}
vcmpunord_qph k5, zmm6, zmm5
vcmpunord_qph k5{k7}, zmm6, zmm5
vcmpunord_qph k5, zmm6, zmm5, {sae}
vcmpunord_qph k5, zmm6, ZMMWORD PTR [ecx]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpunord_qph k5, zmm6, [eax]{1to32}
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpunord_qph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpunord_qph k5, zmm6, [edx+1016]{1to32}
vcmpunord_qph k5, zmm6, [edx+1024]{1to32}
vcmpunord_qph k5, zmm6, [edx-1024]{1to32}
vcmpunord_qph k5, zmm6, [edx-1032]{1to32}
vcmpunordph k5, zmm6, zmm5
vcmpunordph k5{k7}, zmm6, zmm5
vcmpunordph k5, zmm6, zmm5, {sae}
vcmpunordph k5, zmm6, ZMMWORD PTR [ecx]
vcmpunordph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpunordph k5, zmm6, [eax]{1to32}
vcmpunordph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpunordph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpunordph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpunordph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpunordph k5, zmm6, [edx+1016]{1to32}
vcmpunordph k5, zmm6, [edx+1024]{1to32}
vcmpunordph k5, zmm6, [edx-1024]{1to32}
vcmpunordph k5, zmm6, [edx-1032]{1to32}
vcmpneq_uqph k5, zmm6, zmm5
vcmpneq_uqph k5{k7}, zmm6, zmm5
vcmpneq_uqph k5, zmm6, zmm5, {sae}
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_uqph k5, zmm6, [eax]{1to32}
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_uqph k5, zmm6, [edx+1016]{1to32}
vcmpneq_uqph k5, zmm6, [edx+1024]{1to32}
vcmpneq_uqph k5, zmm6, [edx-1024]{1to32}
vcmpneq_uqph k5, zmm6, [edx-1032]{1to32}
vcmpneqph k5, zmm6, zmm5
vcmpneqph k5{k7}, zmm6, zmm5
vcmpneqph k5, zmm6, zmm5, {sae}
vcmpneqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneqph k5, zmm6, [eax]{1to32}
vcmpneqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneqph k5, zmm6, [edx+1016]{1to32}
vcmpneqph k5, zmm6, [edx+1024]{1to32}
vcmpneqph k5, zmm6, [edx-1024]{1to32}
vcmpneqph k5, zmm6, [edx-1032]{1to32}
vcmpnlt_usph k5, zmm6, zmm5
vcmpnlt_usph k5{k7}, zmm6, zmm5
vcmpnlt_usph k5, zmm6, zmm5, {sae}
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnlt_usph k5, zmm6, [eax]{1to32}
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnlt_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnlt_usph k5, zmm6, [edx+1016]{1to32}
vcmpnlt_usph k5, zmm6, [edx+1024]{1to32}
vcmpnlt_usph k5, zmm6, [edx-1024]{1to32}
vcmpnlt_usph k5, zmm6, [edx-1032]{1to32}
vcmpnltph k5, zmm6, zmm5
vcmpnltph k5{k7}, zmm6, zmm5
vcmpnltph k5, zmm6, zmm5, {sae}
vcmpnltph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnltph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnltph k5, zmm6, [eax]{1to32}
vcmpnltph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnltph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnltph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnltph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnltph k5, zmm6, [edx+1016]{1to32}
vcmpnltph k5, zmm6, [edx+1024]{1to32}
vcmpnltph k5, zmm6, [edx-1024]{1to32}
vcmpnltph k5, zmm6, [edx-1032]{1to32}
vcmpnle_usph k5, zmm6, zmm5
vcmpnle_usph k5{k7}, zmm6, zmm5
vcmpnle_usph k5, zmm6, zmm5, {sae}
vcmpnle_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnle_usph k5, zmm6, [eax]{1to32}
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnle_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnle_usph k5, zmm6, [edx+1016]{1to32}
vcmpnle_usph k5, zmm6, [edx+1024]{1to32}
vcmpnle_usph k5, zmm6, [edx-1024]{1to32}
vcmpnle_usph k5, zmm6, [edx-1032]{1to32}
vcmpnleph k5, zmm6, zmm5
vcmpnleph k5{k7}, zmm6, zmm5
vcmpnleph k5, zmm6, zmm5, {sae}
vcmpnleph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnleph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnleph k5, zmm6, [eax]{1to32}
vcmpnleph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnleph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnleph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnleph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnleph k5, zmm6, [edx+1016]{1to32}
vcmpnleph k5, zmm6, [edx+1024]{1to32}
vcmpnleph k5, zmm6, [edx-1024]{1to32}
vcmpnleph k5, zmm6, [edx-1032]{1to32}
vcmpord_qph k5, zmm6, zmm5
vcmpord_qph k5{k7}, zmm6, zmm5
vcmpord_qph k5, zmm6, zmm5, {sae}
vcmpord_qph k5, zmm6, ZMMWORD PTR [ecx]
vcmpord_qph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpord_qph k5, zmm6, [eax]{1to32}
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpord_qph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpord_qph k5, zmm6, [edx+1016]{1to32}
vcmpord_qph k5, zmm6, [edx+1024]{1to32}
vcmpord_qph k5, zmm6, [edx-1024]{1to32}
vcmpord_qph k5, zmm6, [edx-1032]{1to32}
vcmpordph k5, zmm6, zmm5
vcmpordph k5{k7}, zmm6, zmm5
vcmpordph k5, zmm6, zmm5, {sae}
vcmpordph k5, zmm6, ZMMWORD PTR [ecx]
vcmpordph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpordph k5, zmm6, [eax]{1to32}
vcmpordph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpordph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpordph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpordph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpordph k5, zmm6, [edx+1016]{1to32}
vcmpordph k5, zmm6, [edx+1024]{1to32}
vcmpordph k5, zmm6, [edx-1024]{1to32}
vcmpordph k5, zmm6, [edx-1032]{1to32}
vcmpeq_uqph k5, zmm6, zmm5
vcmpeq_uqph k5{k7}, zmm6, zmm5
vcmpeq_uqph k5, zmm6, zmm5, {sae}
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_uqph k5, zmm6, [eax]{1to32}
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_uqph k5, zmm6, [edx+1016]{1to32}
vcmpeq_uqph k5, zmm6, [edx+1024]{1to32}
vcmpeq_uqph k5, zmm6, [edx-1024]{1to32}
vcmpeq_uqph k5, zmm6, [edx-1032]{1to32}
vcmpnge_usph k5, zmm6, zmm5
vcmpnge_usph k5{k7}, zmm6, zmm5
vcmpnge_usph k5, zmm6, zmm5, {sae}
vcmpnge_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnge_usph k5, zmm6, [eax]{1to32}
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnge_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnge_usph k5, zmm6, [edx+1016]{1to32}
vcmpnge_usph k5, zmm6, [edx+1024]{1to32}
vcmpnge_usph k5, zmm6, [edx-1024]{1to32}
vcmpnge_usph k5, zmm6, [edx-1032]{1to32}
vcmpngeph k5, zmm6, zmm5
vcmpngeph k5{k7}, zmm6, zmm5
vcmpngeph k5, zmm6, zmm5, {sae}
vcmpngeph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngeph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngeph k5, zmm6, [eax]{1to32}
vcmpngeph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngeph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngeph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngeph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngeph k5, zmm6, [edx+1016]{1to32}
vcmpngeph k5, zmm6, [edx+1024]{1to32}
vcmpngeph k5, zmm6, [edx-1024]{1to32}
vcmpngeph k5, zmm6, [edx-1032]{1to32}
vcmpngt_usph k5, zmm6, zmm5
vcmpngt_usph k5{k7}, zmm6, zmm5
vcmpngt_usph k5, zmm6, zmm5, {sae}
vcmpngt_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngt_usph k5, zmm6, [eax]{1to32}
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngt_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngt_usph k5, zmm6, [edx+1016]{1to32}
vcmpngt_usph k5, zmm6, [edx+1024]{1to32}
vcmpngt_usph k5, zmm6, [edx-1024]{1to32}
vcmpngt_usph k5, zmm6, [edx-1032]{1to32}
vcmpngtph k5, zmm6, zmm5
vcmpngtph k5{k7}, zmm6, zmm5
vcmpngtph k5, zmm6, zmm5, {sae}
vcmpngtph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngtph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngtph k5, zmm6, [eax]{1to32}
vcmpngtph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngtph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngtph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngtph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngtph k5, zmm6, [edx+1016]{1to32}
vcmpngtph k5, zmm6, [edx+1024]{1to32}
vcmpngtph k5, zmm6, [edx-1024]{1to32}
vcmpngtph k5, zmm6, [edx-1032]{1to32}
vcmpfalse_oqph k5, zmm6, zmm5
vcmpfalse_oqph k5{k7}, zmm6, zmm5
vcmpfalse_oqph k5, zmm6, zmm5, {sae}
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpfalse_oqph k5, zmm6, [eax]{1to32}
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpfalse_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpfalse_oqph k5, zmm6, [edx+1016]{1to32}
vcmpfalse_oqph k5, zmm6, [edx+1024]{1to32}
vcmpfalse_oqph k5, zmm6, [edx-1024]{1to32}
vcmpfalse_oqph k5, zmm6, [edx-1032]{1to32}
vcmpfalseph k5, zmm6, zmm5
vcmpfalseph k5{k7}, zmm6, zmm5
vcmpfalseph k5, zmm6, zmm5, {sae}
vcmpfalseph k5, zmm6, ZMMWORD PTR [ecx]
vcmpfalseph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpfalseph k5, zmm6, [eax]{1to32}
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpfalseph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpfalseph k5, zmm6, [edx+1016]{1to32}
vcmpfalseph k5, zmm6, [edx+1024]{1to32}
vcmpfalseph k5, zmm6, [edx-1024]{1to32}
vcmpfalseph k5, zmm6, [edx-1032]{1to32}
vcmpneq_oqph k5, zmm6, zmm5
vcmpneq_oqph k5{k7}, zmm6, zmm5
vcmpneq_oqph k5, zmm6, zmm5, {sae}
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_oqph k5, zmm6, [eax]{1to32}
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_oqph k5, zmm6, [edx+1016]{1to32}
vcmpneq_oqph k5, zmm6, [edx+1024]{1to32}
vcmpneq_oqph k5, zmm6, [edx-1024]{1to32}
vcmpneq_oqph k5, zmm6, [edx-1032]{1to32}
vcmpge_osph k5, zmm6, zmm5
vcmpge_osph k5{k7}, zmm6, zmm5
vcmpge_osph k5, zmm6, zmm5, {sae}
vcmpge_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpge_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpge_osph k5, zmm6, [eax]{1to32}
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpge_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpge_osph k5, zmm6, [edx+1016]{1to32}
vcmpge_osph k5, zmm6, [edx+1024]{1to32}
vcmpge_osph k5, zmm6, [edx-1024]{1to32}
vcmpge_osph k5, zmm6, [edx-1032]{1to32}
vcmpgeph k5, zmm6, zmm5
vcmpgeph k5{k7}, zmm6, zmm5
vcmpgeph k5, zmm6, zmm5, {sae}
vcmpgeph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgeph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgeph k5, zmm6, [eax]{1to32}
vcmpgeph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgeph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgeph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgeph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgeph k5, zmm6, [edx+1016]{1to32}
vcmpgeph k5, zmm6, [edx+1024]{1to32}
vcmpgeph k5, zmm6, [edx-1024]{1to32}
vcmpgeph k5, zmm6, [edx-1032]{1to32}
vcmpgt_osph k5, zmm6, zmm5
vcmpgt_osph k5{k7}, zmm6, zmm5
vcmpgt_osph k5, zmm6, zmm5, {sae}
vcmpgt_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgt_osph k5, zmm6, [eax]{1to32}
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgt_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgt_osph k5, zmm6, [edx+1016]{1to32}
vcmpgt_osph k5, zmm6, [edx+1024]{1to32}
vcmpgt_osph k5, zmm6, [edx-1024]{1to32}
vcmpgt_osph k5, zmm6, [edx-1032]{1to32}
vcmpgtph k5, zmm6, zmm5
vcmpgtph k5{k7}, zmm6, zmm5
vcmpgtph k5, zmm6, zmm5, {sae}
vcmpgtph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgtph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgtph k5, zmm6, [eax]{1to32}
vcmpgtph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgtph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgtph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgtph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgtph k5, zmm6, [edx+1016]{1to32}
vcmpgtph k5, zmm6, [edx+1024]{1to32}
vcmpgtph k5, zmm6, [edx-1024]{1to32}
vcmpgtph k5, zmm6, [edx-1032]{1to32}
vcmptrue_uqph k5, zmm6, zmm5
vcmptrue_uqph k5{k7}, zmm6, zmm5
vcmptrue_uqph k5, zmm6, zmm5, {sae}
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmptrue_uqph k5, zmm6, [eax]{1to32}
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmptrue_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmptrue_uqph k5, zmm6, [edx+1016]{1to32}
vcmptrue_uqph k5, zmm6, [edx+1024]{1to32}
vcmptrue_uqph k5, zmm6, [edx-1024]{1to32}
vcmptrue_uqph k5, zmm6, [edx-1032]{1to32}
vcmptrueph k5, zmm6, zmm5
vcmptrueph k5{k7}, zmm6, zmm5
vcmptrueph k5, zmm6, zmm5, {sae}
vcmptrueph k5, zmm6, ZMMWORD PTR [ecx]
vcmptrueph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmptrueph k5, zmm6, [eax]{1to32}
vcmptrueph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmptrueph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmptrueph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmptrueph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmptrueph k5, zmm6, [edx+1016]{1to32}
vcmptrueph k5, zmm6, [edx+1024]{1to32}
vcmptrueph k5, zmm6, [edx-1024]{1to32}
vcmptrueph k5, zmm6, [edx-1032]{1to32}
vcmpeq_osph k5, zmm6, zmm5
vcmpeq_osph k5{k7}, zmm6, zmm5
vcmpeq_osph k5, zmm6, zmm5, {sae}
vcmpeq_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_osph k5, zmm6, [eax]{1to32}
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_osph k5, zmm6, [edx+1016]{1to32}
vcmpeq_osph k5, zmm6, [edx+1024]{1to32}
vcmpeq_osph k5, zmm6, [edx-1024]{1to32}
vcmpeq_osph k5, zmm6, [edx-1032]{1to32}
vcmplt_oqph k5, zmm6, zmm5
vcmplt_oqph k5{k7}, zmm6, zmm5
vcmplt_oqph k5, zmm6, zmm5, {sae}
vcmplt_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmplt_oqph k5, zmm6, [eax]{1to32}
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmplt_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmplt_oqph k5, zmm6, [edx+1016]{1to32}
vcmplt_oqph k5, zmm6, [edx+1024]{1to32}
vcmplt_oqph k5, zmm6, [edx-1024]{1to32}
vcmplt_oqph k5, zmm6, [edx-1032]{1to32}
vcmple_oqph k5, zmm6, zmm5
vcmple_oqph k5{k7}, zmm6, zmm5
vcmple_oqph k5, zmm6, zmm5, {sae}
vcmple_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmple_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmple_oqph k5, zmm6, [eax]{1to32}
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmple_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmple_oqph k5, zmm6, [edx+1016]{1to32}
vcmple_oqph k5, zmm6, [edx+1024]{1to32}
vcmple_oqph k5, zmm6, [edx-1024]{1to32}
vcmple_oqph k5, zmm6, [edx-1032]{1to32}
vcmpunord_sph k5, zmm6, zmm5
vcmpunord_sph k5{k7}, zmm6, zmm5
vcmpunord_sph k5, zmm6, zmm5, {sae}
vcmpunord_sph k5, zmm6, ZMMWORD PTR [ecx]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpunord_sph k5, zmm6, [eax]{1to32}
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpunord_sph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpunord_sph k5, zmm6, [edx+1016]{1to32}
vcmpunord_sph k5, zmm6, [edx+1024]{1to32}
vcmpunord_sph k5, zmm6, [edx-1024]{1to32}
vcmpunord_sph k5, zmm6, [edx-1032]{1to32}
vcmpneq_usph k5, zmm6, zmm5
vcmpneq_usph k5{k7}, zmm6, zmm5
vcmpneq_usph k5, zmm6, zmm5, {sae}
vcmpneq_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_usph k5, zmm6, [eax]{1to32}
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_usph k5, zmm6, [edx+1016]{1to32}
vcmpneq_usph k5, zmm6, [edx+1024]{1to32}
vcmpneq_usph k5, zmm6, [edx-1024]{1to32}
vcmpneq_usph k5, zmm6, [edx-1032]{1to32}
vcmpnlt_uqph k5, zmm6, zmm5
vcmpnlt_uqph k5{k7}, zmm6, zmm5
vcmpnlt_uqph k5, zmm6, zmm5, {sae}
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnlt_uqph k5, zmm6, [eax]{1to32}
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnlt_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnlt_uqph k5, zmm6, [edx+1016]{1to32}
vcmpnlt_uqph k5, zmm6, [edx+1024]{1to32}
vcmpnlt_uqph k5, zmm6, [edx-1024]{1to32}
vcmpnlt_uqph k5, zmm6, [edx-1032]{1to32}
vcmpnle_uqph k5, zmm6, zmm5
vcmpnle_uqph k5{k7}, zmm6, zmm5
vcmpnle_uqph k5, zmm6, zmm5, {sae}
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnle_uqph k5, zmm6, [eax]{1to32}
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnle_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnle_uqph k5, zmm6, [edx+1016]{1to32}
vcmpnle_uqph k5, zmm6, [edx+1024]{1to32}
vcmpnle_uqph k5, zmm6, [edx-1024]{1to32}
vcmpnle_uqph k5, zmm6, [edx-1032]{1to32}
vcmpord_sph k5, zmm6, zmm5
vcmpord_sph k5{k7}, zmm6, zmm5
vcmpord_sph k5, zmm6, zmm5, {sae}
vcmpord_sph k5, zmm6, ZMMWORD PTR [ecx]
vcmpord_sph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpord_sph k5, zmm6, [eax]{1to32}
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpord_sph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpord_sph k5, zmm6, [edx+1016]{1to32}
vcmpord_sph k5, zmm6, [edx+1024]{1to32}
vcmpord_sph k5, zmm6, [edx-1024]{1to32}
vcmpord_sph k5, zmm6, [edx-1032]{1to32}
vcmpeq_usph k5, zmm6, zmm5
vcmpeq_usph k5{k7}, zmm6, zmm5
vcmpeq_usph k5, zmm6, zmm5, {sae}
vcmpeq_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpeq_usph k5, zmm6, [eax]{1to32}
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpeq_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpeq_usph k5, zmm6, [edx+1016]{1to32}
vcmpeq_usph k5, zmm6, [edx+1024]{1to32}
vcmpeq_usph k5, zmm6, [edx-1024]{1to32}
vcmpeq_usph k5, zmm6, [edx-1032]{1to32}
vcmpnge_uqph k5, zmm6, zmm5
vcmpnge_uqph k5{k7}, zmm6, zmm5
vcmpnge_uqph k5, zmm6, zmm5, {sae}
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpnge_uqph k5, zmm6, [eax]{1to32}
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpnge_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpnge_uqph k5, zmm6, [edx+1016]{1to32}
vcmpnge_uqph k5, zmm6, [edx+1024]{1to32}
vcmpnge_uqph k5, zmm6, [edx-1024]{1to32}
vcmpnge_uqph k5, zmm6, [edx-1032]{1to32}
vcmpngt_uqph k5, zmm6, zmm5
vcmpngt_uqph k5{k7}, zmm6, zmm5
vcmpngt_uqph k5, zmm6, zmm5, {sae}
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpngt_uqph k5, zmm6, [eax]{1to32}
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpngt_uqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpngt_uqph k5, zmm6, [edx+1016]{1to32}
vcmpngt_uqph k5, zmm6, [edx+1024]{1to32}
vcmpngt_uqph k5, zmm6, [edx-1024]{1to32}
vcmpngt_uqph k5, zmm6, [edx-1032]{1to32}
vcmpfalse_osph k5, zmm6, zmm5
vcmpfalse_osph k5{k7}, zmm6, zmm5
vcmpfalse_osph k5, zmm6, zmm5, {sae}
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpfalse_osph k5, zmm6, [eax]{1to32}
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpfalse_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpfalse_osph k5, zmm6, [edx+1016]{1to32}
vcmpfalse_osph k5, zmm6, [edx+1024]{1to32}
vcmpfalse_osph k5, zmm6, [edx-1024]{1to32}
vcmpfalse_osph k5, zmm6, [edx-1032]{1to32}
vcmpneq_osph k5, zmm6, zmm5
vcmpneq_osph k5{k7}, zmm6, zmm5
vcmpneq_osph k5, zmm6, zmm5, {sae}
vcmpneq_osph k5, zmm6, ZMMWORD PTR [ecx]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpneq_osph k5, zmm6, [eax]{1to32}
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpneq_osph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpneq_osph k5, zmm6, [edx+1016]{1to32}
vcmpneq_osph k5, zmm6, [edx+1024]{1to32}
vcmpneq_osph k5, zmm6, [edx-1024]{1to32}
vcmpneq_osph k5, zmm6, [edx-1032]{1to32}
vcmpge_oqph k5, zmm6, zmm5
vcmpge_oqph k5{k7}, zmm6, zmm5
vcmpge_oqph k5, zmm6, zmm5, {sae}
vcmpge_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpge_oqph k5, zmm6, [eax]{1to32}
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpge_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpge_oqph k5, zmm6, [edx+1016]{1to32}
vcmpge_oqph k5, zmm6, [edx+1024]{1to32}
vcmpge_oqph k5, zmm6, [edx-1024]{1to32}
vcmpge_oqph k5, zmm6, [edx-1032]{1to32}
vcmpgt_oqph k5, zmm6, zmm5
vcmpgt_oqph k5{k7}, zmm6, zmm5
vcmpgt_oqph k5, zmm6, zmm5, {sae}
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [ecx]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmpgt_oqph k5, zmm6, [eax]{1to32}
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmpgt_oqph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmpgt_oqph k5, zmm6, [edx+1016]{1to32}
vcmpgt_oqph k5, zmm6, [edx+1024]{1to32}
vcmpgt_oqph k5, zmm6, [edx-1024]{1to32}
vcmpgt_oqph k5, zmm6, [edx-1032]{1to32}
vcmptrue_usph k5, zmm6, zmm5
vcmptrue_usph k5{k7}, zmm6, zmm5
vcmptrue_usph k5, zmm6, zmm5, {sae}
vcmptrue_usph k5, zmm6, ZMMWORD PTR [ecx]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]
vcmptrue_usph k5, zmm6, [eax]{1to32}
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx+8128]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx+8192]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx-8192]
vcmptrue_usph k5, zmm6, ZMMWORD PTR [edx-8256]
vcmptrue_usph k5, zmm6, [edx+1016]{1to32}
vcmptrue_usph k5, zmm6, [edx+1024]{1to32}
vcmptrue_usph k5, zmm6, [edx-1024]{1to32}
vcmptrue_usph k5, zmm6, [edx-1032]{1to32}
vcmpeq_oqsh k5{k7}, xmm5, xmm4
vcmpeq_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeqsh k5{k7}, xmm5, xmm4
vcmpeqsh k5{k7}, xmm5, xmm4, {sae}
vcmpeqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmplt_ossh k5{k7}, xmm5, xmm4
vcmplt_ossh k5{k7}, xmm5, xmm4, {sae}
vcmplt_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmplt_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpltsh k5{k7}, xmm5, xmm4
vcmpltsh k5{k7}, xmm5, xmm4, {sae}
vcmpltsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpltsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpltsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmple_ossh k5{k7}, xmm5, xmm4
vcmple_ossh k5{k7}, xmm5, xmm4, {sae}
vcmple_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmple_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmple_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmplesh k5{k7}, xmm5, xmm4
vcmplesh k5{k7}, xmm5, xmm4, {sae}
vcmplesh k5{k7}, xmm5, WORD PTR [ecx]
vcmplesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmplesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmplesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmplesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmplesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpunord_qsh k5{k7}, xmm5, xmm4
vcmpunord_qsh k5{k7}, xmm5, xmm4, {sae}
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpunord_qsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpunordsh k5{k7}, xmm5, xmm4
vcmpunordsh k5{k7}, xmm5, xmm4, {sae}
vcmpunordsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpunordsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpunordsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_uqsh k5{k7}, xmm5, xmm4
vcmpneq_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneqsh k5{k7}, xmm5, xmm4
vcmpneqsh k5{k7}, xmm5, xmm4, {sae}
vcmpneqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnlt_ussh k5{k7}, xmm5, xmm4
vcmpnlt_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnlt_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnltsh k5{k7}, xmm5, xmm4
vcmpnltsh k5{k7}, xmm5, xmm4, {sae}
vcmpnltsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnltsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnltsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnle_ussh k5{k7}, xmm5, xmm4
vcmpnle_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnle_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnlesh k5{k7}, xmm5, xmm4
vcmpnlesh k5{k7}, xmm5, xmm4, {sae}
vcmpnlesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnlesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnlesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpord_qsh k5{k7}, xmm5, xmm4
vcmpord_qsh k5{k7}, xmm5, xmm4, {sae}
vcmpord_qsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpord_qsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpordsh k5{k7}, xmm5, xmm4
vcmpordsh k5{k7}, xmm5, xmm4, {sae}
vcmpordsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpordsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpordsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeq_uqsh k5{k7}, xmm5, xmm4
vcmpeq_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnge_ussh k5{k7}, xmm5, xmm4
vcmpnge_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnge_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngesh k5{k7}, xmm5, xmm4
vcmpngesh k5{k7}, xmm5, xmm4, {sae}
vcmpngesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngt_ussh k5{k7}, xmm5, xmm4
vcmpngt_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngt_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngtsh k5{k7}, xmm5, xmm4
vcmpngtsh k5{k7}, xmm5, xmm4, {sae}
vcmpngtsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngtsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngtsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpfalse_oqsh k5{k7}, xmm5, xmm4
vcmpfalse_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpfalse_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpfalsesh k5{k7}, xmm5, xmm4
vcmpfalsesh k5{k7}, xmm5, xmm4, {sae}
vcmpfalsesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpfalsesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_oqsh k5{k7}, xmm5, xmm4
vcmpneq_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpge_ossh k5{k7}, xmm5, xmm4
vcmpge_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpge_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpge_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgesh k5{k7}, xmm5, xmm4
vcmpgesh k5{k7}, xmm5, xmm4, {sae}
vcmpgesh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgt_ossh k5{k7}, xmm5, xmm4
vcmpgt_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgt_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgtsh k5{k7}, xmm5, xmm4
vcmpgtsh k5{k7}, xmm5, xmm4, {sae}
vcmpgtsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgtsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgtsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmptrue_uqsh k5{k7}, xmm5, xmm4
vcmptrue_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmptrue_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmptruesh k5{k7}, xmm5, xmm4
vcmptruesh k5{k7}, xmm5, xmm4, {sae}
vcmptruesh k5{k7}, xmm5, WORD PTR [ecx]
vcmptruesh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmptruesh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeq_ossh k5{k7}, xmm5, xmm4
vcmpeq_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmplt_oqsh k5{k7}, xmm5, xmm4
vcmplt_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmplt_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmple_oqsh k5{k7}, xmm5, xmm4
vcmple_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmple_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmple_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpunord_ssh k5{k7}, xmm5, xmm4
vcmpunord_ssh k5{k7}, xmm5, xmm4, {sae}
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [ecx]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpunord_ssh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_ussh k5{k7}, xmm5, xmm4
vcmpneq_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnlt_uqsh k5{k7}, xmm5, xmm4
vcmpnlt_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnlt_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnle_uqsh k5{k7}, xmm5, xmm4
vcmpnle_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnle_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpord_ssh k5{k7}, xmm5, xmm4
vcmpord_ssh k5{k7}, xmm5, xmm4, {sae}
vcmpord_ssh k5{k7}, xmm5, WORD PTR [ecx]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpord_ssh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpeq_ussh k5{k7}, xmm5, xmm4
vcmpeq_ussh k5{k7}, xmm5, xmm4, {sae}
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpeq_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpnge_uqsh k5{k7}, xmm5, xmm4
vcmpnge_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpnge_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpngt_uqsh k5{k7}, xmm5, xmm4
vcmpngt_uqsh k5{k7}, xmm5, xmm4, {sae}
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpngt_uqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpfalse_ossh k5{k7}, xmm5, xmm4
vcmpfalse_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpfalse_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpneq_ossh k5{k7}, xmm5, xmm4
vcmpneq_ossh k5{k7}, xmm5, xmm4, {sae}
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [ecx]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpneq_ossh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpge_oqsh k5{k7}, xmm5, xmm4
vcmpge_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpge_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmpgt_oqsh k5{k7}, xmm5, xmm4
vcmpgt_oqsh k5{k7}, xmm5, xmm4, {sae}
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [ecx]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmpgt_oqsh k5{k7}, xmm5, WORD PTR [edx-1032]
vcmptrue_ussh k5{k7}, xmm5, xmm4
vcmptrue_ussh k5{k7}, xmm5, xmm4, {sae}
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [ecx]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx+1016]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx+1024]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx-1024]
vcmptrue_ussh k5{k7}, xmm5, WORD PTR [edx-1032]
# ---- end of preceding test file (AVX512-FP16 vcmp tests, Intel syntax) ----
# Next file: gas/testsuite/gas/i386/evex-lig.s
# (from tactcomplabs/xbgas-binutils-gdb, 166,966 bytes)
# Check EVEX LIG instructions
.allow_index_reg
.text
_start:
vaddsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vaddsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vaddsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vaddsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vaddsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vaddss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vaddss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vaddss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vaddss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vaddss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vcmpsd $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $123, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $123, (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpsd $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpsd $123, 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpsd $123, 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpsd $123, -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpsd $123, -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpltsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpltsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmple_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmple_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmplesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmplesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmplesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmplesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunordsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunordsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnltsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnltsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
# Scalar double-precision (SD) VCMP pseudo-op coverage (mask-register
# destination %k5 with zero-free merging mask {%k7}).  Each mnemonic group
# exercises the same operand shapes: reg/reg, reg/reg with {sae}, base-only
# memory, SIB + disp32, and the compressed-displacement (Disp8*N) boundaries:
# 1016 = 127*8 and -1024 = -128*8 still encode as Disp8 (per the "Disp8"
# markers), while 1024 / -1032 fall outside and need a full disp32.
# NOTE(review): the first group (vcmpnle_ussd) starts before this chunk; its
# reg/reg form is above.
vcmpnle_ussd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_ussd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_ussd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_ussd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_ussd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_ussd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlesd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlesd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlesd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlesd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnlesd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlesd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlesd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlesd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_qsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_qsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_qsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_qsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpordsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpordsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpordsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpordsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpordsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpordsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpordsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_uqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_uqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_ussd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_ussd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_ussd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_ussd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_ussd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_ussd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_ussd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngesd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngesd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngesd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngesd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngesd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngesd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngesd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngesd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_ussd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_ussd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_ussd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_ussd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_ussd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_ussd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_ussd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngtsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngtsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngtsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngtsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngtsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngtsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngtsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_oqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_oqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsesd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalsesd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalsesd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsesd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsesd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalsesd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsesd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalsesd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_oqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_oqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_ossd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_ossd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_ossd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpge_ossd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_ossd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_ossd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_ossd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgesd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgesd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgesd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgesd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgesd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgesd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgesd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgesd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_ossd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_ossd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_ossd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_ossd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_ossd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_ossd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_ossd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgtsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgtsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgtsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgtsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgtsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgtsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgtsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_uqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_uqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptruesd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptruesd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptruesd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmptruesd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmptruesd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptruesd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptruesd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptruesd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ossd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ossd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ossd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ossd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_ossd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ossd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_ossd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmplt_oqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmplt_oqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmple_oqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmple_oqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmple_oqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmple_oqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_ssd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunord_ssd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunord_ssd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_ssd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunord_ssd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_ssd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunord_ssd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ussd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ussd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ussd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ussd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_ussd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ussd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_ussd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlt_uqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlt_uqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_uqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_uqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_ssd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_ssd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_ssd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpord_ssd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_ssd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_ssd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_ssd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ussd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ussd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ussd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ussd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_ussd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_ussd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_ussd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_uqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_uqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_uqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_uqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_ossd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_ossd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_ossd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_ossd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_ossd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_ossd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_ossd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ossd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ossd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ossd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ossd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_ossd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_ossd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_ossd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_oqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_oqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqsd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqsd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqsd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_oqsd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqsd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_oqsd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_ussd %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_ussd {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_ussd (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_ussd 1016(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_ussd 1024(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_ussd -1024(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_ussd -1032(%edx), %xmm5, %k5{%k7}	 # AVX512
# Scalar single-precision (SS) VCMP coverage.  First the explicit-imm8 vcmpss
# forms ($0xab and $123 predicates), then one 8-line group per predicate
# pseudo-op mnemonic.  Memory operands probe the 4-byte Disp8*N boundaries:
# 508 = 127*4 and -512 = -128*4 still compress to Disp8 (per the "Disp8"
# markers); 512 / -516 require a full disp32.
vcmpss $0xab, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpss $123, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpss $123, (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpss $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpss $123, 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpss $123, 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpss $123, -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpss $123, -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmplt_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmplt_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmplt_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmplt_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmplt_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpltss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpltss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpltss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpltss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpltss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpltss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpltss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpltss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmple_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmple_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmple_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmple_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmple_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmple_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmple_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmple_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpless %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpless {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpless (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpless -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpless 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpless 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpless -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpless -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_qss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunord_qss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunord_qss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_qss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunord_qss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_qss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunord_qss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunordss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunordss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunordss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpunordss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpunordss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunordss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunordss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunordss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlt_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlt_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnltss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnltss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnltss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnltss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnltss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnltss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnltss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnltss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnless %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnless {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnless (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnless -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnless 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnless 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnless -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnless -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_qss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_qss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_qss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_qss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_qss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpordss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpordss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpordss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpordss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpordss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpordss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpordss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpordss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngess %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngess (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngtss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngtss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngtss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngtss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngtss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngtss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngtss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngtss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsess %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalsess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalsess (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalsess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalsess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalsess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpge_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgess %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgess (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgtss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgtss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgtss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgtss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgtss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgtss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgtss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgtss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptruess %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptruess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptruess (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmptruess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmptruess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptruess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptruess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptruess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmplt_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmplt_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmplt_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmple_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmple_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmple_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmple_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmple_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_sss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunord_sss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpunord_sss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_sss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunord_sss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpunord_sss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpunord_sss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlt_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnlt_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnlt_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnle_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnle_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_sss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_sss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpord_sss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpord_sss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_sss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpord_sss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpord_sss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpeq_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpeq_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpeq_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpnge_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpnge_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpngt_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpngt_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpfalse_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpfalse_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpneq_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpneq_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpneq_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpge_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpge_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmpgt_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmpgt_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
vcmptrue_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
vcmptrue_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
vcmptrue_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
# Scalar compare/convert instructions with EVEX SAE and embedded rounding
# control.  vcomisd/vcomiss take only {sae}; the int<->float converts cycle
# through all four rounding modes ({rn,ru,rd,rz}-sae), each against both a
# low GPR (%eax) and one needing a different ModRM encoding (%ebp).
# vcvtsd2ss/vcvtss2sd groups also cover masking {%k7}, zero-masking {z},
# and the Disp8*N memory boundaries (8-byte scale for the SD source,
# 4-byte scale for the SS source, per the "Disp8" markers).
vcomisd {sae}, %xmm5, %xmm6	 # AVX512
vcomiss {sae}, %xmm5, %xmm6	 # AVX512
vcvtsd2si {rn-sae}, %xmm6, %eax	 # AVX512
vcvtsd2si {ru-sae}, %xmm6, %eax	 # AVX512
vcvtsd2si {rd-sae}, %xmm6, %eax	 # AVX512
vcvtsd2si {rz-sae}, %xmm6, %eax	 # AVX512
vcvtsd2si {rn-sae}, %xmm6, %ebp	 # AVX512
vcvtsd2si {ru-sae}, %xmm6, %ebp	 # AVX512
vcvtsd2si {rd-sae}, %xmm6, %ebp	 # AVX512
vcvtsd2si {rz-sae}, %xmm6, %ebp	 # AVX512
vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
vcvtsd2ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
vcvtsd2ss 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
vcvtsd2ss -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
vcvtsd2ss -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
vcvtsi2ssl %eax, {rn-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %eax, {ru-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %eax, {rd-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %eax, {rz-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %ebp, {rn-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %ebp, {ru-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %ebp, {rd-sae}, %xmm5, %xmm6	 # AVX512
vcvtsi2ssl %ebp, {rz-sae}, %xmm5, %xmm6	 # AVX512
vcvtss2sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtss2sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
vcvtss2sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
vcvtss2sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
vcvtss2sd 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
vcvtss2sd 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
vcvtss2sd -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
vcvtss2sd -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
vcvtss2si {rn-sae}, %xmm6, %eax	 # AVX512
vcvtss2si {ru-sae}, %xmm6, %eax	 # AVX512
vcvtss2si {rd-sae}, %xmm6, %eax	 # AVX512
vcvtss2si {rz-sae}, %xmm6, %eax	 # AVX512
vcvtss2si {rn-sae}, %xmm6, %ebp	 # AVX512
vcvtss2si {ru-sae}, %xmm6, %ebp	 # AVX512
vcvtss2si {rd-sae}, %xmm6, %ebp	 # AVX512
vcvtss2si {rz-sae}, %xmm6, %ebp	 # AVX512
vcvttsd2si {sae}, %xmm6, %eax	 # AVX512
vcvttsd2si {sae}, %xmm6, %ebp	 # AVX512
vcvttss2si {sae}, %xmm6, %eax	 # AVX512
vcvttss2si {sae}, %xmm6, %ebp	 # AVX512
vdivsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vdivsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vdivsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vdivsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vdivsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vdivsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vdivsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vdivss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vdivss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vdivss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vdivss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vdivss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vdivss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vdivss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vdivss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmadd231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmadd231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfmsub231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfmsub231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmadd231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmadd231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfnmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfnmsub231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfnmsub231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetexpsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetexpsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetexpsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vgetexpsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vgetexpsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetexpsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetexpsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetexpsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetexpss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetexpss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetexpss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vgetexpss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vgetexpss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetexpss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetexpss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetexpss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetmantsd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetmantsd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetmantsd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vgetmantss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantss $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vgetmantss $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vgetmantss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vgetmantss $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetmantss $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vgetmantss $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vgetmantss $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vmaxsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmaxsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmaxsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmaxsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vmaxsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vmaxsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmaxsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vmaxsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmaxsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vmaxss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmaxss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmaxss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmaxss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vmaxss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vmaxss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmaxss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vmaxss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmaxss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vminsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vminsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vminsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vminsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vminsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vminsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vminsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vminsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vminsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vminss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vminss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vminss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vminss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vminss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vminss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vminss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vminss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vminss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vmovsd (%ecx), %xmm6{%k7} # AVX512
vmovsd (%ecx), %xmm6{%k7}{z} # AVX512
vmovsd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512
vmovsd 1016(%edx), %xmm6{%k7} # AVX512 Disp8
vmovsd 1024(%edx), %xmm6{%k7} # AVX512
vmovsd -1024(%edx), %xmm6{%k7} # AVX512 Disp8
vmovsd -1032(%edx), %xmm6{%k7} # AVX512
vmovsd %xmm6, (%ecx){%k7} # AVX512
vmovsd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512
vmovsd %xmm6, 1016(%edx){%k7} # AVX512 Disp8
vmovsd %xmm6, 1024(%edx){%k7} # AVX512
vmovsd %xmm6, -1024(%edx){%k7} # AVX512 Disp8
vmovsd %xmm6, -1032(%edx){%k7} # AVX512
vmovsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmovsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmovss (%ecx), %xmm6{%k7} # AVX512
vmovss (%ecx), %xmm6{%k7}{z} # AVX512
vmovss -123456(%esp,%esi,8), %xmm6{%k7} # AVX512
vmovss 508(%edx), %xmm6{%k7} # AVX512 Disp8
vmovss 512(%edx), %xmm6{%k7} # AVX512
vmovss -512(%edx), %xmm6{%k7} # AVX512 Disp8
vmovss -516(%edx), %xmm6{%k7} # AVX512
vmovss %xmm6, (%ecx){%k7} # AVX512
vmovss %xmm6, -123456(%esp,%esi,8){%k7} # AVX512
vmovss %xmm6, 508(%edx){%k7} # AVX512 Disp8
vmovss %xmm6, 512(%edx){%k7} # AVX512
vmovss %xmm6, -512(%edx){%k7} # AVX512 Disp8
vmovss %xmm6, -516(%edx){%k7} # AVX512
vmovss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmovss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmulsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmulsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vmulsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vmulsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vmulsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vmulss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmulss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vmulss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vmulss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vmulss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrcp14sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrcp14sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrcp14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrcp14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrcp28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrcp28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt14sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrsqrt14sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrsqrt14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrsqrt28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrsqrt28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vsqrtsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsqrtsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsqrtsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsqrtsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vsqrtsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vsqrtss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsqrtss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsqrtss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsqrtss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vsqrtss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsubsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsubsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsubsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsubss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss (%ecx), %xmm5, %xmm6{%k7} # AVX512
# NOTE(review): assembler testsuite input — each line is a fixed test vector,
# presumably matched against a companion dump of expected encodings, so the
# instruction text must stay byte-identical; do not reformat — TODO confirm.
# "Disp8" marks displacements chosen to fit the AVX512 EVEX compressed
# disp8*N encoding; the neighboring non-Disp8 lines use displacements just
# past that range to force a full 32-bit displacement.
vsubss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsubss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
# Scalar compares with suppress-all-exceptions ({sae}) qualifier.
vucomisd {sae}, %xmm5, %xmm6 # AVX512
vucomiss {sae}, %xmm5, %xmm6 # AVX512
# Scalar double -> unsigned int conversions, covering all static rounding
# modes ({rn,ru,rd,rz}-sae) and memory forms against both %eax and %ebp
# (the latter exercises a base register requiring a ModRM displacement).
vcvtsd2usi %xmm6, %eax # AVX512
vcvtsd2usi {rn-sae}, %xmm6, %eax # AVX512
vcvtsd2usi {ru-sae}, %xmm6, %eax # AVX512
vcvtsd2usi {rd-sae}, %xmm6, %eax # AVX512
vcvtsd2usi {rz-sae}, %xmm6, %eax # AVX512
vcvtsd2usi (%ecx), %eax # AVX512
vcvtsd2usi -123456(%esp,%esi,8), %eax # AVX512
vcvtsd2usi 1016(%edx), %eax # AVX512 Disp8
vcvtsd2usi 1024(%edx), %eax # AVX512
vcvtsd2usi -1024(%edx), %eax # AVX512 Disp8
vcvtsd2usi -1032(%edx), %eax # AVX512
vcvtsd2usi %xmm6, %ebp # AVX512
vcvtsd2usi {rn-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi {ru-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi {rd-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi {rz-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi (%ecx), %ebp # AVX512
vcvtsd2usi -123456(%esp,%esi,8), %ebp # AVX512
vcvtsd2usi 1016(%edx), %ebp # AVX512 Disp8
vcvtsd2usi 1024(%edx), %ebp # AVX512
vcvtsd2usi -1024(%edx), %ebp # AVX512 Disp8
vcvtsd2usi -1032(%edx), %ebp # AVX512
# Scalar single -> unsigned int conversions (disp8*N granularity is 4 here).
vcvtss2usi %xmm6, %eax # AVX512
vcvtss2usi {rn-sae}, %xmm6, %eax # AVX512
vcvtss2usi {ru-sae}, %xmm6, %eax # AVX512
vcvtss2usi {rd-sae}, %xmm6, %eax # AVX512
vcvtss2usi {rz-sae}, %xmm6, %eax # AVX512
vcvtss2usi (%ecx), %eax # AVX512
vcvtss2usi -123456(%esp,%esi,8), %eax # AVX512
vcvtss2usi 508(%edx), %eax # AVX512 Disp8
vcvtss2usi 512(%edx), %eax # AVX512
vcvtss2usi -512(%edx), %eax # AVX512 Disp8
vcvtss2usi -516(%edx), %eax # AVX512
vcvtss2usi %xmm6, %ebp # AVX512
vcvtss2usi {rn-sae}, %xmm6, %ebp # AVX512
vcvtss2usi {ru-sae}, %xmm6, %ebp # AVX512
vcvtss2usi {rd-sae}, %xmm6, %ebp # AVX512
vcvtss2usi {rz-sae}, %xmm6, %ebp # AVX512
vcvtss2usi (%ecx), %ebp # AVX512
vcvtss2usi -123456(%esp,%esi,8), %ebp # AVX512
vcvtss2usi 508(%edx), %ebp # AVX512 Disp8
vcvtss2usi 512(%edx), %ebp # AVX512
vcvtss2usi -512(%edx), %ebp # AVX512 Disp8
vcvtss2usi -516(%edx), %ebp # AVX512
# Unsigned int -> scalar double conversions (32-bit operand suffix 'l').
vcvtusi2sdl %eax, %xmm5, %xmm6 # AVX512
vcvtusi2sdl %ebp, %xmm5, %xmm6 # AVX512
vcvtusi2sdl (%ecx), %xmm5, %xmm6 # AVX512
vcvtusi2sdl -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512
vcvtusi2sdl 508(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2sdl 512(%edx), %xmm5, %xmm6 # AVX512
vcvtusi2sdl -512(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2sdl -516(%edx), %xmm5, %xmm6 # AVX512
# Unsigned int -> scalar single; register forms also take static rounding.
vcvtusi2ssl %eax, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {rn-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {ru-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {rd-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {rz-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {rn-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {ru-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {rd-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {rz-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl (%ecx), %xmm5, %xmm6 # AVX512
vcvtusi2ssl -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512
vcvtusi2ssl 508(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2ssl 512(%edx), %xmm5, %xmm6 # AVX512
vcvtusi2ssl -512(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2ssl -516(%edx), %xmm5, %xmm6 # AVX512
# vscalefsd/vscalefss with masking {%k7}, zero-masking {z}, rounding, memory.
vscalefsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vscalefsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vscalefsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vscalefsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vscalefsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vscalefss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vscalefss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vscalefss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vscalefss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vscalefss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
# Immediate-carrying scalar ops: both 0xab and 123 immediates are tested.
vfixupimmss $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfixupimmss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmss $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmss $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfixupimmsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmsd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmsd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrndscalesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscalesd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscalesd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrndscaless $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscaless $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscaless $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512
# AVX512-FP16 scalar compare / classify (disp8*N granularity is 2 for FP16).
vcmpsh $123, %xmm4, %xmm5, %k5 # AVX512-FP16
vcmpsh $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512-FP16
vcmpsh $123, (%ecx), %xmm5, %k5 # AVX512-FP16
vcmpsh $123, -123456(%esp, %esi, 8), %xmm5, %k5{%k7} # AVX512-FP16
vcmpsh $123, 254(%ecx), %xmm5, %k5 # AVX512-FP16 Disp8
vcmpsh $123, -256(%edx), %xmm5, %k5{%k7} # AVX512-FP16 Disp8
vfpclasssh $123, %xmm4, %k5 # AVX512-FP16
vfpclasssh $123, (%ecx), %k5 # AVX512-FP16
vfpclasssh $123, -123456(%esp, %esi, 8), %k5{%k7} # AVX512-FP16
vfpclasssh $123, 254(%ecx), %k5 # AVX512-FP16 Disp8
vfpclasssh $123, -256(%edx), %k5{%k7} # AVX512-FP16 Disp8
# Switch the assembler to Intel operand order with bare (unprefixed) register
# names; the remaining test vectors repeat AVX512 scalar coverage in Intel
# syntax, where {sae}/rounding qualifiers trail the operand list.
# NOTE(review): test fixture — instruction text must remain byte-identical,
# presumably to match a companion expected-output dump; TODO confirm.
.intel_syntax noprefix
# vaddsd/vaddss: masking, zero-masking, static rounding, and memory operands
# probing the compressed-disp8 boundary (8-byte granularity for sd, 4 for ss).
vaddsd xmm6{k7}, xmm5, xmm4 # AVX512
vaddsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vaddss xmm6{k7}, xmm5, xmm4 # AVX512
vaddss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# vcmpsd with an explicit immediate predicate, then one group per pseudo-op
# predicate mnemonic (eq_oq, eq, lt_os, lt, ..., true_us) covering register,
# {sae}, and memory forms for each.
vcmpsd k5{k7}, xmm5, xmm4, 0xab # AVX512
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vcmpsd k5{k7}, xmm5, xmm4, 123 # AVX512
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vcmpeq_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpeqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpeqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmplt_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmplt_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpltsd k5{k7}, xmm5, xmm4 # AVX512
vcmpltsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmple_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmple_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmplesd k5{k7}, xmm5, xmm4 # AVX512
vcmplesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpunord_qsd k5{k7}, xmm5, xmm4 # AVX512
vcmpunord_qsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpunordsd k5{k7}, xmm5, xmm4 # AVX512
vcmpunordsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpneqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpnlt_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnltsd k5{k7}, xmm5, xmm4 # AVX512
vcmpnltsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnle_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpnle_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnlesd k5{k7}, xmm5, xmm4 # AVX512
vcmpnlesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpord_qsd k5{k7}, xmm5, xmm4 # AVX512
vcmpord_qsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpordsd k5{k7}, xmm5, xmm4 # AVX512
vcmpordsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnge_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpnge_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngesd k5{k7}, xmm5, xmm4 # AVX512
vcmpngesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngt_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpngt_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngtsd k5{k7}, xmm5, xmm4 # AVX512
vcmpngtsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpfalsesd k5{k7}, xmm5, xmm4 # AVX512
vcmpfalsesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpge_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmpge_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpgesd k5{k7}, xmm5, xmm4 # AVX512
vcmpgesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpgt_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmpgt_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpgtsd k5{k7}, xmm5, xmm4 # AVX512
vcmpgtsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmptrue_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmptruesd k5{k7}, xmm5, xmm4 # AVX512
vcmptruesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpeq_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmplt_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmplt_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmple_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmple_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpunord_ssd k5{k7}, xmm5, xmm4 # AVX512
vcmpunord_ssd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneq_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnle_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpord_ssd k5{k7}, xmm5, xmm4 # AVX512
vcmpord_ssd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpeq_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnge_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngt_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpfalse_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneq_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpge_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpge_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpgt_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmptrue_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmptrue_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
# vcmpss: same pattern as vcmpsd above, with DWORD memory operands and the
# 4-byte disp8*N boundary (508/512).
vcmpss k5{k7}, xmm5, xmm4, 0xab # AVX512
vcmpss k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vcmpss k5{k7}, xmm5, xmm4, 123 # AVX512
vcmpss k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vcmpeq_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpeqss k5{k7}, xmm5, xmm4 # AVX512
vcmpeqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmplt_osss k5{k7}, xmm5, xmm4 # AVX512
vcmplt_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpltss k5{k7}, xmm5, xmm4 # AVX512
vcmpltss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmple_osss k5{k7}, xmm5, xmm4 # AVX512
vcmple_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpless k5{k7}, xmm5, xmm4 # AVX512
vcmpless k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpunord_qss k5{k7}, xmm5, xmm4 # AVX512
vcmpunord_qss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpunordss k5{k7}, xmm5, xmm4 # AVX512
vcmpunordss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpneq_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpneqss k5{k7}, xmm5, xmm4 # AVX512
vcmpneqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnlt_usss k5{k7}, xmm5, xmm4 # AVX512
vcmpnlt_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnltss k5{k7}, xmm5, xmm4 # AVX512
vcmpnltss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnle_usss k5{k7}, xmm5, xmm4 # AVX512
vcmpnle_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnless k5{k7}, xmm5, xmm4 # AVX512
vcmpnless k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpord_qss k5{k7}, xmm5, xmm4 # AVX512
vcmpord_qss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpordss k5{k7}, xmm5, xmm4 # AVX512
vcmpordss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpeq_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnge_usss k5{k7}, xmm5, xmm4 # AVX512
vcmpnge_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpngess k5{k7}, xmm5, xmm4 # AVX512
vcmpngess k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpngt_usss k5{k7}, xmm5, xmm4 # AVX512
vcmpngt_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpngtss k5{k7}, xmm5, xmm4 # AVX512
vcmpngtss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpfalse_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpfalsess k5{k7}, xmm5, xmm4 # AVX512
vcmpfalsess k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpneq_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpge_osss k5{k7}, xmm5, xmm4 # AVX512
vcmpge_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpgess k5{k7}, xmm5, xmm4 # AVX512
vcmpgess k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpgt_osss k5{k7}, xmm5, xmm4 # AVX512
vcmpgt_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpgtss k5{k7}, xmm5, xmm4 # AVX512
vcmpgtss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmptrue_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmptrue_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmptruess k5{k7}, xmm5, xmm4 # AVX512
vcmptruess k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpeq_osss k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmplt_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmplt_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmple_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmple_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpunord_sss k5{k7}, xmm5, xmm4 # AVX512
vcmpunord_sss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpneq_usss k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnlt_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnle_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmpnle_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpord_sss k5{k7}, xmm5, xmm4 # AVX512
vcmpord_sss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpeq_usss k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpnge_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmpnge_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpngt_uqss k5{k7}, xmm5, xmm4 # AVX512
vcmpngt_uqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpfalse_osss k5{k7}, xmm5, xmm4 # AVX512
vcmpfalse_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpneq_osss k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_osss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpge_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmpge_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpgt_oqss k5{k7}, xmm5, xmm4 # AVX512
vcmpgt_oqss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmptrue_usss k5{k7}, xmm5, xmm4 # AVX512
vcmptrue_usss k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# --- scalar compare-flags / convert instructions (AVX512F) ------------------
# vcomis* with {sae}; scalar int<->fp converts with each static rounding
# mode ({rn,ru,rd,rz}-sae) against both a REX-free (eax) and a high-encoded
# (ebp) GPR; memory forms straddle the Disp8*N compression boundary
# (QWORD: +/-1016/1024, DWORD: +/-508/512).
# NOTE(review): test fixture — instruction text must stay byte-identical to
# match the paired expected-disassembly (.d) file.
vcomisd xmm6, xmm5, {sae} # AVX512
vcomiss xmm6, xmm5, {sae} # AVX512
vcvtsd2si eax, xmm6, {rn-sae} # AVX512
vcvtsd2si eax, xmm6, {ru-sae} # AVX512
vcvtsd2si eax, xmm6, {rd-sae} # AVX512
vcvtsd2si eax, xmm6, {rz-sae} # AVX512
vcvtsd2si ebp, xmm6, {rn-sae} # AVX512
vcvtsd2si ebp, xmm6, {ru-sae} # AVX512
vcvtsd2si ebp, xmm6, {rd-sae} # AVX512
vcvtsd2si ebp, xmm6, {rz-sae} # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4 # AVX512
vcvtsd2ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcvtsi2ss xmm6, xmm5, {rn-sae}, eax # AVX512
vcvtsi2ss xmm6, xmm5, {ru-sae}, eax # AVX512
vcvtsi2ss xmm6, xmm5, {rd-sae}, eax # AVX512
vcvtsi2ss xmm6, xmm5, {rz-sae}, eax # AVX512
vcvtsi2ss xmm6, xmm5, {rn-sae}, ebp # AVX512
vcvtsi2ss xmm6, xmm5, {ru-sae}, ebp # AVX512
vcvtsi2ss xmm6, xmm5, {rd-sae}, ebp # AVX512
vcvtsi2ss xmm6, xmm5, {rz-sae}, ebp # AVX512
vcvtss2sd xmm6{k7}, xmm5, xmm4 # AVX512
vcvtss2sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vcvtss2sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcvtss2si eax, xmm6, {rn-sae} # AVX512
vcvtss2si eax, xmm6, {ru-sae} # AVX512
vcvtss2si eax, xmm6, {rd-sae} # AVX512
vcvtss2si eax, xmm6, {rz-sae} # AVX512
vcvtss2si ebp, xmm6, {rn-sae} # AVX512
vcvtss2si ebp, xmm6, {ru-sae} # AVX512
vcvtss2si ebp, xmm6, {rd-sae} # AVX512
vcvtss2si ebp, xmm6, {rz-sae} # AVX512
vcvttsd2si eax, xmm6, {sae} # AVX512
vcvttsd2si ebp, xmm6, {sae} # AVX512
vcvttss2si eax, xmm6, {sae} # AVX512
vcvttss2si ebp, xmm6, {sae} # AVX512
# --- vdivsd / vdivss (AVX512F) ----------------------------------------------
# Standard 12-line scalar-op group: merge mask {k7}, zero mask {k7}{z},
# four static rounding modes, then memory forms straddling the Disp8*N
# boundary (QWORD: +/-1016/1024, DWORD: +/-508/512).
# NOTE(review): test fixture — instruction text must stay byte-identical.
vdivsd xmm6{k7}, xmm5, xmm4 # AVX512
vdivsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vdivss xmm6{k7}, xmm5, xmm4 # AVX512
vdivss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# --- vfmadd{132,213,231}{sd,ss} (AVX512F FMA scalar) ------------------------
# Same 12-line group per mnemonic: masking, zero-masking, static rounding,
# and Disp8*N-boundary memory operands.
# NOTE(review): test fixture — instruction text must stay byte-identical.
vfmadd132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# --- vfmsub{132,213,231}{sd,ss} (AVX512F FMA scalar) ------------------------
# Same 12-line group per mnemonic: masking, zero-masking, static rounding,
# and Disp8*N-boundary memory operands.
# NOTE(review): test fixture — instruction text must stay byte-identical.
vfmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# --- vfnmadd{132,213,231}{sd,ss} (AVX512F FMA scalar, negated product) ------
# Same 12-line group per mnemonic: masking, zero-masking, static rounding,
# and Disp8*N-boundary memory operands.
# NOTE(review): test fixture — instruction text must stay byte-identical.
vfnmadd132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# --- vfnmsub{132,213,231}{sd,ss} (AVX512F FMA scalar, negated product) ------
# Same 12-line group per mnemonic; the vfnmsub231sd group continues past
# this point in the file.
# NOTE(review): test fixture — instruction text must stay byte-identical.
vfnmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vgetexpsd xmm6{k7}, xmm5, xmm4 # AVX512
vgetexpsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vgetexpsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vgetexpss xmm6{k7}, xmm5, xmm4 # AVX512
vgetexpss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vgetexpss xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vgetmantsd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vgetmantss xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, 123 # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vmaxsd xmm6{k7}, xmm5, xmm4 # AVX512
vmaxsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmaxsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vmaxss xmm6{k7}, xmm5, xmm4 # AVX512
vmaxss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmaxss xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vminsd xmm6{k7}, xmm5, xmm4 # AVX512
vminsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vminsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vminss xmm6{k7}, xmm5, xmm4 # AVX512
vminss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vminss xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vmovsd xmm6{k7}, QWORD PTR [ecx] # AVX512
vmovsd xmm6{k7}{z}, QWORD PTR [ecx] # AVX512
vmovsd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512
vmovsd xmm6{k7}, QWORD PTR [edx+1016] # AVX512 Disp8
vmovsd xmm6{k7}, QWORD PTR [edx+1024] # AVX512
vmovsd xmm6{k7}, QWORD PTR [edx-1024] # AVX512 Disp8
vmovsd xmm6{k7}, QWORD PTR [edx-1032] # AVX512
vmovsd QWORD PTR [ecx]{k7}, xmm6 # AVX512
vmovsd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512
vmovsd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512 Disp8
vmovsd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512
vmovsd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512 Disp8
vmovsd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512
vmovsd xmm6{k7}, xmm5, xmm4 # AVX512
vmovsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmovss xmm6{k7}, DWORD PTR [ecx] # AVX512
vmovss xmm6{k7}{z}, DWORD PTR [ecx] # AVX512
vmovss xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512
vmovss xmm6{k7}, DWORD PTR [edx+508] # AVX512 Disp8
vmovss xmm6{k7}, DWORD PTR [edx+512] # AVX512
vmovss xmm6{k7}, DWORD PTR [edx-512] # AVX512 Disp8
vmovss xmm6{k7}, DWORD PTR [edx-516] # AVX512
vmovss DWORD PTR [ecx]{k7}, xmm6 # AVX512
vmovss DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512
vmovss DWORD PTR [edx+508]{k7}, xmm6 # AVX512 Disp8
vmovss DWORD PTR [edx+512]{k7}, xmm6 # AVX512
vmovss DWORD PTR [edx-512]{k7}, xmm6 # AVX512 Disp8
vmovss DWORD PTR [edx-516]{k7}, xmm6 # AVX512
vmovss xmm6{k7}, xmm5, xmm4 # AVX512
vmovss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmulsd xmm6{k7}, xmm5, xmm4 # AVX512
vmulsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vmulss xmm6{k7}, xmm5, xmm4 # AVX512
vmulss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vrcp14sd xmm6{k7}, xmm5, xmm4 # AVX512
vrcp14sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vrcp14ss xmm6{k7}, xmm5, xmm4 # AVX512
vrcp14ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vrcp28ss xmm6{k7}, xmm5, xmm4 # AVX512EMI
vrcp28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512EMI
vrcp28ss xmm6{k7}, xmm5, xmm4, {sae} # AVX512EMI
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512EMI
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512EMI
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512EMI Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512EMI
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512EMI Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512EMI
vrcp28sd xmm6{k7}, xmm5, xmm4 # AVX512EMI
vrcp28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512EMI
vrcp28sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512EMI
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512EMI
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512EMI
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512EMI Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512EMI
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512EMI Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512EMI
vrsqrt14sd xmm6{k7}, xmm5, xmm4 # AVX512
vrsqrt14sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, xmm4 # AVX512
vrsqrt14ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vrsqrt28ss xmm6{k7}, xmm5, xmm4 # AVX512EMI
vrsqrt28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512EMI
vrsqrt28ss xmm6{k7}, xmm5, xmm4, {sae} # AVX512EMI
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512EMI
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512EMI
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512EMI Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512EMI
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512EMI Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512EMI
vrsqrt28sd xmm6{k7}, xmm5, xmm4 # AVX512EMI
vrsqrt28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512EMI
vrsqrt28sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512EMI
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512EMI
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512EMI
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512EMI Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512EMI
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512EMI Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512EMI
vsqrtsd xmm6{k7}, xmm5, xmm4 # AVX512
vsqrtsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4 # AVX512
vsqrtss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vsubsd xmm6{k7}, xmm5, xmm4 # AVX512
vsubsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vsubss xmm6{k7}, xmm5, xmm4 # AVX512
vsubss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vsubss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vsubss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vucomisd xmm6, xmm5, {sae} # AVX512
vucomiss xmm6, xmm5, {sae} # AVX512
vcvtsd2usi eax, xmm6 # AVX512
vcvtsd2usi eax, xmm6, {rn-sae} # AVX512
vcvtsd2usi eax, xmm6, {ru-sae} # AVX512
vcvtsd2usi eax, xmm6, {rd-sae} # AVX512
vcvtsd2usi eax, xmm6, {rz-sae} # AVX512
vcvtsd2usi eax, QWORD PTR [ecx] # AVX512
vcvtsd2usi eax, QWORD PTR [esp+esi*8-123456] # AVX512
vcvtsd2usi eax, QWORD PTR [edx+1016] # AVX512 Disp8
vcvtsd2usi eax, QWORD PTR [edx+1024] # AVX512
vcvtsd2usi eax, QWORD PTR [edx-1024] # AVX512 Disp8
vcvtsd2usi eax, QWORD PTR [edx-1032] # AVX512
vcvtsd2usi ebp, xmm6 # AVX512
vcvtsd2usi ebp, xmm6, {rn-sae} # AVX512
vcvtsd2usi ebp, xmm6, {ru-sae} # AVX512
vcvtsd2usi ebp, xmm6, {rd-sae} # AVX512
vcvtsd2usi ebp, xmm6, {rz-sae} # AVX512
vcvtsd2usi ebp, QWORD PTR [ecx] # AVX512
vcvtsd2usi ebp, QWORD PTR [esp+esi*8-123456] # AVX512
vcvtsd2usi ebp, QWORD PTR [edx+1016] # AVX512 Disp8
vcvtsd2usi ebp, QWORD PTR [edx+1024] # AVX512
vcvtsd2usi ebp, QWORD PTR [edx-1024] # AVX512 Disp8
vcvtsd2usi ebp, QWORD PTR [edx-1032] # AVX512
vcvtss2usi eax, xmm6 # AVX512
vcvtss2usi eax, xmm6, {rn-sae} # AVX512
vcvtss2usi eax, xmm6, {ru-sae} # AVX512
vcvtss2usi eax, xmm6, {rd-sae} # AVX512
vcvtss2usi eax, xmm6, {rz-sae} # AVX512
vcvtss2usi eax, DWORD PTR [ecx] # AVX512
vcvtss2usi eax, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtss2usi eax, DWORD PTR [edx+508] # AVX512 Disp8
vcvtss2usi eax, DWORD PTR [edx+512] # AVX512
vcvtss2usi eax, DWORD PTR [edx-512] # AVX512 Disp8
vcvtss2usi eax, DWORD PTR [edx-516] # AVX512
vcvtss2usi ebp, xmm6 # AVX512
vcvtss2usi ebp, xmm6, {rn-sae} # AVX512
vcvtss2usi ebp, xmm6, {ru-sae} # AVX512
vcvtss2usi ebp, xmm6, {rd-sae} # AVX512
vcvtss2usi ebp, xmm6, {rz-sae} # AVX512
vcvtss2usi ebp, DWORD PTR [ecx] # AVX512
vcvtss2usi ebp, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtss2usi ebp, DWORD PTR [edx+508] # AVX512 Disp8
vcvtss2usi ebp, DWORD PTR [edx+512] # AVX512
vcvtss2usi ebp, DWORD PTR [edx-512] # AVX512 Disp8
vcvtss2usi ebp, DWORD PTR [edx-516] # AVX512
vcvtusi2sd xmm6, xmm5, eax # AVX512
vcvtusi2sd xmm6, xmm5, ebp # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [ecx] # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx+512] # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx-516] # AVX512
vcvtusi2ss xmm6, xmm5, eax # AVX512
vcvtusi2ss xmm6, xmm5, {rn-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, {ru-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, {rd-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, {rz-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {rn-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {ru-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {rd-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {rz-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [ecx] # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx+512] # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx-516] # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4 # AVX512
vscalefsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vscalefss xmm6{k7}, xmm5, xmm4 # AVX512
vscalefss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vfixupimmss xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vfixupimmsd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vrndscalesd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vrndscaless xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, 123 # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vcmpsh k5, xmm5, xmm4, 123 # AVX512-FP16
vcmpsh k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512-FP16
vcmpsh k5, xmm5, WORD PTR [ecx], 123 # AVX512-FP16
vcmpsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456], 123 # AVX512-FP16
vcmpsh k5, xmm5, WORD PTR [ecx+254], 123 # AVX512-FP16 Disp8
vcmpsh k5{k7}, xmm5, WORD PTR [edx-256], 123 # AVX512-FP16 Disp8
vfpclasssh k5, xmm4, 123 # AVX512-FP16
vfpclasssh k5, WORD PTR [ecx], 123 # AVX512-FP16
vfpclasssh k5{k7}, WORD PTR [esp+esi*8-123456], 123 # AVX512-FP16
vfpclasssh k5, WORD PTR [ecx+254], 123 # AVX512-FP16 Disp8
vfpclasssh k5{k7}, WORD PTR [edx-256], 123 # AVX512-FP16 Disp8
# ----------------------------------------------------------------------
# tactcomplabs/xbgas-binutils-gdb (2,831 bytes)
# gas/testsuite/gas/i386/noreg16.s
# ----------------------------------------------------------------------
.macro pfx insn:vararg
.ifdef DATA32
data32 \insn
.else
\insn
.endif
.endm
.text
.code16
noreg:
pfx adc $1, (%bx)
pfx adc $0x89, (%bx)
pfx adc $0x1234, (%bx)
pfx add $1, (%bx)
pfx add $0x89, (%bx)
pfx add $0x1234, (%bx)
pfx and $1, (%bx)
pfx and $0x89, (%bx)
pfx and $0x1234, (%bx)
pfx bt $1, (%bx)
pfx btc $1, (%bx)
pfx btr $1, (%bx)
pfx bts $1, (%bx)
pfx call *(%bx)
pfx cmp $1, (%bx)
pfx cmp $0x89, (%bx)
pfx cmp $0x1234, (%bx)
pfx cmps
pfx cmps %es:(%di), (%si)
pfx crc32 (%bx), %eax
cvtsi2sd (%bx), %xmm0
cvtsi2ss (%bx), %xmm0
pfx dec (%bx)
pfx div (%bx)
pfx fadd (%bx)
pfx fcom (%bx)
pfx fcomp (%bx)
pfx fdiv (%bx)
pfx fdivr (%bx)
pfx fiadd (%bx)
pfx ficom (%bx)
pfx ficomp (%bx)
pfx fidiv (%bx)
pfx fidivr (%bx)
pfx fild (%bx)
pfx fimul (%bx)
pfx fist (%bx)
pfx fistp (%bx)
pfx fisttp (%bx)
pfx fisub (%bx)
pfx fisubr (%bx)
pfx fld (%bx)
pfx fmul (%bx)
pfx fst (%bx)
pfx fstp (%bx)
pfx fsub (%bx)
pfx fsubr (%bx)
pfx idiv (%bx)
pfx imul (%bx)
pfx in $0
pfx in %dx
pfx inc (%bx)
pfx ins
pfx ins %dx, %es:(%di)
pfx jmp *(%bx)
pfx lgdt (%bx)
pfx lidt (%bx)
pfx lldt (%bx)
pfx lmsw (%bx)
pfx lods
pfx lods (%si)
pfx ltr (%bx)
pfx mov $0x12, (%bx)
pfx mov $0x1234, (%bx)
pfx mov %es, (%bx)
pfx mov (%bx), %es
pfx movs
pfx movs (%si), %es:(%di)
pfx movsx (%bx), %ax
movsx (%bx), %eax
pfx movzx (%bx), %ax
movzx (%bx), %eax
pfx mul (%bx)
pfx neg (%bx)
pfx nop (%bx)
pfx not (%bx)
pfx or $1, (%bx)
pfx or $0x89, (%bx)
pfx or $0x1234, (%bx)
pfx out $0
pfx out %dx
pfx outs
pfx outs (%si), %dx
pfx pop (%bx)
pfx pop %es
ptwrite (%bx)
pfx push (%bx)
pfx push %es
pfx rcl $1, (%bx)
pfx rcl $2, (%bx)
pfx rcl %cl, (%bx)
pfx rcl (%bx)
pfx rcr $1, (%bx)
pfx rcr $2, (%bx)
pfx rcr %cl, (%bx)
pfx rcr (%bx)
pfx rol $1, (%bx)
pfx rol $2, (%bx)
pfx rol %cl, (%bx)
pfx rol (%bx)
pfx ror $1, (%bx)
pfx ror $2, (%bx)
pfx ror %cl, (%bx)
pfx ror (%bx)
pfx sbb $1, (%bx)
pfx sbb $0x89, (%bx)
pfx sbb $0x1234, (%bx)
pfx scas
pfx scas %es:(%di)
pfx sal $1, (%bx)
pfx sal $2, (%bx)
pfx sal %cl, (%bx)
pfx sal (%bx)
pfx sar $1, (%bx)
pfx sar $2, (%bx)
pfx sar %cl, (%bx)
pfx sar (%bx)
pfx shl $1, (%bx)
pfx shl $2, (%bx)
pfx shl %cl, (%bx)
pfx shl (%bx)
pfx shr $1, (%bx)
pfx shr $2, (%bx)
pfx shr %cl, (%bx)
pfx shr (%bx)
pfx stos
pfx stos %es:(%di)
pfx sub $1, (%bx)
pfx sub $0x89, (%bx)
pfx sub $0x1234, (%bx)
pfx test $0x89, (%bx)
pfx test $0x1234, (%bx)
vcvtsi2sd (%bx), %xmm0, %xmm0
{evex} vcvtsi2sd (%bx), %xmm0, %xmm0
vcvtsi2ss (%bx), %xmm0, %xmm0
{evex} vcvtsi2ss (%bx), %xmm0, %xmm0
vcvtusi2sd (%bx), %xmm0, %xmm0
vcvtusi2ss (%bx), %xmm0, %xmm0
pfx xor $1, (%bx)
pfx xor $0x89, (%bx)
pfx xor $0x1234, (%bx)
# ----------------------------------------------------------------------
# tactcomplabs/xbgas-binutils-gdb (16,330 bytes)
# gas/testsuite/gas/i386/x86-64-fma.s
# ----------------------------------------------------------------------
# Check 64bit FMA instructions
.allow_index_reg
.text
_start:
# Tests for op ymm/mem256, ymm, ymm
vfmadd132pd %ymm4,%ymm6,%ymm2
vfmadd132pd (%rcx),%ymm6,%ymm2
vfmadd132ps %ymm4,%ymm6,%ymm2
vfmadd132ps (%rcx),%ymm6,%ymm2
vfmadd213pd %ymm4,%ymm6,%ymm2
vfmadd213pd (%rcx),%ymm6,%ymm2
vfmadd213ps %ymm4,%ymm6,%ymm2
vfmadd213ps (%rcx),%ymm6,%ymm2
vfmadd231pd %ymm4,%ymm6,%ymm2
vfmadd231pd (%rcx),%ymm6,%ymm2
vfmadd231ps %ymm4,%ymm6,%ymm2
vfmadd231ps (%rcx),%ymm6,%ymm2
vfmaddsub132pd %ymm4,%ymm6,%ymm2
vfmaddsub132pd (%rcx),%ymm6,%ymm2
vfmaddsub132ps %ymm4,%ymm6,%ymm2
vfmaddsub132ps (%rcx),%ymm6,%ymm2
vfmaddsub213pd %ymm4,%ymm6,%ymm2
vfmaddsub213pd (%rcx),%ymm6,%ymm2
vfmaddsub213ps %ymm4,%ymm6,%ymm2
vfmaddsub213ps (%rcx),%ymm6,%ymm2
vfmaddsub231pd %ymm4,%ymm6,%ymm2
vfmaddsub231pd (%rcx),%ymm6,%ymm2
vfmaddsub231ps %ymm4,%ymm6,%ymm2
vfmaddsub231ps (%rcx),%ymm6,%ymm2
vfmsubadd132pd %ymm4,%ymm6,%ymm2
vfmsubadd132pd (%rcx),%ymm6,%ymm2
vfmsubadd132ps %ymm4,%ymm6,%ymm2
vfmsubadd132ps (%rcx),%ymm6,%ymm2
vfmsubadd213pd %ymm4,%ymm6,%ymm2
vfmsubadd213pd (%rcx),%ymm6,%ymm2
vfmsubadd213ps %ymm4,%ymm6,%ymm2
vfmsubadd213ps (%rcx),%ymm6,%ymm2
vfmsubadd231pd %ymm4,%ymm6,%ymm2
vfmsubadd231pd (%rcx),%ymm6,%ymm2
vfmsubadd231ps %ymm4,%ymm6,%ymm2
vfmsubadd231ps (%rcx),%ymm6,%ymm2
vfmsub132pd %ymm4,%ymm6,%ymm2
vfmsub132pd (%rcx),%ymm6,%ymm2
vfmsub132ps %ymm4,%ymm6,%ymm2
vfmsub132ps (%rcx),%ymm6,%ymm2
vfmsub213pd %ymm4,%ymm6,%ymm2
vfmsub213pd (%rcx),%ymm6,%ymm2
vfmsub213ps %ymm4,%ymm6,%ymm2
vfmsub213ps (%rcx),%ymm6,%ymm2
vfmsub231pd %ymm4,%ymm6,%ymm2
vfmsub231pd (%rcx),%ymm6,%ymm2
vfmsub231ps %ymm4,%ymm6,%ymm2
vfmsub231ps (%rcx),%ymm6,%ymm2
vfnmadd132pd %ymm4,%ymm6,%ymm2
vfnmadd132pd (%rcx),%ymm6,%ymm2
vfnmadd132ps %ymm4,%ymm6,%ymm2
vfnmadd132ps (%rcx),%ymm6,%ymm2
vfnmadd213pd %ymm4,%ymm6,%ymm2
vfnmadd213pd (%rcx),%ymm6,%ymm2
vfnmadd213ps %ymm4,%ymm6,%ymm2
vfnmadd213ps (%rcx),%ymm6,%ymm2
vfnmadd231pd %ymm4,%ymm6,%ymm2
vfnmadd231pd (%rcx),%ymm6,%ymm2
vfnmadd231ps %ymm4,%ymm6,%ymm2
vfnmadd231ps (%rcx),%ymm6,%ymm2
vfnmsub132pd %ymm4,%ymm6,%ymm2
vfnmsub132pd (%rcx),%ymm6,%ymm2
vfnmsub132ps %ymm4,%ymm6,%ymm2
vfnmsub132ps (%rcx),%ymm6,%ymm2
vfnmsub213pd %ymm4,%ymm6,%ymm2
vfnmsub213pd (%rcx),%ymm6,%ymm2
vfnmsub213ps %ymm4,%ymm6,%ymm2
vfnmsub213ps (%rcx),%ymm6,%ymm2
vfnmsub231pd %ymm4,%ymm6,%ymm2
vfnmsub231pd (%rcx),%ymm6,%ymm2
vfnmsub231ps %ymm4,%ymm6,%ymm2
vfnmsub231ps (%rcx),%ymm6,%ymm2
# Tests for op xmm/mem128, xmm, xmm
vfmadd132pd %xmm4,%xmm6,%xmm2
vfmadd132pd (%rcx),%xmm6,%xmm7
vfmadd132ps %xmm4,%xmm6,%xmm2
vfmadd132ps (%rcx),%xmm6,%xmm7
vfmadd213pd %xmm4,%xmm6,%xmm2
vfmadd213pd (%rcx),%xmm6,%xmm7
vfmadd213ps %xmm4,%xmm6,%xmm2
vfmadd213ps (%rcx),%xmm6,%xmm7
vfmadd231pd %xmm4,%xmm6,%xmm2
vfmadd231pd (%rcx),%xmm6,%xmm7
vfmadd231ps %xmm4,%xmm6,%xmm2
vfmadd231ps (%rcx),%xmm6,%xmm7
vfmaddsub132pd %xmm4,%xmm6,%xmm2
vfmaddsub132pd (%rcx),%xmm6,%xmm7
vfmaddsub132ps %xmm4,%xmm6,%xmm2
vfmaddsub132ps (%rcx),%xmm6,%xmm7
vfmaddsub213pd %xmm4,%xmm6,%xmm2
vfmaddsub213pd (%rcx),%xmm6,%xmm7
vfmaddsub213ps %xmm4,%xmm6,%xmm2
vfmaddsub213ps (%rcx),%xmm6,%xmm7
vfmaddsub231pd %xmm4,%xmm6,%xmm2
vfmaddsub231pd (%rcx),%xmm6,%xmm7
vfmaddsub231ps %xmm4,%xmm6,%xmm2
vfmaddsub231ps (%rcx),%xmm6,%xmm7
vfmsubadd132pd %xmm4,%xmm6,%xmm2
vfmsubadd132pd (%rcx),%xmm6,%xmm7
vfmsubadd132ps %xmm4,%xmm6,%xmm2
vfmsubadd132ps (%rcx),%xmm6,%xmm7
vfmsubadd213pd %xmm4,%xmm6,%xmm2
vfmsubadd213pd (%rcx),%xmm6,%xmm7
vfmsubadd213ps %xmm4,%xmm6,%xmm2
vfmsubadd213ps (%rcx),%xmm6,%xmm7
vfmsubadd231pd %xmm4,%xmm6,%xmm2
vfmsubadd231pd (%rcx),%xmm6,%xmm7
vfmsubadd231ps %xmm4,%xmm6,%xmm2
# NOTE(review): assembler-testsuite fixture. Every instruction line below is
# matched byte-for-byte against a paired expected-disassembly (.d) file, so
# only comments may be added here — do not reformat or reorder instructions.
# This section exercises the FMA forms op xmm/mem128, xmm, xmm (AT&T syntax:
# src is first operand, dst last).
	vfmsubadd231ps (%rcx),%xmm6,%xmm7
	vfmsub132pd %xmm4,%xmm6,%xmm2
	vfmsub132pd (%rcx),%xmm6,%xmm7
	vfmsub132ps %xmm4,%xmm6,%xmm2
	vfmsub132ps (%rcx),%xmm6,%xmm7
	vfmsub213pd %xmm4,%xmm6,%xmm2
	vfmsub213pd (%rcx),%xmm6,%xmm7
	vfmsub213ps %xmm4,%xmm6,%xmm2
	vfmsub213ps (%rcx),%xmm6,%xmm7
	vfmsub231pd %xmm4,%xmm6,%xmm2
	vfmsub231pd (%rcx),%xmm6,%xmm7
	vfmsub231ps %xmm4,%xmm6,%xmm2
	vfmsub231ps (%rcx),%xmm6,%xmm7
	vfnmadd132pd %xmm4,%xmm6,%xmm2
	vfnmadd132pd (%rcx),%xmm6,%xmm7
	vfnmadd132ps %xmm4,%xmm6,%xmm2
	vfnmadd132ps (%rcx),%xmm6,%xmm7
	vfnmadd213pd %xmm4,%xmm6,%xmm2
	vfnmadd213pd (%rcx),%xmm6,%xmm7
	vfnmadd213ps %xmm4,%xmm6,%xmm2
	vfnmadd213ps (%rcx),%xmm6,%xmm7
	vfnmadd231pd %xmm4,%xmm6,%xmm2
	vfnmadd231pd (%rcx),%xmm6,%xmm7
	vfnmadd231ps %xmm4,%xmm6,%xmm2
	vfnmadd231ps (%rcx),%xmm6,%xmm7
	vfnmsub132pd %xmm4,%xmm6,%xmm2
	vfnmsub132pd (%rcx),%xmm6,%xmm7
	vfnmsub132ps %xmm4,%xmm6,%xmm2
	vfnmsub132ps (%rcx),%xmm6,%xmm7
	vfnmsub213pd %xmm4,%xmm6,%xmm2
	vfnmsub213pd (%rcx),%xmm6,%xmm7
	vfnmsub213ps %xmm4,%xmm6,%xmm2
	vfnmsub213ps (%rcx),%xmm6,%xmm7
	vfnmsub231pd %xmm4,%xmm6,%xmm2
	vfnmsub231pd (%rcx),%xmm6,%xmm7
	vfnmsub231ps %xmm4,%xmm6,%xmm2
	vfnmsub231ps (%rcx),%xmm6,%xmm7
# Tests for op xmm/mem64, xmm, xmm
# (scalar double forms: memory operand is a 64-bit load)
	vfmadd132sd %xmm4,%xmm6,%xmm2
	vfmadd132sd (%rcx),%xmm6,%xmm2
	vfmadd213sd %xmm4,%xmm6,%xmm2
	vfmadd213sd (%rcx),%xmm6,%xmm2
	vfmadd231sd %xmm4,%xmm6,%xmm2
	vfmadd231sd (%rcx),%xmm6,%xmm2
	vfmsub132sd %xmm4,%xmm6,%xmm2
	vfmsub132sd (%rcx),%xmm6,%xmm2
	vfmsub213sd %xmm4,%xmm6,%xmm2
	vfmsub213sd (%rcx),%xmm6,%xmm2
	vfmsub231sd %xmm4,%xmm6,%xmm2
	vfmsub231sd (%rcx),%xmm6,%xmm2
	vfnmadd132sd %xmm4,%xmm6,%xmm2
	vfnmadd132sd (%rcx),%xmm6,%xmm2
	vfnmadd213sd %xmm4,%xmm6,%xmm2
	vfnmadd213sd (%rcx),%xmm6,%xmm2
	vfnmadd231sd %xmm4,%xmm6,%xmm2
	vfnmadd231sd (%rcx),%xmm6,%xmm2
	vfnmsub132sd %xmm4,%xmm6,%xmm2
	vfnmsub132sd (%rcx),%xmm6,%xmm2
	vfnmsub213sd %xmm4,%xmm6,%xmm2
	vfnmsub213sd (%rcx),%xmm6,%xmm2
	vfnmsub231sd %xmm4,%xmm6,%xmm2
	vfnmsub231sd (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
# (scalar single forms: memory operand is a 32-bit load)
	vfmadd132ss %xmm4,%xmm6,%xmm2
	vfmadd132ss (%rcx),%xmm6,%xmm2
	vfmadd213ss %xmm4,%xmm6,%xmm2
	vfmadd213ss (%rcx),%xmm6,%xmm2
	vfmadd231ss %xmm4,%xmm6,%xmm2
	vfmadd231ss (%rcx),%xmm6,%xmm2
	vfmsub132ss %xmm4,%xmm6,%xmm2
	vfmsub132ss (%rcx),%xmm6,%xmm2
	vfmsub213ss %xmm4,%xmm6,%xmm2
	vfmsub213ss (%rcx),%xmm6,%xmm2
	vfmsub231ss %xmm4,%xmm6,%xmm2
	vfmsub231ss (%rcx),%xmm6,%xmm2
	vfnmadd132ss %xmm4,%xmm6,%xmm2
	vfnmadd132ss (%rcx),%xmm6,%xmm2
	vfnmadd213ss %xmm4,%xmm6,%xmm2
	vfnmadd213ss (%rcx),%xmm6,%xmm2
	vfnmadd231ss %xmm4,%xmm6,%xmm2
	vfnmadd231ss (%rcx),%xmm6,%xmm2
	vfnmsub132ss %xmm4,%xmm6,%xmm2
	vfnmsub132ss (%rcx),%xmm6,%xmm2
	vfnmsub213ss %xmm4,%xmm6,%xmm2
	vfnmsub213ss (%rcx),%xmm6,%xmm2
	vfnmsub231ss %xmm4,%xmm6,%xmm2
	vfnmsub231ss (%rcx),%xmm6,%xmm2
# From here on the same FMA forms are re-tested in Intel syntax
# (dst first; memory operand last, with and without an explicit
# size override such as YMMWORD PTR).
	.intel_syntax noprefix
# Tests for op ymm/mem256, ymm, ymm
	vfmadd132pd ymm2,ymm6,ymm4
	vfmadd132pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmadd132pd ymm2,ymm6,[rcx]
	vfmadd132ps ymm2,ymm6,ymm4
	vfmadd132ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmadd132ps ymm2,ymm6,[rcx]
	vfmadd213pd ymm2,ymm6,ymm4
	vfmadd213pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmadd213pd ymm2,ymm6,[rcx]
	vfmadd213ps ymm2,ymm6,ymm4
	vfmadd213ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmadd213ps ymm2,ymm6,[rcx]
	vfmadd231pd ymm2,ymm6,ymm4
	vfmadd231pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmadd231pd ymm2,ymm6,[rcx]
	vfmadd231ps ymm2,ymm6,ymm4
	vfmadd231ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmadd231ps ymm2,ymm6,[rcx]
	vfmaddsub132pd ymm2,ymm6,ymm4
	vfmaddsub132pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmaddsub132pd ymm2,ymm6,[rcx]
	vfmaddsub132ps ymm2,ymm6,ymm4
	vfmaddsub132ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmaddsub132ps ymm2,ymm6,[rcx]
	vfmaddsub213pd ymm2,ymm6,ymm4
	vfmaddsub213pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmaddsub213pd ymm2,ymm6,[rcx]
	vfmaddsub213ps ymm2,ymm6,ymm4
	vfmaddsub213ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmaddsub213ps ymm2,ymm6,[rcx]
	vfmaddsub231pd ymm2,ymm6,ymm4
	vfmaddsub231pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmaddsub231pd ymm2,ymm6,[rcx]
	vfmaddsub231ps ymm2,ymm6,ymm4
	vfmaddsub231ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmaddsub231ps ymm2,ymm6,[rcx]
	vfmsubadd132pd ymm2,ymm6,ymm4
	vfmsubadd132pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsubadd132pd ymm2,ymm6,[rcx]
	vfmsubadd132ps ymm2,ymm6,ymm4
	vfmsubadd132ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsubadd132ps ymm2,ymm6,[rcx]
	vfmsubadd213pd ymm2,ymm6,ymm4
	vfmsubadd213pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsubadd213pd ymm2,ymm6,[rcx]
	vfmsubadd213ps ymm2,ymm6,ymm4
	vfmsubadd213ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsubadd213ps ymm2,ymm6,[rcx]
	vfmsubadd231pd ymm2,ymm6,ymm4
	vfmsubadd231pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsubadd231pd ymm2,ymm6,[rcx]
	vfmsubadd231ps ymm2,ymm6,ymm4
	vfmsubadd231ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsubadd231ps ymm2,ymm6,[rcx]
	vfmsub132pd ymm2,ymm6,ymm4
	vfmsub132pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsub132pd ymm2,ymm6,[rcx]
	vfmsub132ps ymm2,ymm6,ymm4
	vfmsub132ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsub132ps ymm2,ymm6,[rcx]
	vfmsub213pd ymm2,ymm6,ymm4
	vfmsub213pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsub213pd ymm2,ymm6,[rcx]
	vfmsub213ps ymm2,ymm6,ymm4
	vfmsub213ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsub213ps ymm2,ymm6,[rcx]
	vfmsub231pd ymm2,ymm6,ymm4
	vfmsub231pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsub231pd ymm2,ymm6,[rcx]
	vfmsub231ps ymm2,ymm6,ymm4
	vfmsub231ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfmsub231ps ymm2,ymm6,[rcx]
	vfnmadd132pd ymm2,ymm6,ymm4
	vfnmadd132pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmadd132pd ymm2,ymm6,[rcx]
	vfnmadd132ps ymm2,ymm6,ymm4
	vfnmadd132ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmadd132ps ymm2,ymm6,[rcx]
	vfnmadd213pd ymm2,ymm6,ymm4
	vfnmadd213pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmadd213pd ymm2,ymm6,[rcx]
	vfnmadd213ps ymm2,ymm6,ymm4
	vfnmadd213ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmadd213ps ymm2,ymm6,[rcx]
	vfnmadd231pd ymm2,ymm6,ymm4
	vfnmadd231pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmadd231pd ymm2,ymm6,[rcx]
	vfnmadd231ps ymm2,ymm6,ymm4
	vfnmadd231ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmadd231ps ymm2,ymm6,[rcx]
	vfnmsub132pd ymm2,ymm6,ymm4
	vfnmsub132pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmsub132pd ymm2,ymm6,[rcx]
	vfnmsub132ps ymm2,ymm6,ymm4
	vfnmsub132ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmsub132ps ymm2,ymm6,[rcx]
	vfnmsub213pd ymm2,ymm6,ymm4
	vfnmsub213pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmsub213pd ymm2,ymm6,[rcx]
	vfnmsub213ps ymm2,ymm6,ymm4
	vfnmsub213ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmsub213ps ymm2,ymm6,[rcx]
	vfnmsub231pd ymm2,ymm6,ymm4
	vfnmsub231pd ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmsub231pd ymm2,ymm6,[rcx]
	vfnmsub231ps ymm2,ymm6,ymm4
	vfnmsub231ps ymm2,ymm6,YMMWORD PTR [rcx]
	vfnmsub231ps ymm2,ymm6,[rcx]
# Tests for op xmm/mem128, xmm, xmm
	vfmadd132pd xmm2,xmm6,xmm4
	vfmadd132pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmadd132pd xmm7,xmm6,[rcx]
	vfmadd132ps xmm2,xmm6,xmm4
	vfmadd132ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmadd132ps xmm7,xmm6,[rcx]
	vfmadd213pd xmm2,xmm6,xmm4
	vfmadd213pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmadd213pd xmm7,xmm6,[rcx]
	vfmadd213ps xmm2,xmm6,xmm4
	vfmadd213ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmadd213ps xmm7,xmm6,[rcx]
	vfmadd231pd xmm2,xmm6,xmm4
	vfmadd231pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmadd231pd xmm7,xmm6,[rcx]
	vfmadd231ps xmm2,xmm6,xmm4
	vfmadd231ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmadd231ps xmm7,xmm6,[rcx]
	vfmaddsub132pd xmm2,xmm6,xmm4
	vfmaddsub132pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmaddsub132pd xmm7,xmm6,[rcx]
	vfmaddsub132ps xmm2,xmm6,xmm4
	vfmaddsub132ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmaddsub132ps xmm7,xmm6,[rcx]
	vfmaddsub213pd xmm2,xmm6,xmm4
	vfmaddsub213pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmaddsub213pd xmm7,xmm6,[rcx]
	vfmaddsub213ps xmm2,xmm6,xmm4
	vfmaddsub213ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmaddsub213ps xmm7,xmm6,[rcx]
	vfmaddsub231pd xmm2,xmm6,xmm4
	vfmaddsub231pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmaddsub231pd xmm7,xmm6,[rcx]
	vfmaddsub231ps xmm2,xmm6,xmm4
	vfmaddsub231ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmaddsub231ps xmm7,xmm6,[rcx]
	vfmsubadd132pd xmm2,xmm6,xmm4
	vfmsubadd132pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsubadd132pd xmm7,xmm6,[rcx]
	vfmsubadd132ps xmm2,xmm6,xmm4
	vfmsubadd132ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsubadd132ps xmm7,xmm6,[rcx]
	vfmsubadd213pd xmm2,xmm6,xmm4
	vfmsubadd213pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsubadd213pd xmm7,xmm6,[rcx]
	vfmsubadd213ps xmm2,xmm6,xmm4
	vfmsubadd213ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsubadd213ps xmm7,xmm6,[rcx]
	vfmsubadd231pd xmm2,xmm6,xmm4
	vfmsubadd231pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsubadd231pd xmm7,xmm6,[rcx]
	vfmsubadd231ps xmm2,xmm6,xmm4
	vfmsubadd231ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsubadd231ps xmm7,xmm6,[rcx]
	vfmsub132pd xmm2,xmm6,xmm4
	vfmsub132pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsub132pd xmm7,xmm6,[rcx]
	vfmsub132ps xmm2,xmm6,xmm4
	vfmsub132ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsub132ps xmm7,xmm6,[rcx]
	vfmsub213pd xmm2,xmm6,xmm4
	vfmsub213pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsub213pd xmm7,xmm6,[rcx]
	vfmsub213ps xmm2,xmm6,xmm4
	vfmsub213ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsub213ps xmm7,xmm6,[rcx]
	vfmsub231pd xmm2,xmm6,xmm4
	vfmsub231pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsub231pd xmm7,xmm6,[rcx]
	vfmsub231ps xmm2,xmm6,xmm4
	vfmsub231ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfmsub231ps xmm7,xmm6,[rcx]
	vfnmadd132pd xmm2,xmm6,xmm4
	vfnmadd132pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmadd132pd xmm7,xmm6,[rcx]
	vfnmadd132ps xmm2,xmm6,xmm4
	vfnmadd132ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmadd132ps xmm7,xmm6,[rcx]
	vfnmadd213pd xmm2,xmm6,xmm4
	vfnmadd213pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmadd213pd xmm7,xmm6,[rcx]
	vfnmadd213ps xmm2,xmm6,xmm4
	vfnmadd213ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmadd213ps xmm7,xmm6,[rcx]
	vfnmadd231pd xmm2,xmm6,xmm4
	vfnmadd231pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmadd231pd xmm7,xmm6,[rcx]
	vfnmadd231ps xmm2,xmm6,xmm4
	vfnmadd231ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmadd231ps xmm7,xmm6,[rcx]
	vfnmsub132pd xmm2,xmm6,xmm4
	vfnmsub132pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmsub132pd xmm7,xmm6,[rcx]
	vfnmsub132ps xmm2,xmm6,xmm4
	vfnmsub132ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmsub132ps xmm7,xmm6,[rcx]
	vfnmsub213pd xmm2,xmm6,xmm4
	vfnmsub213pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmsub213pd xmm7,xmm6,[rcx]
	vfnmsub213ps xmm2,xmm6,xmm4
	vfnmsub213ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmsub213ps xmm7,xmm6,[rcx]
	vfnmsub231pd xmm2,xmm6,xmm4
	vfnmsub231pd xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmsub231pd xmm7,xmm6,[rcx]
	vfnmsub231ps xmm2,xmm6,xmm4
	vfnmsub231ps xmm7,xmm6,XMMWORD PTR [rcx]
	vfnmsub231ps xmm7,xmm6,[rcx]
# Tests for op xmm/mem64, xmm, xmm
	vfmadd132sd xmm2,xmm6,xmm4
	vfmadd132sd xmm2,xmm6,QWORD PTR [rcx]
	vfmadd132sd xmm2,xmm6,[rcx]
	vfmadd213sd xmm2,xmm6,xmm4
	vfmadd213sd xmm2,xmm6,QWORD PTR [rcx]
	vfmadd213sd xmm2,xmm6,[rcx]
	vfmadd231sd xmm2,xmm6,xmm4
	vfmadd231sd xmm2,xmm6,QWORD PTR [rcx]
	vfmadd231sd xmm2,xmm6,[rcx]
	vfmsub132sd xmm2,xmm6,xmm4
	vfmsub132sd xmm2,xmm6,QWORD PTR [rcx]
	vfmsub132sd xmm2,xmm6,[rcx]
	vfmsub213sd xmm2,xmm6,xmm4
	vfmsub213sd xmm2,xmm6,QWORD PTR [rcx]
	vfmsub213sd xmm2,xmm6,[rcx]
	vfmsub231sd xmm2,xmm6,xmm4
	vfmsub231sd xmm2,xmm6,QWORD PTR [rcx]
	vfmsub231sd xmm2,xmm6,[rcx]
	vfnmadd132sd xmm2,xmm6,xmm4
	vfnmadd132sd xmm2,xmm6,QWORD PTR [rcx]
	vfnmadd132sd xmm2,xmm6,[rcx]
	vfnmadd213sd xmm2,xmm6,xmm4
	vfnmadd213sd xmm2,xmm6,QWORD PTR [rcx]
	vfnmadd213sd xmm2,xmm6,[rcx]
	vfnmadd231sd xmm2,xmm6,xmm4
	vfnmadd231sd xmm2,xmm6,QWORD PTR [rcx]
	vfnmadd231sd xmm2,xmm6,[rcx]
	vfnmsub132sd xmm2,xmm6,xmm4
	vfnmsub132sd xmm2,xmm6,QWORD PTR [rcx]
	vfnmsub132sd xmm2,xmm6,[rcx]
	vfnmsub213sd xmm2,xmm6,xmm4
	vfnmsub213sd xmm2,xmm6,QWORD PTR [rcx]
	vfnmsub213sd xmm2,xmm6,[rcx]
	vfnmsub231sd xmm2,xmm6,xmm4
	vfnmsub231sd xmm2,xmm6,QWORD PTR [rcx]
	vfnmsub231sd xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm, xmm
	vfmadd132ss xmm2,xmm6,xmm4
	vfmadd132ss xmm2,xmm6,DWORD PTR [rcx]
	vfmadd132ss xmm2,xmm6,[rcx]
	vfmadd213ss xmm2,xmm6,xmm4
	vfmadd213ss xmm2,xmm6,DWORD PTR [rcx]
	vfmadd213ss xmm2,xmm6,[rcx]
	vfmadd231ss xmm2,xmm6,xmm4
	vfmadd231ss xmm2,xmm6,DWORD PTR [rcx]
	vfmadd231ss xmm2,xmm6,[rcx]
	vfmsub132ss xmm2,xmm6,xmm4
	vfmsub132ss xmm2,xmm6,DWORD PTR [rcx]
	vfmsub132ss xmm2,xmm6,[rcx]
	vfmsub213ss xmm2,xmm6,xmm4
	vfmsub213ss xmm2,xmm6,DWORD PTR [rcx]
	vfmsub213ss xmm2,xmm6,[rcx]
	vfmsub231ss xmm2,xmm6,xmm4
	vfmsub231ss xmm2,xmm6,DWORD PTR [rcx]
	vfmsub231ss xmm2,xmm6,[rcx]
	vfnmadd132ss xmm2,xmm6,xmm4
	vfnmadd132ss xmm2,xmm6,DWORD PTR [rcx]
	vfnmadd132ss xmm2,xmm6,[rcx]
	vfnmadd213ss xmm2,xmm6,xmm4
	vfnmadd213ss xmm2,xmm6,DWORD PTR [rcx]
	vfnmadd213ss xmm2,xmm6,[rcx]
	vfnmadd231ss xmm2,xmm6,xmm4
	vfnmadd231ss xmm2,xmm6,DWORD PTR [rcx]
	vfnmadd231ss xmm2,xmm6,[rcx]
	vfnmsub132ss xmm2,xmm6,xmm4
	vfnmsub132ss xmm2,xmm6,DWORD PTR [rcx]
	vfnmsub132ss xmm2,xmm6,[rcx]
	vfnmsub213ss xmm2,xmm6,xmm4
	vfnmsub213ss xmm2,xmm6,DWORD PTR [rcx]
	vfnmsub213ss xmm2,xmm6,[rcx]
	vfnmsub231ss xmm2,xmm6,xmm4
	vfnmsub231ss xmm2,xmm6,DWORD PTR [rcx]
	vfnmsub231ss xmm2,xmm6,[rcx]
# ---------------------------------------------------------------------------
# (extraction artifact converted to a comment — these table cells were not
#  assembly and would not assemble)
# repo: tactcomplabs/xbgas-binutils-gdb   size: 98,123
# file: gas/testsuite/gas/i386/x86-64-avx512dq_vl.s
# ---------------------------------------------------------------------------
# Check 64bit AVX512{DQ,VL} instructions
# NOTE(review): assembler-testsuite fixture. Each line is matched against a
# paired expected-disassembly (.d) file; "Disp8" marks offsets chosen to be
# encodable as a compressed 8-bit displacement for the annotated element
# size, and the neighboring non-Disp8 line is one step past that range.
# Only comments may be added here — do not reformat or reorder instructions.
	.allow_index_reg
	.text
_start:
	vbroadcastf64x2 (%rcx), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf64x2 (%rcx), %ymm30{%k7}	 # AVX512{DQ,VL}
	vbroadcastf64x2 (%rcx), %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vbroadcastf64x2 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf64x2 2032(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2 2048(%rdx), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf64x2 -2048(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2 -2064(%rdx), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti64x2 (%rcx), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti64x2 (%rcx), %ymm30{%k7}	 # AVX512{DQ,VL}
	vbroadcasti64x2 (%rcx), %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vbroadcasti64x2 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti64x2 2032(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2 2048(%rdx), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti64x2 -2048(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2 -2064(%rdx), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf32x2 %xmm31, %ymm30	 # AVX512{DQ,VL}
	vbroadcastf32x2 %xmm31, %ymm30{%k7}	 # AVX512{DQ,VL}
	vbroadcastf32x2 %xmm31, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vbroadcastf32x2 (%rcx), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf32x2 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf32x2 1016(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2 1024(%rdx), %ymm30	 # AVX512{DQ,VL}
	vbroadcastf32x2 -1024(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2 -1032(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtpd2qq %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtpd2qq (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq 2032(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq 2048(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq -2048(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq -2064(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq 1016(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq 1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq -1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq -1032(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2qq %ymm29, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vcvtpd2qq %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtpd2qq (%rcx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq (%rcx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq 4064(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq 4096(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq -4096(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq -4128(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq 1016(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq 1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2qq -1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2qq -1032(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtpd2uqq %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtpd2uqq (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq 2032(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq 2048(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq -2048(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq -2064(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq 1016(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq 1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq -1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq -1032(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtpd2uqq %ymm29, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vcvtpd2uqq %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtpd2uqq (%rcx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq (%rcx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq 4064(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq 4096(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq -4096(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq -4128(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq 1016(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq 1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtpd2uqq -1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtpd2uqq -1032(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtps2qq %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtps2qq (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq 1016(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq 1024(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq -1024(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq -1032(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq 508(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq 512(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq -512(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq -516(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtps2qq %xmm29, %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq %xmm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vcvtps2qq %xmm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtps2qq (%rcx), %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq (%rcx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq 2032(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq 2048(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq -2048(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq -2064(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq 508(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq 512(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtps2qq -512(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2qq -516(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtps2uqq %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtps2uqq (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq 1016(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq 1024(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq -1024(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq -1032(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq 508(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq 512(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq -512(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq -516(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtps2uqq %xmm29, %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq %xmm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vcvtps2uqq %xmm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtps2uqq (%rcx), %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq (%rcx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq 2032(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq 2048(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq -2048(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq -2064(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq 508(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq 512(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtps2uqq -512(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq -516(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtqq2pd %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtqq2pd (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd 2032(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd 2048(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd -2048(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd -2064(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd 1016(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd 1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd -1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd -1032(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2pd %ymm29, %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vcvtqq2pd %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtqq2pd (%rcx), %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd (%rcx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd 4064(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd 4096(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd -4096(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd -4128(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd 1016(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd 1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtqq2pd -1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd -1032(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
# vcvtqq2ps narrows, so the memory forms carry an explicit x/y suffix to
# disambiguate the source width when only the xmm destination is visible.
	vcvtqq2ps %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2ps %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtqq2ps %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtqq2psx (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psx 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2ps (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psx 2032(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psx 2048(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psx -2048(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psx -2064(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psx 1016(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psx 1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psx -1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psx -1032(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2ps %ymm29, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2ps %ymm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtqq2ps %ymm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtqq2psy (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psy 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2ps (%rcx){1to4}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psy 4064(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psy 4096(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psy -4096(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psy -4128(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psy 1016(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psy 1024(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL}
	vcvtqq2psy -1024(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtqq2psy -1032(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtuqq2pd %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtuqq2pd (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd 2032(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd 2048(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd -2048(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd -2064(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd 1016(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd 1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd -1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd -1032(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2pd %ymm29, %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vcvtuqq2pd %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtuqq2pd (%rcx), %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd (%rcx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd 4064(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd 4096(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd -4096(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd -4128(%rdx), %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd 1016(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd 1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2pd -1024(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd -1032(%rdx){1to4}, %ymm30	 # AVX512{DQ,VL}
	vcvtuqq2ps %xmm29, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2ps %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtuqq2ps %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtuqq2psx (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psx 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2ps (%rcx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psx 2032(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psx 2048(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psx -2048(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psx -2064(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psx 1016(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psx 1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psx -1024(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psx -1032(%rdx){1to2}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2ps %ymm29, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2ps %ymm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vcvtuqq2ps %ymm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vcvtuqq2psy (%rcx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psy 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2ps (%rcx){1to4}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psy 4064(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psy 4096(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psy -4096(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psy -4128(%rdx), %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psy 1016(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psy 1024(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL}
	vcvtuqq2psy -1024(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL} Disp8
	vcvtuqq2psy -1032(%rdx){1to4}, %xmm30	 # AVX512{DQ,VL}
	vextractf64x2 $0xab, %ymm29, %xmm30	 # AVX512{DQ,VL}
	vextractf64x2 $0xab, %ymm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vextractf64x2 $0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vextractf64x2 $123, %ymm29, %xmm30	 # AVX512{DQ,VL}
	vextracti64x2 $0xab, %ymm29, %xmm30	 # AVX512{DQ,VL}
	vextracti64x2 $0xab, %ymm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vextracti64x2 $0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vextracti64x2 $123, %ymm29, %xmm30	 # AVX512{DQ,VL}
# vfpclass* writes a mask register; the x/y suffix on memory forms selects
# the 128-/256-bit source width, which the bare mnemonic leaves ambiguous.
	vfpclasspd $0xab, %xmm30, %k5	 # AVX512{DQ,VL}
	vfpclasspd $0xab, %xmm30, %k5{%k7}	 # AVX512{DQ,VL}
	vfpclasspd $123, %xmm30, %k5	 # AVX512{DQ,VL}
	vfpclasspdx $123, (%rcx), %k5	 # AVX512{DQ,VL}
	vfpclasspdx $123, 0x123(%rax,%r14,8), %k5	 # AVX512{DQ,VL}
	vfpclasspd $123, (%rcx){1to2}, %k5	 # AVX512{DQ,VL}
	vfpclasspdx $123, 2032(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdx $123, 2048(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspdx $123, -2048(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdx $123, -2064(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspdx $123, 1016(%rdx){1to2}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdx $123, 1024(%rdx){1to2}, %k5	 # AVX512{DQ,VL}
	vfpclasspdx $123, -1024(%rdx){1to2}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdx $123, -1032(%rdx){1to2}, %k5	 # AVX512{DQ,VL}
	vfpclasspd $0xab, %ymm30, %k5	 # AVX512{DQ,VL}
	vfpclasspd $0xab, %ymm30, %k5{%k7}	 # AVX512{DQ,VL}
	vfpclasspd $123, %ymm30, %k5	 # AVX512{DQ,VL}
	vfpclasspdy $123, (%rcx), %k5	 # AVX512{DQ,VL}
	vfpclasspdy $123, 0x123(%rax,%r14,8), %k5	 # AVX512{DQ,VL}
	vfpclasspd $123, (%rcx){1to4}, %k5	 # AVX512{DQ,VL}
	vfpclasspdy $123, 4064(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdy $123, 4096(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspdy $123, -4096(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdy $123, -4128(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspdy $123, 1016(%rdx){1to4}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdy $123, 1024(%rdx){1to4}, %k5	 # AVX512{DQ,VL}
	vfpclasspdy $123, -1024(%rdx){1to4}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspdy $123, -1032(%rdx){1to4}, %k5	 # AVX512{DQ,VL}
	vfpclassps $0xab, %xmm30, %k5	 # AVX512{DQ,VL}
	vfpclassps $0xab, %xmm30, %k5{%k7}	 # AVX512{DQ,VL}
	vfpclassps $123, %xmm30, %k5	 # AVX512{DQ,VL}
	vfpclasspsx $123, (%rcx), %k5	 # AVX512{DQ,VL}
	vfpclasspsx $123, 0x123(%rax,%r14,8), %k5	 # AVX512{DQ,VL}
	vfpclassps $123, (%rcx){1to4}, %k5	 # AVX512{DQ,VL}
	vfpclasspsx $123, 2032(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsx $123, 2048(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspsx $123, -2048(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsx $123, -2064(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspsx $123, 508(%rdx){1to4}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsx $123, 512(%rdx){1to4}, %k5	 # AVX512{DQ,VL}
	vfpclasspsx $123, -512(%rdx){1to4}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsx $123, -516(%rdx){1to4}, %k5	 # AVX512{DQ,VL}
	vfpclassps $0xab, %ymm30, %k5	 # AVX512{DQ,VL}
	vfpclassps $0xab, %ymm30, %k5{%k7}	 # AVX512{DQ,VL}
	vfpclassps $123, %ymm30, %k5	 # AVX512{DQ,VL}
	vfpclasspsy $123, (%rcx), %k5	 # AVX512{DQ,VL}
	vfpclasspsy $123, 0x123(%rax,%r14,8), %k5	 # AVX512{DQ,VL}
	vfpclassps $123, (%rcx){1to8}, %k5	 # AVX512{DQ,VL}
	vfpclasspsy $123, 4064(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsy $123, 4096(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspsy $123, -4096(%rdx), %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsy $123, -4128(%rdx), %k5	 # AVX512{DQ,VL}
	vfpclasspsy $123, 508(%rdx){1to8}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsy $123, 512(%rdx){1to8}, %k5	 # AVX512{DQ,VL}
	vfpclasspsy $123, -512(%rdx){1to8}, %k5	 # AVX512{DQ,VL} Disp8
	vfpclasspsy $123, -516(%rdx){1to8}, %k5	 # AVX512{DQ,VL}
	vinsertf64x2 $0xab, %xmm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinsertf64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vinsertf64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vinsertf64x2 $123, %xmm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinsertf64x2 $123, (%rcx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinsertf64x2 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinsertf64x2 $123, 2032(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vinsertf64x2 $123, 2048(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinsertf64x2 $123, -2048(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vinsertf64x2 $123, -2064(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinserti64x2 $0xab, %xmm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinserti64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vinserti64x2 $0xab, %xmm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vinserti64x2 $123, %xmm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinserti64x2 $123, (%rcx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinserti64x2 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinserti64x2 $123, 2032(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vinserti64x2 $123, 2048(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vinserti64x2 $123, -2048(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vinserti64x2 $123, -2064(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 %xmm31, %xmm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 %xmm31, %xmm30{%k7}	 # AVX512{DQ,VL}
	vbroadcasti32x2 %xmm31, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vbroadcasti32x2 (%rcx), %xmm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 0x123(%rax,%r14,8), %xmm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 1016(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2 1024(%rdx), %xmm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 -1024(%rdx), %xmm30	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2 -1032(%rdx), %xmm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 %xmm31, %ymm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 %xmm31, %ymm30{%k7}	 # AVX512{DQ,VL}
	vbroadcasti32x2 %xmm31, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vbroadcasti32x2 (%rcx), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 0x123(%rax,%r14,8), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 1016(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2 1024(%rdx), %ymm30	 # AVX512{DQ,VL}
	vbroadcasti32x2 -1024(%rdx), %ymm30	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2 -1032(%rdx), %ymm30	 # AVX512{DQ,VL}
	vpmullq %xmm28, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vpmullq %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vpmullq (%rcx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq 2032(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vpmullq 2048(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq -2048(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vpmullq -2064(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vpmullq 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vpmullq -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vpmullq %ymm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vpmullq %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vpmullq (%rcx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq 4064(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vpmullq 4096(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq -4096(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vpmullq -4128(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vpmullq 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vpmullq -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vpmullq -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $0xab, %xmm28, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vrangepd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vrangepd $123, %xmm28, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, (%rcx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $123, -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangepd $0xab, %ymm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vrangepd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vrangepd $123, %ymm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, (%rcx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangepd $123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangepd $123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $0xab, %xmm28, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vrangeps $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vrangeps $123, %xmm28, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, (%rcx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $123, -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vrangeps $0xab, %ymm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{DQ,VL}
	vrangeps $0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{DQ,VL}
	vrangeps $123, %ymm28, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, (%rcx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vrangeps $123, -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{DQ,VL} Disp8
	vrangeps $123, -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{DQ,VL}
	vandpd %xmm28, %xmm29, %xmm30	 # AVX512{DQ,VL}
	vandpd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{DQ,VL}
	vandpd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{DQ,VL}
	vandpd (%rcx), %xmm29, %xmm30	 # AVX512{DQ,VL}
vandpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandnpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandnpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandnpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandnpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vandnps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vandnps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vandnps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vandnps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vandnps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vandnps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vandnps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vandnps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vorpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vorpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vorpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vorpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vorps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vorps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vorps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vorps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vorps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vorps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vorps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vorps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vorps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vorps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vxorpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vxorpd (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorpd %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vxorpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vxorpd (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps %xmm28, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps %xmm28, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vxorps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vxorps (%rcx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps 2032(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps 2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps -2048(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps -2064(%rdx), %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL} Disp8
vxorps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{DQ,VL}
vxorps %ymm28, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps %ymm28, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vxorps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vxorps (%rcx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps 4064(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps 4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps -4096(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps -4128(%rdx), %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vxorps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL} Disp8
vxorps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{DQ,VL}
vreducepd $0xab, %xmm29, %xmm30 # AVX512{DQ,VL}
vreducepd $0xab, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vreducepd $123, %xmm29, %xmm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx), %xmm30 # AVX512{DQ,VL}
vreducepd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vreducepd $123, 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vreducepd $123, -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vreducepd $123, 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vreducepd $123, -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vreducepd $0xab, %ymm29, %ymm30 # AVX512{DQ,VL}
vreducepd $0xab, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vreducepd $123, %ymm29, %ymm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx), %ymm30 # AVX512{DQ,VL}
vreducepd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vreducepd $123, (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vreducepd $123, 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vreducepd $123, -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vreducepd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vreducepd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vreducepd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vreduceps $0xab, %xmm29, %xmm30 # AVX512{DQ,VL}
vreduceps $0xab, %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vreduceps $123, %xmm29, %xmm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx), %xmm30 # AVX512{DQ,VL}
vreduceps $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx){1to4}, %xmm30 # AVX512{DQ,VL}
vreduceps $123, 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vreduceps $123, -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vreduceps $123, 508(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 512(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vreduceps $123, -512(%rdx){1to4}, %xmm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -516(%rdx){1to4}, %xmm30 # AVX512{DQ,VL}
vreduceps $0xab, %ymm29, %ymm30 # AVX512{DQ,VL}
vreduceps $0xab, %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vreduceps $123, %ymm29, %ymm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx), %ymm30 # AVX512{DQ,VL}
vreduceps $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vreduceps $123, (%rcx){1to8}, %ymm30 # AVX512{DQ,VL}
vreduceps $123, 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vreduceps $123, -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vreduceps $123, 508(%rdx){1to8}, %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, 512(%rdx){1to8}, %ymm30 # AVX512{DQ,VL}
vreduceps $123, -512(%rdx){1to8}, %ymm30 # AVX512{DQ,VL} Disp8
vreduceps $123, -516(%rdx){1to8}, %ymm30 # AVX512{DQ,VL}
	# vextractf64x2 / vextracti64x2 to memory: imm8 selects which 128-bit
	# lane of the ymm source is stored.  The {%k7} form checks merge
	# masking on a store destination (no {z} variant appears here for
	# memory operands).  "Disp8" lines probe the EVEX compressed
	# displacement boundary: disp8 is scaled by the 16-byte access size,
	# so +2032/-2048 still fit a disp8 while +2048/-2064 need disp32.
	vextractf64x2	$0xab, %ymm29, (%rcx)	 # AVX512{DQ,VL}
	vextractf64x2	$0xab, %ymm29, (%rcx){%k7}	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm29, (%rcx)	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm29, 0x123(%rax,%r14,8)	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm29, 2032(%rdx)	 # AVX512{DQ,VL} Disp8
	vextractf64x2	$123, %ymm29, 2048(%rdx)	 # AVX512{DQ,VL}
	vextractf64x2	$123, %ymm29, -2048(%rdx)	 # AVX512{DQ,VL} Disp8
	vextractf64x2	$123, %ymm29, -2064(%rdx)	 # AVX512{DQ,VL}
	vextracti64x2	$0xab, %ymm29, (%rcx)	 # AVX512{DQ,VL}
	vextracti64x2	$0xab, %ymm29, (%rcx){%k7}	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm29, (%rcx)	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm29, 0x123(%rax,%r14,8)	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm29, 2032(%rdx)	 # AVX512{DQ,VL} Disp8
	vextracti64x2	$123, %ymm29, 2048(%rdx)	 # AVX512{DQ,VL}
	vextracti64x2	$123, %ymm29, -2048(%rdx)	 # AVX512{DQ,VL} Disp8
	vextracti64x2	$123, %ymm29, -2064(%rdx)	 # AVX512{DQ,VL}
vcvttpd2qq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttpd2qq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2qq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2qq 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2qq %ymm29, %ymm30 # AVX512{DQ,VL}
vcvttpd2qq %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttpd2qq %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2qq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2qq 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2qq 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2qq -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2qq -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2uqq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq 2032(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 2048(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq -2048(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -2064(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq 1016(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq -1024(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -1032(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttpd2uqq %ymm29, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq %ymm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %ymm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttpd2uqq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq 4064(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 4096(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq -4096(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -4128(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq 1016(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq 1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttpd2uqq -1024(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttpd2uqq -1032(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2qq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttps2qq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttps2qq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2qq 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2qq -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2qq 508(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2qq -512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -516(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %ymm30 # AVX512{DQ,VL}
vcvttps2qq %xmm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2qq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttps2qq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttps2qq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2qq 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2qq -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2qq 508(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq 512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2qq -512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2qq -516(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %xmm30{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %xmm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2uqq (%rcx), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq 0x123(%rax,%r14,8), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq (%rcx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq 1016(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 1024(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq -1024(%rdx), %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -1032(%rdx), %xmm30 # AVX512{DQ,VL}
vcvttps2uqq 508(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq -512(%rdx){1to2}, %xmm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -516(%rdx){1to2}, %xmm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %ymm30{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm29, %ymm30{%k7}{z} # AVX512{DQ,VL}
vcvttps2uqq (%rcx), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq 0x123(%rax,%r14,8), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq (%rcx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq 2032(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 2048(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq -2048(%rdx), %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -2064(%rdx), %ymm30 # AVX512{DQ,VL}
vcvttps2uqq 508(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq 512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
vcvttps2uqq -512(%rdx){1to4}, %ymm30 # AVX512{DQ,VL} Disp8
vcvttps2uqq -516(%rdx){1to4}, %ymm30 # AVX512{DQ,VL}
	# Mask-register interchange forms (AVX512DQ): vpmov{d,q}2m collects
	# the top bit of each vector element into a mask register, and
	# vpmovm2{d,q} expands a mask back into full vector elements.
	# Both the 128-bit and 256-bit (VL) register widths are covered.
	vpmovd2m	%xmm30, %k5	 # AVX512{DQ,VL}
	vpmovd2m	%ymm30, %k5	 # AVX512{DQ,VL}
	vpmovq2m	%xmm30, %k5	 # AVX512{DQ,VL}
	vpmovq2m	%ymm30, %k5	 # AVX512{DQ,VL}
	vpmovm2d	%k5, %xmm30	 # AVX512{DQ,VL}
	vpmovm2d	%k5, %ymm30	 # AVX512{DQ,VL}
	vpmovm2q	%k5, %xmm30	 # AVX512{DQ,VL}
	vpmovm2q	%k5, %ymm30	 # AVX512{DQ,VL}
	# Everything below repeats the tests in Intel syntax: no % register
	# prefixes, "dst, src" operand order, sizes via <SIZE> PTR keywords.
	.intel_syntax noprefix
	# Intel-syntax broadcast tests.  vbroadcast{f,i}64x2 replicate a
	# 128-bit (XMMWORD) memory chunk, so their Disp8 boundary is at
	# +2032/-2048 (disp8 scaled by 16).  vbroadcastf32x2 replicates a
	# 64-bit (QWORD) pair of single-precision elements, giving a
	# boundary at +1016/-1024 (disp8 scaled by 8); unlike the x2
	# 128-bit forms it also has a register-source variant.
	vbroadcastf64x2	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vbroadcastf64x2	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vbroadcasti64x2	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, xmm31	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30{k7}, xmm31	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30{k7}{z}, xmm31	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2	ymm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vbroadcastf32x2	ymm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcastf32x2	ymm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
vcvtpd2qq xmm30, xmm29 # AVX512{DQ,VL}
vcvtpd2qq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvtpd2qq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvtpd2qq xmm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvtpd2qq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvtpd2qq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvtpd2qq xmm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvtpd2qq xmm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvtpd2qq xmm30, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm30, [rdx+1024]{1to2} # AVX512{DQ,VL}
vcvtpd2qq xmm30, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm30, [rdx-1032]{1to2} # AVX512{DQ,VL}
vcvtpd2qq ymm30, ymm29 # AVX512{DQ,VL}
vcvtpd2qq ymm30{k7}, ymm29 # AVX512{DQ,VL}
vcvtpd2qq ymm30{k7}{z}, ymm29 # AVX512{DQ,VL}
vcvtpd2qq ymm30, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvtpd2qq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvtpd2qq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvtpd2qq ymm30, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm30, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vcvtpd2qq ymm30, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm30, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vcvtpd2qq ymm30, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm30, [rdx+1024]{1to4} # AVX512{DQ,VL}
vcvtpd2qq ymm30, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm30, [rdx-1032]{1to4} # AVX512{DQ,VL}
vcvtpd2uqq xmm30, xmm29 # AVX512{DQ,VL}
vcvtpd2uqq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvtpd2uqq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvtpd2uqq xmm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvtpd2uqq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvtpd2uqq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvtpd2uqq xmm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvtpd2uqq xmm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvtpd2uqq xmm30, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm30, [rdx+1024]{1to2} # AVX512{DQ,VL}
vcvtpd2uqq xmm30, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm30, [rdx-1032]{1to2} # AVX512{DQ,VL}
vcvtpd2uqq ymm30, ymm29 # AVX512{DQ,VL}
vcvtpd2uqq ymm30{k7}, ymm29 # AVX512{DQ,VL}
vcvtpd2uqq ymm30{k7}{z}, ymm29 # AVX512{DQ,VL}
vcvtpd2uqq ymm30, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvtpd2uqq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvtpd2uqq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvtpd2uqq ymm30, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm30, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vcvtpd2uqq ymm30, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm30, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vcvtpd2uqq ymm30, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm30, [rdx+1024]{1to4} # AVX512{DQ,VL}
vcvtpd2uqq ymm30, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm30, [rdx-1032]{1to4} # AVX512{DQ,VL}
vcvtps2qq xmm30, xmm29 # AVX512{DQ,VL}
vcvtps2qq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvtps2qq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvtps2qq xmm30, QWORD PTR [rcx] # AVX512{DQ,VL}
vcvtps2qq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvtps2qq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvtps2qq xmm30, QWORD PTR [rdx+1016] # AVX512{DQ,VL} Disp8
vcvtps2qq xmm30, QWORD PTR [rdx+1024] # AVX512{DQ,VL}
vcvtps2qq xmm30, QWORD PTR [rdx-1024] # AVX512{DQ,VL} Disp8
vcvtps2qq xmm30, QWORD PTR [rdx-1032] # AVX512{DQ,VL}
vcvtps2qq xmm30, [rdx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvtps2qq xmm30, [rdx+512]{1to2} # AVX512{DQ,VL}
vcvtps2qq xmm30, [rdx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvtps2qq xmm30, [rdx-516]{1to2} # AVX512{DQ,VL}
vcvtps2qq xmm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvtps2qq ymm30, xmm29 # AVX512{DQ,VL}
vcvtps2qq ymm30{k7}, xmm29 # AVX512{DQ,VL}
vcvtps2qq ymm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvtps2qq ymm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvtps2qq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvtps2qq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvtps2qq ymm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvtps2qq ymm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvtps2qq ymm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvtps2qq ymm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
# ---------------------------------------------------------------------------
# AVX512{DQ,VL} assembler encoding tests (Intel syntax).
# Each line exercises one operand form: register/register, masking {k7},
# zero-masking {k7}{z}, plain memory, SIB addressing, embedded broadcast
# ({1toN} / BCST), and disp8*N compression boundaries.  Lines tagged "Disp8"
# use a displacement that must assemble into a compressed 8-bit EVEX
# displacement; the line after each uses the first offset that must NOT
# (forcing a full disp32).  Do not edit operands/offsets: they are matched
# against a golden disassembly (.d) file.
# ---------------------------------------------------------------------------
# vcvtps2qq ymm (tail of group): dword-broadcast disp8*4 boundaries.
	vcvtps2qq	ymm30, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2qq	ymm30, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vcvtps2qq	ymm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
# vcvtps2uqq xmm <- 2 packed floats (qword memory), disp8*8 and bcast disp8*4.
	vcvtps2uqq	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, [rdx+508]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, [rdx+512]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, [rdx-512]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	xmm30, [rdx-516]{1to2}	 # AVX512{DQ,VL}
	vcvtps2uqq	xmm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
# vcvtps2uqq ymm <- 4 packed floats (xmmword memory), disp8*16 boundaries.
	vcvtps2uqq	ymm30, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtps2uqq	ymm30, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vcvtps2uqq	ymm30, DWORD BCST [rdx+508]	 # AVX512{DQ,VL} Disp8
# vcvtqq2pd xmm: qword elements, bcast disp8*8 boundaries.
	vcvtqq2pd	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2pd	xmm30, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	xmm30, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
# vcvtqq2pd ymm: ymmword memory, disp8*32 boundaries.
	vcvtqq2pd	ymm30, ymm29	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2pd	ymm30, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2pd	ymm30, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
# vcvtqq2ps: narrowing, so QWORD BCST disambiguates the broadcast element size.
	vcvtqq2ps	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{DQ,VL}
# vcvtqq2ps xmm <- ymm source (256-bit memory operand).
	vcvtqq2ps	xmm30, ymm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtqq2ps	xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{DQ,VL}
# vcvtuqq2pd xmm: unsigned qword -> double, same pattern as vcvtqq2pd.
	vcvtuqq2pd	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2pd	xmm30, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	xmm30, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
# vcvtuqq2pd ymm forms.
	vcvtuqq2pd	ymm30, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2pd	ymm30, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2pd	ymm30, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
# vcvtuqq2ps: narrowing unsigned convert, QWORD BCST needed on broadcasts.
	vcvtuqq2ps	xmm30, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}{z}, xmm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, [rcx]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{DQ,VL}
# vcvtuqq2ps xmm <- ymm source forms.
	vcvtuqq2ps	xmm30, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30{k7}{z}, ymm29	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, [rcx]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vcvtuqq2ps	xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{DQ,VL}
# vextractf64x2 / vextracti64x2: extract 128-bit lane to register, imm8 forms.
	vextractf64x2	xmm30, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm30{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm30{k7}{z}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	xmm30, ymm29, 123	 # AVX512{DQ,VL}
	vextracti64x2	xmm30, ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm30{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm30{k7}{z}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	xmm30, ymm29, 123	 # AVX512{DQ,VL}
# vfpclasspd: FP-class test into mask register k5; xmm then ymm source widths.
	vfpclasspd	k5, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5, xmm30, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, [rcx]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5{k7}, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclasspd	k5, ymm30, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclasspd	k5, QWORD BCST [rdx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclasspd	k5, QWORD BCST [rdx-1032]{1to4}, 123	 # AVX512{DQ,VL}
# vfpclassps: single-precision variant; dword broadcast (disp8*4) boundaries.
	vfpclassps	k5, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, xmm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5, xmm30, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx+508]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx+512]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx-512]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx-516]{1to4}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5{k7}, ymm30, 0xab	 # AVX512{DQ,VL}
	vfpclassps	k5, ymm30, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, [rcx]{1to8}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx+508]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx+512]{1to8}, 123	 # AVX512{DQ,VL}
	vfpclassps	k5, DWORD BCST [rdx-512]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vfpclassps	k5, DWORD BCST [rdx-516]{1to8}, 123	 # AVX512{DQ,VL}
# vinsertf64x2 / vinserti64x2: insert 128-bit lane, reg and xmmword-mem forms.
	vinsertf64x2	ymm30, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30{k7}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30{k7}{z}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, xmm28, 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vinsertf64x2	ymm30, ymm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm30{k7}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm30{k7}{z}, ymm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, xmm28, 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vinserti64x2	ymm30, ymm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
# vbroadcasti32x2: broadcast a 64-bit (2x dword) element; qword memory, disp8*8.
	vbroadcasti32x2	xmm30, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30{k7}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30{k7}{z}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	xmm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vbroadcasti32x2	xmm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	xmm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30{k7}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30{k7}{z}, xmm31	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rcx]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rdx+1016]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	ymm30, QWORD PTR [rdx+1024]	 # AVX512{DQ,VL}
	vbroadcasti32x2	ymm30, QWORD PTR [rdx-1024]	 # AVX512{DQ,VL} Disp8
	vbroadcasti32x2	ymm30, QWORD PTR [rdx-1032]	 # AVX512{DQ,VL}
# vpmullq: packed 64-bit multiply (low), 3-operand with qword broadcast.
	vpmullq	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vpmullq	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vpmullq	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vpmullq	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vpmullq	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vpmullq	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vpmullq	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vpmullq	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vpmullq	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
# vrangepd / vrangeps: range operation with imm8 control; pd uses qword bcast,
# ps uses dword bcast.
	vrangepd	xmm30, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, xmm28, 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, [rcx]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, [rdx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	xmm30, xmm29, [rdx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	xmm30, xmm29, [rdx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, ymm28, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vrangepd	ymm30, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangepd	ymm30, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, xmm28, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, [rdx+512]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	xmm30, xmm29, [rdx-512]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	xmm30, xmm29, [rdx-516]{1to4}, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, ymm28, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{DQ,VL}
	vrangeps	ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vrangeps	ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{DQ,VL}
# vandpd/vandps: AVX512DQ adds EVEX forms of the logical FP ops; these test the
# new EVEX encodings with masking and element-size broadcasts.
	vandpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vandpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vandpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vandpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vandpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandps	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vandps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vandps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandps	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{DQ,VL}
	vandps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{DQ,VL} Disp8
	vandps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{DQ,VL}
# vandnpd/vandnps: AND-NOT variants, same operand matrix.
	vandnpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vandnpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vandnpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vandnpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnps	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vandnps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vandnps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnps	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{DQ,VL}
	vandnps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{DQ,VL} Disp8
	vandnps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{DQ,VL}
# vorpd/vorps: bitwise OR, same operand matrix.
	vorpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vorpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vorpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vorpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vorpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vorpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vorpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vorpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vorpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vorps	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vorps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vorps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vorps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vorps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vorps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vorps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vorps	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vorps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vorps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vorps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{DQ,VL} Disp8
	vorps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{DQ,VL}
	vorps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{DQ,VL} Disp8
	vorps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{DQ,VL}
# vxorpd/vxorps: bitwise XOR, same operand matrix.
	vxorpd	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vxorpd	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vxorpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vxorpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vxorpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{DQ,VL} Disp8
	vxorpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{DQ,VL}
	vxorpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{DQ,VL} Disp8
	vxorpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vxorpd	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vxorpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vxorpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vxorpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{DQ,VL} Disp8
	vxorpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{DQ,VL}
	vxorpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{DQ,VL} Disp8
	vxorpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, xmm28	 # AVX512{DQ,VL}
	vxorps	xmm30{k7}, xmm29, xmm28	 # AVX512{DQ,VL}
	vxorps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{DQ,VL} Disp8
	vxorps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{DQ,VL} Disp8
	vxorps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{DQ,VL} Disp8
	vxorps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{DQ,VL}
	vxorps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{DQ,VL} Disp8
	vxorps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, ymm28	 # AVX512{DQ,VL}
	vxorps	ymm30{k7}, ymm29, ymm28	 # AVX512{DQ,VL}
	vxorps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{DQ,VL} Disp8
	vxorps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{DQ,VL} Disp8
	vxorps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{DQ,VL} Disp8
	vxorps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{DQ,VL}
	vxorps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{DQ,VL} Disp8
	vxorps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{DQ,VL}
# vreducepd / vreduceps: reduce-by-imm8, 2-operand + immediate forms.
	vreducepd	xmm30, xmm29, 0xab	 # AVX512{DQ,VL}
	vreducepd	xmm30{k7}, xmm29, 0xab	 # AVX512{DQ,VL}
	vreducepd	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{DQ,VL}
	vreducepd	xmm30, xmm29, 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, [rcx]{1to2}, 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vreducepd	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vreducepd	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, [rdx+1016]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vreducepd	xmm30, [rdx+1024]{1to2}, 123	 # AVX512{DQ,VL}
	vreducepd	xmm30, [rdx-1024]{1to2}, 123	 # AVX512{DQ,VL} Disp8
	vreducepd	xmm30, [rdx-1032]{1to2}, 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, ymm29, 0xab	 # AVX512{DQ,VL}
	vreducepd	ymm30{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vreducepd	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{DQ,VL}
	vreducepd	ymm30, ymm29, 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vreducepd	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vreducepd	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, [rdx+1016]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vreducepd	ymm30, [rdx+1024]{1to4}, 123	 # AVX512{DQ,VL}
	vreducepd	ymm30, [rdx-1024]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vreducepd	ymm30, [rdx-1032]{1to4}, 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, xmm29, 0xab	 # AVX512{DQ,VL}
	vreduceps	xmm30{k7}, xmm29, 0xab	 # AVX512{DQ,VL}
	vreduceps	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{DQ,VL}
	vreduceps	xmm30, xmm29, 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, [rcx]{1to4}, 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{DQ,VL} Disp8
	vreduceps	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{DQ,VL} Disp8
	vreduceps	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, [rdx+508]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vreduceps	xmm30, [rdx+512]{1to4}, 123	 # AVX512{DQ,VL}
	vreduceps	xmm30, [rdx-512]{1to4}, 123	 # AVX512{DQ,VL} Disp8
	vreduceps	xmm30, [rdx-516]{1to4}, 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, ymm29, 0xab	 # AVX512{DQ,VL}
	vreduceps	ymm30{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vreduceps	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{DQ,VL}
	vreduceps	ymm30, ymm29, 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, [rcx]{1to8}, 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{DQ,VL} Disp8
	vreduceps	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{DQ,VL} Disp8
	vreduceps	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, [rdx+508]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vreduceps	ymm30, [rdx+512]{1to8}, 123	 # AVX512{DQ,VL}
	vreduceps	ymm30, [rdx-512]{1to8}, 123	 # AVX512{DQ,VL} Disp8
	vreduceps	ymm30, [rdx-516]{1to8}, 123	 # AVX512{DQ,VL}
# vextractf64x2/vextracti64x2 memory-destination (store) forms; merging-masking
# only ({k7}) — zero-masking is not allowed on memory destinations.
# (Group continues past this chunk.)
	vextractf64x2	XMMWORD PTR [rcx], ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	XMMWORD PTR [rcx]{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
	vextractf64x2	XMMWORD PTR [rcx], ymm29, 123	 # AVX512{DQ,VL}
	vextractf64x2	XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123	 # AVX512{DQ,VL}
	vextractf64x2	XMMWORD PTR [rdx+2032], ymm29, 123	 # AVX512{DQ,VL} Disp8
	vextractf64x2	XMMWORD PTR [rdx+2048], ymm29, 123	 # AVX512{DQ,VL}
	vextractf64x2	XMMWORD PTR [rdx-2048], ymm29, 123	 # AVX512{DQ,VL} Disp8
	vextractf64x2	XMMWORD PTR [rdx-2064], ymm29, 123	 # AVX512{DQ,VL}
	vextracti64x2	XMMWORD PTR [rcx], ymm29, 0xab	 # AVX512{DQ,VL}
	vextracti64x2	XMMWORD PTR [rcx]{k7}, ymm29, 0xab	 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rcx], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rdx+2032], ymm29, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [rdx+2048], ymm29, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [rdx-2048], ymm29, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [rdx-2064], ymm29, 123 # AVX512{DQ,VL}
vcvttpd2qq xmm30, xmm29 # AVX512{DQ,VL}
vcvttpd2qq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttpd2qq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2qq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttpd2qq xmm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttpd2qq xmm30, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, [rdx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm30, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm30, [rdx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2qq ymm30, ymm29 # AVX512{DQ,VL}
vcvttpd2qq ymm30{k7}, ymm29 # AVX512{DQ,VL}
vcvttpd2qq ymm30{k7}{z}, ymm29 # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2qq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vcvttpd2qq ymm30, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vcvttpd2qq ymm30, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, [rdx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm30, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm30, [rdx-1032]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq xmm30, xmm29 # AVX512{DQ,VL}
vcvttpd2uqq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttpd2uqq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttpd2uqq xmm30, [rdx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, [rdx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm30, [rdx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm30, [rdx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq ymm30, ymm29 # AVX512{DQ,VL}
vcvttpd2uqq ymm30{k7}, ymm29 # AVX512{DQ,VL}
vcvttpd2uqq ymm30{k7}{z}, ymm29 # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rdx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, YMMWORD PTR [rdx+4096] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, YMMWORD PTR [rdx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, YMMWORD PTR [rdx-4128] # AVX512{DQ,VL}
vcvttpd2uqq ymm30, [rdx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, [rdx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm30, [rdx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm30, [rdx-1032]{1to4} # AVX512{DQ,VL}
vcvttps2qq xmm30, xmm29 # AVX512{DQ,VL}
vcvttps2qq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2qq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2qq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rdx+1016] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, QWORD PTR [rdx+1024] # AVX512{DQ,VL}
vcvttps2qq xmm30, QWORD PTR [rdx-1024] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, QWORD PTR [rdx-1032] # AVX512{DQ,VL}
vcvttps2qq xmm30, [rdx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, [rdx+512]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm30, [rdx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm30, [rdx-516]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, xmm29 # AVX512{DQ,VL}
vcvttps2qq ymm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2qq ymm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2qq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttps2qq ymm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttps2qq ymm30, [rdx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, [rdx+512]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm30, [rdx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm30, [rdx-516]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, xmm29 # AVX512{DQ,VL}
vcvttps2uqq xmm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq xmm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2uqq xmm30, [rcx]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rdx+1016] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, QWORD PTR [rdx+1024] # AVX512{DQ,VL}
vcvttps2uqq xmm30, QWORD PTR [rdx-1024] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, QWORD PTR [rdx-1032] # AVX512{DQ,VL}
vcvttps2uqq xmm30, [rdx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, [rdx+512]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm30, [rdx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm30, [rdx-516]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, xmm29 # AVX512{DQ,VL}
vcvttps2uqq ymm30{k7}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq ymm30{k7}{z}, xmm29 # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rcx] # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{DQ,VL}
vcvttps2uqq ymm30, [rcx]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rdx+2032] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, XMMWORD PTR [rdx+2048] # AVX512{DQ,VL}
vcvttps2uqq ymm30, XMMWORD PTR [rdx-2048] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, XMMWORD PTR [rdx-2064] # AVX512{DQ,VL}
vcvttps2uqq ymm30, [rdx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, [rdx+512]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm30, [rdx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm30, [rdx-516]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm30, DWORD BCST [rdx+508] # AVX512{DQ,VL} Disp8
vpmovd2m k5, xmm30 # AVX512{DQ,VL}
vpmovd2m k5, ymm30 # AVX512{DQ,VL}
vpmovq2m k5, xmm30 # AVX512{DQ,VL}
vpmovq2m k5, ymm30 # AVX512{DQ,VL}
vpmovm2d xmm30, k5 # AVX512{DQ,VL}
vpmovm2d ymm30, k5 # AVX512{DQ,VL}
vpmovm2q xmm30, k5 # AVX512{DQ,VL}
vpmovm2q ymm30, k5 # AVX512{DQ,VL}
# --------------------------------------------------------------------------
# File boundary (concatenation artifact).  The content below is:
#   repo: tactcomplabs/xbgas-binutils-gdb
#   path: gas/testsuite/gas/i386/opts.s  (4,848 bytes)
# --------------------------------------------------------------------------
# Check instructions with encoding options
#
# Every instruction is assembled twice: once plain and once with the ".s"
# mnemonic suffix, which tells gas to emit the alternative ModRM encoding
# (the direction-bit-swapped form) for reg,reg operands.  The assembled
# output is matched line-by-line against a disassembly dump, so the
# instruction sequence and operands must not be changed or reordered.
	.allow_index_reg
	.text
_start:
# Tests for op reg, reg
# Byte/word/dword register pairs for each ALU op plus mov, with and
# without an explicit AT&T size suffix (b/w/l) on the mnemonic.
	add %dl,%cl
	add.s %dl,%cl
	add %dx,%cx
	add.s %dx,%cx
	add %edx,%ecx
	add.s %edx,%ecx
	addb %dl,%cl
	addb.s %dl,%cl
	addw %dx,%cx
	addw.s %dx,%cx
	addl %edx,%ecx
	addl.s %edx,%ecx
	adc %dl,%cl
	adc.s %dl,%cl
	adc %dx,%cx
	adc.s %dx,%cx
	adc %edx,%ecx
	adc.s %edx,%ecx
	adcb %dl,%cl
	adcb.s %dl,%cl
	adcw %dx,%cx
	adcw.s %dx,%cx
	adcl %edx,%ecx
	adcl.s %edx,%ecx
	and %dl,%cl
	and.s %dl,%cl
	and %dx,%cx
	and.s %dx,%cx
	and %edx,%ecx
	and.s %edx,%ecx
	andb %dl,%cl
	andb.s %dl,%cl
	andw %dx,%cx
	andw.s %dx,%cx
	andl %edx,%ecx
	andl.s %edx,%ecx
	cmp %dl,%cl
	cmp.s %dl,%cl
	cmp %dx,%cx
	cmp.s %dx,%cx
	cmp %edx,%ecx
	cmp.s %edx,%ecx
	cmpb %dl,%cl
	cmpb.s %dl,%cl
	cmpw %dx,%cx
	cmpw.s %dx,%cx
	cmpl %edx,%ecx
	cmpl.s %edx,%ecx
	mov %dl,%cl
	mov.s %dl,%cl
	mov %dx,%cx
	mov.s %dx,%cx
	mov %edx,%ecx
	mov.s %edx,%ecx
	movb %dl,%cl
	movb.s %dl,%cl
	movw %dx,%cx
	movw.s %dx,%cx
	movl %edx,%ecx
	movl.s %edx,%ecx
	or %dl,%cl
	or.s %dl,%cl
	or %dx,%cx
	or.s %dx,%cx
	or %edx,%ecx
	or.s %edx,%ecx
	orb %dl,%cl
	orb.s %dl,%cl
	orw %dx,%cx
	orw.s %dx,%cx
	orl %edx,%ecx
	orl.s %edx,%ecx
	sbb %dl,%cl
	sbb.s %dl,%cl
	sbb %dx,%cx
	sbb.s %dx,%cx
	sbb %edx,%ecx
	sbb.s %edx,%ecx
	sbbb %dl,%cl
	sbbb.s %dl,%cl
	sbbw %dx,%cx
	sbbw.s %dx,%cx
	sbbl %edx,%ecx
	sbbl.s %edx,%ecx
	sub %dl,%cl
	sub.s %dl,%cl
	sub %dx,%cx
	sub.s %dx,%cx
	sub %edx,%ecx
	sub.s %edx,%ecx
	subb %dl,%cl
	subb.s %dl,%cl
	subw %dx,%cx
	subw.s %dx,%cx
	subl %edx,%ecx
	subl.s %edx,%ecx
	xor %dl,%cl
	xor.s %dl,%cl
	xor %dx,%cx
	xor.s %dx,%cx
	xor %edx,%ecx
	xor.s %edx,%ecx
	xorb %dl,%cl
	xorb.s %dl,%cl
	xorw %dx,%cx
	xorw.s %dx,%cx
	xorl %edx,%ecx
	xorl.s %edx,%ecx
# Tests for op ymm, ymm
# AVX register-to-register moves in AT&T syntax; ".s" forces the
# store-form opcode of each move so both reg,reg encodings are covered.
	vmovapd %ymm4,%ymm6
	vmovapd.s %ymm4,%ymm6
	vmovaps %ymm4,%ymm6
	vmovaps.s %ymm4,%ymm6
	vmovdqa %ymm4,%ymm6
	vmovdqa.s %ymm4,%ymm6
	vmovdqu %ymm4,%ymm6
	vmovdqu.s %ymm4,%ymm6
	vmovupd %ymm4,%ymm6
	vmovupd.s %ymm4,%ymm6
	vmovups %ymm4,%ymm6
	vmovups.s %ymm4,%ymm6
# Tests for op xmm, xmm
# Legacy-SSE moves first, then their VEX-encoded v-prefixed forms.
	movapd %xmm4,%xmm6
	movapd.s %xmm4,%xmm6
	movaps %xmm4,%xmm6
	movaps.s %xmm4,%xmm6
	movdqa %xmm4,%xmm6
	movdqa.s %xmm4,%xmm6
	movdqu %xmm4,%xmm6
	movdqu.s %xmm4,%xmm6
	movq %xmm4,%xmm6
	movq.s %xmm4,%xmm6
	movsd %xmm4,%xmm6
	movsd.s %xmm4,%xmm6
	movss %xmm4,%xmm6
	movss.s %xmm4,%xmm6
	movupd %xmm4,%xmm6
	movupd.s %xmm4,%xmm6
	movups %xmm4,%xmm6
	movups.s %xmm4,%xmm6
	vmovapd %xmm4,%xmm6
	vmovapd.s %xmm4,%xmm6
	vmovaps %xmm4,%xmm6
	vmovaps.s %xmm4,%xmm6
	vmovdqa %xmm4,%xmm6
	vmovdqa.s %xmm4,%xmm6
	vmovdqu %xmm4,%xmm6
	vmovdqu.s %xmm4,%xmm6
	vmovq %xmm4,%xmm6
	vmovq.s %xmm4,%xmm6
	vmovupd %xmm4,%xmm6
	vmovupd.s %xmm4,%xmm6
	vmovups %xmm4,%xmm6
	vmovups.s %xmm4,%xmm6
# Tests for op xmm, xmm, xmm
# Three-operand (merging) register forms of vmovsd/vmovss.
	vmovsd %xmm4,%xmm6,%xmm2
	vmovsd.s %xmm4,%xmm6,%xmm2
	vmovss %xmm4,%xmm6,%xmm2
	vmovss.s %xmm4,%xmm6,%xmm2
# Tests for op mm, mm
	movq %mm0,%mm4
	movq.s %mm0,%mm4
# Tests for op bnd, bnd
# MPX bound-register move.
	bndmov %bnd1,%bnd2
	bndmov.s %bnd1,%bnd2
	.intel_syntax noprefix
# Tests for op reg, reg
# Same register pairs as the AT&T section, re-run in Intel syntax
# (destination first, no % register prefixes, operand size implied by
# the registers, so no size-suffixed mnemonic variants here).
	add cl,dl
	add.s cl,dl
	add cx,dx
	add.s cx,dx
	add ecx,edx
	add.s ecx,edx
	adc cl,dl
	adc.s cl,dl
	adc cx,dx
	adc.s cx,dx
	adc ecx,edx
	adc.s ecx,edx
	and cl,dl
	and.s cl,dl
	and cx,dx
	and.s cx,dx
	and ecx,edx
	and.s ecx,edx
	cmp cl,dl
	cmp.s cl,dl
	cmp cx,dx
	cmp.s cx,dx
	cmp ecx,edx
	cmp.s ecx,edx
	mov cl,dl
	mov.s cl,dl
	mov cx,dx
	mov.s cx,dx
	mov ecx,edx
	mov.s ecx,edx
	or cl,dl
	or.s cl,dl
	or cx,dx
	or.s cx,dx
	or ecx,edx
	or.s ecx,edx
	sbb cl,dl
	sbb.s cl,dl
	sbb cx,dx
	sbb.s cx,dx
	sbb ecx,edx
	sbb.s ecx,edx
	sub cl,dl
	sub.s cl,dl
	sub cx,dx
	sub.s cx,dx
	sub ecx,edx
	sub.s ecx,edx
	xor cl,dl
	xor.s cl,dl
	xor cx,dx
	xor.s cx,dx
	xor ecx,edx
	xor.s ecx,edx
# Tests for op ymm, ymm
# Intel-syntax counterparts of the AT&T SIMD move tests; operand order
# is reversed (destination first) relative to the AT&T section.
	vmovapd ymm6,ymm4
	vmovapd.s ymm6,ymm4
	vmovaps ymm6,ymm4
	vmovaps.s ymm6,ymm4
	vmovdqa ymm6,ymm4
	vmovdqa.s ymm6,ymm4
	vmovdqu ymm6,ymm4
	vmovdqu.s ymm6,ymm4
	vmovupd ymm6,ymm4
	vmovupd.s ymm6,ymm4
	vmovups ymm6,ymm4
	vmovups.s ymm6,ymm4
# Tests for op xmm, xmm
# Legacy-SSE moves first, then their VEX-encoded v-prefixed forms.
	movapd xmm6,xmm4
	movapd.s xmm6,xmm4
	movaps xmm6,xmm4
	movaps.s xmm6,xmm4
	movdqa xmm6,xmm4
	movdqa.s xmm6,xmm4
	movdqu xmm6,xmm4
	movdqu.s xmm6,xmm4
	movq xmm6,xmm4
	movq.s xmm6,xmm4
	movsd xmm6,xmm4
	movsd.s xmm6,xmm4
	movss xmm6,xmm4
	movss.s xmm6,xmm4
	movupd xmm6,xmm4
	movupd.s xmm6,xmm4
	movups xmm6,xmm4
	movups.s xmm6,xmm4
	vmovapd xmm6,xmm4
	vmovapd.s xmm6,xmm4
	vmovaps xmm6,xmm4
	vmovaps.s xmm6,xmm4
	vmovdqa xmm6,xmm4
	vmovdqa.s xmm6,xmm4
	vmovdqu xmm6,xmm4
	vmovdqu.s xmm6,xmm4
	vmovq xmm6,xmm4
	vmovq.s xmm6,xmm4
	vmovupd xmm6,xmm4
	vmovupd.s xmm6,xmm4
	vmovups xmm6,xmm4
	vmovups.s xmm6,xmm4
# Tests for op xmm, xmm, xmm
# Three-operand (merging) register forms of vmovsd/vmovss.
	vmovsd xmm2,xmm6,xmm4
	vmovsd.s xmm2,xmm6,xmm4
	vmovss xmm2,xmm6,xmm4
	vmovss.s xmm2,xmm6,xmm4
# Tests for op mm, mm
	movq mm4,mm0
	movq.s mm4,mm0
# Tests for op bnd, bnd
# MPX bound-register move.
	bndmov bnd1,bnd2
	bndmov.s bnd1,bnd2
# --------------------------------------------------------------------------
# File boundary (concatenation artifact).  The content below is:
#   repo: tactcomplabs/xbgas-binutils-gdb
#   path: gas/testsuite/gas/i386/x86-64-avx512bw.s  (86,024 bytes)
# --------------------------------------------------------------------------
# Check 64bit AVX512BW instructions
.allow_index_reg
.text
_start:
vpabsb %zmm29, %zmm30 # AVX512BW
vpabsb %zmm29, %zmm30{%k7} # AVX512BW
vpabsb %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsb (%rcx), %zmm30 # AVX512BW
vpabsb 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsb 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsb 8192(%rdx), %zmm30 # AVX512BW
vpabsb -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsb -8256(%rdx), %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30{%k7} # AVX512BW
vpabsw %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsw (%rcx), %zmm30 # AVX512BW
vpabsw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsw 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsw 8192(%rdx), %zmm30 # AVX512BW
vpabsw -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsw -8256(%rdx), %zmm30 # AVX512BW
vpackssdw %zmm28, %zmm29, %zmm30 # AVX512BW
vpackssdw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackssdw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackssdw (%rcx), %zmm29, %zmm30 # AVX512BW
vpackssdw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackssdw (%rcx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackssdw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackssdw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackssdw 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackssdw -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpacksswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpacksswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpacksswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpacksswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackusdw %zmm28, %zmm29, %zmm30 # AVX512BW
vpackusdw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackusdw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackusdw (%rcx), %zmm29, %zmm30 # AVX512BW
vpackusdw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackusdw (%rcx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackusdw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackusdw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackusdw 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackusdw -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackuswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpackuswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackuswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackuswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpalignr $123, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $123, (%rcx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgb (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgw (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpblendmb %zmm28, %zmm29, %zmm30 # AVX512BW
vpblendmb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpblendmb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpblendmb (%rcx), %zmm29, %zmm30 # AVX512BW
vpblendmb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpblendmb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpblendmb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpbroadcastb %xmm29, %zmm30 # AVX512BW
vpbroadcastb %xmm29, %zmm30{%k7} # AVX512BW
vpbroadcastb %xmm29, %zmm30{%k7}{z} # AVX512BW
vpbroadcastb (%rcx), %zmm30 # AVX512BW
vpbroadcastb 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpbroadcastb 127(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastb 128(%rdx), %zmm30 # AVX512BW
vpbroadcastb -128(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastb -129(%rdx), %zmm30 # AVX512BW
vpbroadcastb %eax, %zmm30 # AVX512BW
vpbroadcastb %eax, %zmm30{%k7} # AVX512BW
vpbroadcastb %eax, %zmm30{%k7}{z} # AVX512BW
vpbroadcastw %xmm29, %zmm30 # AVX512BW
vpbroadcastw %xmm29, %zmm30{%k7} # AVX512BW
vpbroadcastw %xmm29, %zmm30{%k7}{z} # AVX512BW
vpbroadcastw (%rcx), %zmm30 # AVX512BW
vpbroadcastw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpbroadcastw 254(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastw 256(%rdx), %zmm30 # AVX512BW
vpbroadcastw -256(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastw -258(%rdx), %zmm30 # AVX512BW
vpbroadcastw %eax, %zmm30 # AVX512BW
vpbroadcastw %eax, %zmm30{%k7} # AVX512BW
vpbroadcastw %eax, %zmm30{%k7}{z} # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpblendmw %zmm28, %zmm29, %zmm30 # AVX512BW
vpblendmw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpblendmw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpblendmw (%rcx), %zmm29, %zmm30 # AVX512BW
vpblendmw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpblendmw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpblendmw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpextrb $0xab, %xmm29, %eax # AVX512BW
vpextrb $123, %xmm29, %rax # AVX512BW
vpextrb $123, %xmm29, %r8 # AVX512BW
vpextrb $123, %xmm29, (%rcx) # AVX512BW
vpextrb $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrb $123, %xmm29, 127(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, 128(%rdx) # AVX512BW
vpextrb $123, %xmm29, -128(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, -129(%rdx) # AVX512BW
vpextrw $123, %xmm29, (%rcx) # AVX512BW
vpextrw $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrw $123, %xmm29, 254(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, 256(%rdx) # AVX512BW
vpextrw $123, %xmm29, -256(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, -258(%rdx) # AVX512BW
vpextrw $0xab, %xmm30, %eax # AVX512BW
vpextrw $123, %xmm30, %rax # AVX512BW
vpextrw $123, %xmm30, %r8 # AVX512BW
vpinsrb $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %rax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %r13, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 127(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, 128(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, -128(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, -129(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %rax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %r13, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 254(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, 256(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, -256(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, -258(%rdx), %xmm29, %xmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxub (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminub (%rcx), %zmm29, %zmm30 # AVX512BW
vpminub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
# AT&T-syntax AVX512BW packed word/byte ops (register, masked {%k7}, zeroing
# {%k7}{z}, and memory forms).  Lines tagged "Disp8" use displacements that are
# exact multiples of the 64-byte operand size (within +/-127 scaled), so the
# assembler must emit the EVEX compressed 8-bit displacement; the neighboring
# un-tagged lines fall just outside that range and must use disp32.
	vpminuw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpminuw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpminuw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpminuw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpminuw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpminuw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpminuw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpminuw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpminuw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	# Sign/zero extension from a 256-bit source; memory operand is 32 bytes,
	# so the Disp8 scaling factor here is 32 (4064 = 127*32).
	vpmovsxbw	%ymm29, %zmm30	 # AVX512BW
	vpmovsxbw	%ymm29, %zmm30{%k7}	 # AVX512BW
	vpmovsxbw	%ymm29, %zmm30{%k7}{z}	 # AVX512BW
	vpmovsxbw	(%rcx), %zmm30	 # AVX512BW
	vpmovsxbw	0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpmovsxbw	4064(%rdx), %zmm30	 # AVX512BW Disp8
	vpmovsxbw	4096(%rdx), %zmm30	 # AVX512BW
	vpmovsxbw	-4096(%rdx), %zmm30	 # AVX512BW Disp8
	vpmovsxbw	-4128(%rdx), %zmm30	 # AVX512BW
	vpmovzxbw	%ymm29, %zmm30	 # AVX512BW
	vpmovzxbw	%ymm29, %zmm30{%k7}	 # AVX512BW
	vpmovzxbw	%ymm29, %zmm30{%k7}{z}	 # AVX512BW
	vpmovzxbw	(%rcx), %zmm30	 # AVX512BW
	vpmovzxbw	0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpmovzxbw	4064(%rdx), %zmm30	 # AVX512BW Disp8
	vpmovzxbw	4096(%rdx), %zmm30	 # AVX512BW
	vpmovzxbw	-4096(%rdx), %zmm30	 # AVX512BW Disp8
	vpmovzxbw	-4128(%rdx), %zmm30	 # AVX512BW
	vpmulhrsw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpmulhrsw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpmulhrsw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpmulhrsw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpmulhrsw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpmulhrsw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmulhrsw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmulhrsw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmulhrsw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmulhuw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpmulhuw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpmulhuw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpmulhuw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpmulhuw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpmulhuw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmulhuw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmulhuw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmulhuw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmulhw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpmulhw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpmulhw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpmulhw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpmulhw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpmulhw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmulhw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmulhw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmulhw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmullw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpmullw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpmullw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpmullw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpmullw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpmullw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmullw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpmullw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpmullw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	# vpsadbw does not accept write masks, so only unmasked forms appear.
	vpsadbw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsadbw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsadbw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsadbw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsadbw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsadbw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsadbw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpshufb	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpshufb	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpshufb	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpshufb	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpshufb	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpshufb	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpshufb	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpshufb	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpshufb	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	# Immediate-operand shuffles: 0xab and 123 exercise both imm8 spellings.
	vpshufhw	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpshufhw	$0xab, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpshufhw	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpshufhw	$123, %zmm29, %zmm30	 # AVX512BW
	vpshufhw	$123, (%rcx), %zmm30	 # AVX512BW
	vpshufhw	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpshufhw	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpshufhw	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpshufhw	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpshufhw	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpshuflw	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpshuflw	$0xab, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpshuflw	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpshuflw	$123, %zmm29, %zmm30	 # AVX512BW
	vpshuflw	$123, (%rcx), %zmm30	 # AVX512BW
	vpshuflw	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpshuflw	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpshuflw	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpshuflw	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpshuflw	$123, -8256(%rdx), %zmm30	 # AVX512BW
# Word shifts.  The shift-count-in-xmm forms read a 128-bit (16-byte) memory
# operand, so their Disp8 scale is 16 (2032 = 127*16); the shift-by-imm8
# forms below take a full 64-byte source and scale by 64.
	vpsllw	%xmm28, %zmm29, %zmm30	 # AVX512BW
	vpsllw	%xmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsllw	%xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsllw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsllw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsllw	2032(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsllw	2048(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsllw	-2048(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsllw	-2064(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsraw	%xmm28, %zmm29, %zmm30	 # AVX512BW
	vpsraw	%xmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsraw	%xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsraw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsraw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsraw	2032(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsraw	2048(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsraw	-2048(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsraw	-2064(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsrlw	%xmm28, %zmm29, %zmm30	 # AVX512BW
	vpsrlw	%xmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsrlw	%xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsrlw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsrlw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsrlw	2032(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsrlw	2048(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsrlw	-2048(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsrlw	-2064(%rdx), %zmm29, %zmm30	 # AVX512BW
	# Byte-granular 128-bit-lane shift; no masking supported.
	vpsrldq	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpsrldq	$123, %zmm29, %zmm30	 # AVX512BW
	vpsrldq	$123, (%rcx), %zmm30	 # AVX512BW
	vpsrldq	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpsrldq	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpsrldq	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpsrldq	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpsrldq	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpsrlw	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpsrlw	$0xab, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsrlw	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsrlw	$123, %zmm29, %zmm30	 # AVX512BW
	vpsrlw	$123, (%rcx), %zmm30	 # AVX512BW
	vpsrlw	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpsrlw	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpsrlw	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpsrlw	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpsrlw	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpsraw	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpsraw	$0xab, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsraw	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsraw	$123, %zmm29, %zmm30	 # AVX512BW
	vpsraw	$123, (%rcx), %zmm30	 # AVX512BW
	vpsraw	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpsraw	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpsraw	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpsraw	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpsraw	$123, -8256(%rdx), %zmm30	 # AVX512BW
# Per-element variable word shifts, saturating/unsaturated subtracts, and
# byte/word unpacks.  Same operand-form coverage as above: register, {%k7},
# {%k7}{z}, plain memory, SIB, and the four Disp8-boundary displacements.
	vpsrlvw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsrlvw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsrlvw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsrlvw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsrlvw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsrlvw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsrlvw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsrlvw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsrlvw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsravw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsravw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsravw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsravw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsravw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsravw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsravw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsravw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsravw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubb	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsubb	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsubb	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsubb	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsubb	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsubb	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubb	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubb	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubb	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubsb	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsubsb	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsubsb	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsubsb	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsubsb	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsubsb	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubsb	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubsb	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubsb	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubsw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsubsw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsubsw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsubsw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsubsw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsubsw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubsw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubsw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubsw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubusb	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsubusb	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsubusb	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsubusb	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsubusb	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsubusb	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubusb	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubusb	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubusb	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubusw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsubusw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsubusw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsubusw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsubusw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsubusw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubusw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubusw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubusw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsubw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsubw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsubw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsubw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsubw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsubw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsubw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpckhbw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpunpckhbw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpunpckhbw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpunpckhbw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpunpckhbw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpunpckhbw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpckhbw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpckhbw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpckhbw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpckhwd	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpunpckhwd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpunpckhwd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpunpckhwd	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpunpckhwd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpunpckhwd	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpckhwd	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpckhwd	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpckhwd	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpcklbw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpunpcklbw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpunpcklbw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpunpcklbw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpunpcklbw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpunpcklbw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpcklbw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpcklbw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpcklbw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpcklwd	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpunpcklwd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpunpcklwd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpunpcklwd	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpunpcklwd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpunpcklwd	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpcklwd	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpunpcklwd	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpunpcklwd	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
# Word-to-byte down-converts (truncating, signed-saturating, unsigned-
# saturating) with ymm register destinations, then vdbpsadbw, word permutes,
# and the remaining left-shift forms.
	vpmovwb	%zmm29, %ymm30	 # AVX512BW
	vpmovwb	%zmm29, %ymm30{%k7}	 # AVX512BW
	vpmovwb	%zmm29, %ymm30{%k7}{z}	 # AVX512BW
	vpmovswb	%zmm29, %ymm30	 # AVX512BW
	vpmovswb	%zmm29, %ymm30{%k7}	 # AVX512BW
	vpmovswb	%zmm29, %ymm30{%k7}{z}	 # AVX512BW
	vpmovuswb	%zmm29, %ymm30	 # AVX512BW
	vpmovuswb	%zmm29, %ymm30{%k7}	 # AVX512BW
	vpmovuswb	%zmm29, %ymm30{%k7}{z}	 # AVX512BW
	vdbpsadbw	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vdbpsadbw	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vdbpsadbw	$123, %zmm28, %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw	$123, (%rcx), %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vdbpsadbw	$123, 8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw	$123, -8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vdbpsadbw	$123, -8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpermw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpermw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpermw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpermw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpermw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermt2w	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpermt2w	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpermt2w	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpermt2w	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpermt2w	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpermt2w	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermt2w	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermt2w	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermt2w	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	# Byte-granular left shift within 128-bit lanes; no masking supported.
	vpslldq	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpslldq	$123, %zmm29, %zmm30	 # AVX512BW
	vpslldq	$123, (%rcx), %zmm30	 # AVX512BW
	vpslldq	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpslldq	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpslldq	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpslldq	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpslldq	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpsllw	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpsllw	$0xab, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsllw	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsllw	$123, %zmm29, %zmm30	 # AVX512BW
	vpsllw	$123, (%rcx), %zmm30	 # AVX512BW
	vpsllw	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpsllw	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpsllw	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpsllw	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpsllw	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpsllvw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsllvw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsllvw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsllvw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsllvw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsllvw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsllvw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsllvw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsllvw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
# Byte- and word-masked unaligned moves (register and load forms); the
# store-to-memory forms appear further down after the opmask group.
	vmovdqu8	%zmm29, %zmm30	 # AVX512BW
	vmovdqu8	%zmm29, %zmm30{%k7}	 # AVX512BW
	vmovdqu8	%zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vmovdqu8	(%rcx), %zmm30	 # AVX512BW
	vmovdqu8	0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vmovdqu8	8128(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu8	8192(%rdx), %zmm30	 # AVX512BW
	vmovdqu8	-8192(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu8	-8256(%rdx), %zmm30	 # AVX512BW
	vmovdqu16	%zmm29, %zmm30	 # AVX512BW
	vmovdqu16	%zmm29, %zmm30{%k7}	 # AVX512BW
	vmovdqu16	%zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vmovdqu16	(%rcx), %zmm30	 # AVX512BW
	vmovdqu16	0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vmovdqu16	8128(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu16	8192(%rdx), %zmm30	 # AVX512BW
	vmovdqu16	-8192(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu16	-8256(%rdx), %zmm30	 # AVX512BW
# AVX512BW opmask (k-register) instructions: the 64-bit (q) and 32-bit (d)
# forms of the mask logic, shift, move, add, and unpack operations.  kmov
# also covers memory and general-purpose-register transfers in both
# directions, including REX-extended GPRs (r8/r13d).
	kandq	%k7, %k6, %k5	 # AVX512BW
	kandd	%k7, %k6, %k5	 # AVX512BW
	kandnq	%k7, %k6, %k5	 # AVX512BW
	kandnd	%k7, %k6, %k5	 # AVX512BW
	korq	%k7, %k6, %k5	 # AVX512BW
	kord	%k7, %k6, %k5	 # AVX512BW
	kxnorq	%k7, %k6, %k5	 # AVX512BW
	kxnord	%k7, %k6, %k5	 # AVX512BW
	kxorq	%k7, %k6, %k5	 # AVX512BW
	kxord	%k7, %k6, %k5	 # AVX512BW
	knotq	%k6, %k5	 # AVX512BW
	knotd	%k6, %k5	 # AVX512BW
	kortestq	%k6, %k5	 # AVX512BW
	kortestd	%k6, %k5	 # AVX512BW
	ktestq	%k6, %k5	 # AVX512BW
	ktestd	%k6, %k5	 # AVX512BW
	kshiftrq	$0xab, %k6, %k5	 # AVX512BW
	kshiftrq	$123, %k6, %k5	 # AVX512BW
	kshiftrd	$0xab, %k6, %k5	 # AVX512BW
	kshiftrd	$123, %k6, %k5	 # AVX512BW
	kshiftlq	$0xab, %k6, %k5	 # AVX512BW
	kshiftlq	$123, %k6, %k5	 # AVX512BW
	kshiftld	$0xab, %k6, %k5	 # AVX512BW
	kshiftld	$123, %k6, %k5	 # AVX512BW
	kmovq	%k6, %k5	 # AVX512BW
	kmovq	(%rcx), %k5	 # AVX512BW
	kmovq	0x123(%rax,%r14,8), %k5	 # AVX512BW
	kmovd	%k6, %k5	 # AVX512BW
	kmovd	(%rcx), %k5	 # AVX512BW
	kmovd	0x123(%rax,%r14,8), %k5	 # AVX512BW
	kmovq	%k5, (%rcx)	 # AVX512BW
	kmovq	%k5, 0x123(%rax,%r14,8)	 # AVX512BW
	kmovd	%k5, (%rcx)	 # AVX512BW
	kmovd	%k5, 0x123(%rax,%r14,8)	 # AVX512BW
	kmovq	%rax, %k5	 # AVX512BW
	kmovq	%r8, %k5	 # AVX512BW
	kmovd	%eax, %k5	 # AVX512BW
	kmovd	%ebp, %k5	 # AVX512BW
	kmovd	%r13d, %k5	 # AVX512BW
	kmovq	%k5, %rax	 # AVX512BW
	kmovq	%k5, %r8	 # AVX512BW
	kmovd	%k5, %eax	 # AVX512BW
	kmovd	%k5, %ebp	 # AVX512BW
	kmovd	%k5, %r13d	 # AVX512BW
	kaddq	%k7, %k6, %k5	 # AVX512BW
	kaddd	%k7, %k6, %k5	 # AVX512BW
	kunpckwd	%k7, %k6, %k5	 # AVX512BW
	kunpckdq	%k7, %k6, %k5	 # AVX512BW
# Memory-destination forms: down-convert stores (32-byte destination, Disp8
# scale 16... actually 32: 4064 = 127*32) and full 64-byte vmovdqu8/16
# stores, each with a merge-masked {%k7} variant ({z} is not allowed when
# storing to memory).
	vpmovwb	%zmm30, (%rcx)	 # AVX512BW
	vpmovwb	%zmm30, (%rcx){%k7}	 # AVX512BW
	vpmovwb	%zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vpmovwb	%zmm30, 4064(%rdx)	 # AVX512BW Disp8
	vpmovwb	%zmm30, 4096(%rdx)	 # AVX512BW
	vpmovwb	%zmm30, -4096(%rdx)	 # AVX512BW Disp8
	vpmovwb	%zmm30, -4128(%rdx)	 # AVX512BW
	vpmovswb	%zmm30, (%rcx)	 # AVX512BW
	vpmovswb	%zmm30, (%rcx){%k7}	 # AVX512BW
	vpmovswb	%zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vpmovswb	%zmm30, 4064(%rdx)	 # AVX512BW Disp8
	vpmovswb	%zmm30, 4096(%rdx)	 # AVX512BW
	vpmovswb	%zmm30, -4096(%rdx)	 # AVX512BW Disp8
	vpmovswb	%zmm30, -4128(%rdx)	 # AVX512BW
	vpmovuswb	%zmm30, (%rcx)	 # AVX512BW
	vpmovuswb	%zmm30, (%rcx){%k7}	 # AVX512BW
	vpmovuswb	%zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vpmovuswb	%zmm30, 4064(%rdx)	 # AVX512BW Disp8
	vpmovuswb	%zmm30, 4096(%rdx)	 # AVX512BW
	vpmovuswb	%zmm30, -4096(%rdx)	 # AVX512BW Disp8
	vpmovuswb	%zmm30, -4128(%rdx)	 # AVX512BW
	vmovdqu8	%zmm30, (%rcx)	 # AVX512BW
	vmovdqu8	%zmm30, (%rcx){%k7}	 # AVX512BW
	vmovdqu8	%zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vmovdqu8	%zmm30, 8128(%rdx)	 # AVX512BW Disp8
	vmovdqu8	%zmm30, 8192(%rdx)	 # AVX512BW
	vmovdqu8	%zmm30, -8192(%rdx)	 # AVX512BW Disp8
	vmovdqu8	%zmm30, -8256(%rdx)	 # AVX512BW
	vmovdqu16	%zmm30, (%rcx)	 # AVX512BW
	vmovdqu16	%zmm30, (%rcx){%k7}	 # AVX512BW
	vmovdqu16	%zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vmovdqu16	%zmm30, 8128(%rdx)	 # AVX512BW Disp8
	vmovdqu16	%zmm30, 8192(%rdx)	 # AVX512BW
	vmovdqu16	%zmm30, -8192(%rdx)	 # AVX512BW Disp8
	vmovdqu16	%zmm30, -8256(%rdx)	 # AVX512BW
# vpermi2w (indices-in-destination permute), then the mask-producing
# instructions: vptestm/vptestnm and the imm8-predicate vpcmp family all
# write an opmask (%k5), optionally zero-masked by %k7; vpmov[bw]2m /
# vpmovm2[bw] convert between vector sign bits and opmask registers.
	vpermi2w	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpermi2w	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpermi2w	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpermi2w	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpermi2w	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpermi2w	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermi2w	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermi2w	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermi2w	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vptestmb	%zmm29, %zmm30, %k5	 # AVX512BW
	vptestmb	%zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vptestmb	(%rcx), %zmm30, %k5	 # AVX512BW
	vptestmb	0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vptestmb	8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmb	8192(%rdx), %zmm30, %k5	 # AVX512BW
	vptestmb	-8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmb	-8256(%rdx), %zmm30, %k5	 # AVX512BW
	vptestmw	%zmm29, %zmm30, %k5	 # AVX512BW
	vptestmw	%zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vptestmw	(%rcx), %zmm30, %k5	 # AVX512BW
	vptestmw	0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vptestmw	8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmw	8192(%rdx), %zmm30, %k5	 # AVX512BW
	vptestmw	-8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmw	-8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpmovb2m	%zmm30, %k5	 # AVX512BW
	vpmovw2m	%zmm30, %k5	 # AVX512BW
	vpmovm2b	%k5, %zmm30	 # AVX512BW
	vpmovm2w	%k5, %zmm30	 # AVX512BW
	vptestnmb	%zmm28, %zmm29, %k5	 # AVX512BW
	vptestnmb	%zmm28, %zmm29, %k5{%k7}	 # AVX512BW
	vptestnmb	(%rcx), %zmm29, %k5	 # AVX512BW
	vptestnmb	0x123(%rax,%r14,8), %zmm29, %k5	 # AVX512BW
	vptestnmb	8128(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmb	8192(%rdx), %zmm29, %k5	 # AVX512BW
	vptestnmb	-8192(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmb	-8256(%rdx), %zmm29, %k5	 # AVX512BW
	vptestnmw	%zmm28, %zmm29, %k5	 # AVX512BW
	vptestnmw	%zmm28, %zmm29, %k5{%k7}	 # AVX512BW
	vptestnmw	(%rcx), %zmm29, %k5	 # AVX512BW
	vptestnmw	0x123(%rax,%r14,8), %zmm29, %k5	 # AVX512BW
	vptestnmw	8128(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmw	8192(%rdx), %zmm29, %k5	 # AVX512BW
	vptestnmw	-8192(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmw	-8256(%rdx), %zmm29, %k5	 # AVX512BW
	vpcmpb	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpb	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpb	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpb	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpb	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpw	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpw	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpw	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpw	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpw	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpub	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpub	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpub	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpub	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpub	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpuw	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpuw	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpuw	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpuw	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
# Switch to Intel syntax for the rest of the file: destination first, bare
# register names, explicit ZMMWORD/BYTE/WORD PTR size overrides, masks
# written as {k7}/{k7}{z} after the destination, and {1to16} marking
# 32-bit embedded-broadcast memory operands (Disp8 scale 4 for those:
# 508 = 127*4).
	.intel_syntax noprefix
	vpabsb	zmm30, zmm29	 # AVX512BW
	vpabsb	zmm30{k7}, zmm29	 # AVX512BW
	vpabsb	zmm30{k7}{z}, zmm29	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpabsb	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpabsb	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpabsw	zmm30, zmm29	 # AVX512BW
	vpabsw	zmm30{k7}, zmm29	 # AVX512BW
	vpabsw	zmm30{k7}{z}, zmm29	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpabsw	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpabsw	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	# The dword-sourced packs (vpackssdw/vpackusdw) additionally test the
	# {1to16} embedded-broadcast forms; the word-sourced packs cannot
	# broadcast and omit them.
	vpackssdw	zmm30, zmm29, zmm28	 # AVX512BW
	vpackssdw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpackssdw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpackssdw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpackssdw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpackssdw	zmm30, zmm29, [rcx]{1to16}	 # AVX512BW
	vpackssdw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpackssdw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpackssdw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpackssdw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackssdw	zmm30, zmm29, [rdx+508]{1to16}	 # AVX512BW Disp8
	vpackssdw	zmm30, zmm29, [rdx+512]{1to16}	 # AVX512BW
	vpackssdw	zmm30, zmm29, [rdx-512]{1to16}	 # AVX512BW Disp8
	vpackssdw	zmm30, zmm29, [rdx-516]{1to16}	 # AVX512BW
	vpacksswb	zmm30, zmm29, zmm28	 # AVX512BW
	vpacksswb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpacksswb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpacksswb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpacksswb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpacksswb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpacksswb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpacksswb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpacksswb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackusdw	zmm30, zmm29, zmm28	 # AVX512BW
	vpackusdw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpackusdw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpackusdw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpackusdw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpackusdw	zmm30, zmm29, [rcx]{1to16}	 # AVX512BW
	vpackusdw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpackusdw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpackusdw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpackusdw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackusdw	zmm30, zmm29, [rdx+508]{1to16}	 # AVX512BW Disp8
	vpackusdw	zmm30, zmm29, [rdx+512]{1to16}	 # AVX512BW
	vpackusdw	zmm30, zmm29, [rdx-512]{1to16}	 # AVX512BW Disp8
	vpackusdw	zmm30, zmm29, [rdx-516]{1to16}	 # AVX512BW
	vpackuswb	zmm30, zmm29, zmm28	 # AVX512BW
	vpackuswb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpackuswb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpackuswb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpackuswb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpackuswb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpackuswb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpackuswb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpackuswb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddb	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddsb	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddsb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddsb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddsw	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddsw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddsw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddusb	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddusb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddusb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddusb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddusb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddusb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddusb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddusb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddusb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddusw	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddusw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddusw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddusw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddusw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddusw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddusw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddusw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddusw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddw	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	# In Intel syntax the imm8 trails the operand list (no '$' prefix).
	vpalignr	zmm30, zmm29, zmm28, 0xab	 # AVX512BW
	vpalignr	zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512BW
	vpalignr	zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512BW
	vpalignr	zmm30, zmm29, zmm28, 123	 # AVX512BW
	vpalignr	zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpalignr	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpalignr	zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpalignr	zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpalignr	zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpalignr	zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
# Intel-syntax averages, blends, scalar broadcasts, and register-vs-register
# equality/greater-than compares writing opmask k5.  vpbroadcastb/w memory
# operands are 1 and 2 bytes, so their Disp8 scales are 1 (127/-128) and
# 2 (254/-256) respectively; the GPR-source forms take a 32-bit register.
	vpavgb	zmm30, zmm29, zmm28	 # AVX512BW
	vpavgb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpavgb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpavgw	zmm30, zmm29, zmm28	 # AVX512BW
	vpavgw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpavgw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpblendmb	zmm30, zmm29, zmm28	 # AVX512BW
	vpblendmb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpblendmb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpblendmb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpblendmb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpblendmb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpblendmb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpblendmb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpblendmb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpbroadcastb	zmm30, xmm29	 # AVX512BW
	vpbroadcastb	zmm30{k7}, xmm29	 # AVX512BW
	vpbroadcastb	zmm30{k7}{z}, xmm29	 # AVX512BW
	vpbroadcastb	zmm30, BYTE PTR [rcx]	 # AVX512BW
	vpbroadcastb	zmm30, BYTE PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpbroadcastb	zmm30, BYTE PTR [rdx+127]	 # AVX512BW Disp8
	vpbroadcastb	zmm30, BYTE PTR [rdx+128]	 # AVX512BW
	vpbroadcastb	zmm30, BYTE PTR [rdx-128]	 # AVX512BW Disp8
	vpbroadcastb	zmm30, BYTE PTR [rdx-129]	 # AVX512BW
	vpbroadcastb	zmm30, eax	 # AVX512BW
	vpbroadcastb	zmm30{k7}, eax	 # AVX512BW
	vpbroadcastb	zmm30{k7}{z}, eax	 # AVX512BW
	vpbroadcastw	zmm30, xmm29	 # AVX512BW
	vpbroadcastw	zmm30{k7}, xmm29	 # AVX512BW
	vpbroadcastw	zmm30{k7}{z}, xmm29	 # AVX512BW
	vpbroadcastw	zmm30, WORD PTR [rcx]	 # AVX512BW
	vpbroadcastw	zmm30, WORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpbroadcastw	zmm30, WORD PTR [rdx+254]	 # AVX512BW Disp8
	vpbroadcastw	zmm30, WORD PTR [rdx+256]	 # AVX512BW
	vpbroadcastw	zmm30, WORD PTR [rdx-256]	 # AVX512BW Disp8
	vpbroadcastw	zmm30, WORD PTR [rdx-258]	 # AVX512BW
	vpbroadcastw	zmm30, eax	 # AVX512BW
	vpbroadcastw	zmm30{k7}, eax	 # AVX512BW
	vpbroadcastw	zmm30{k7}{z}, eax	 # AVX512BW
	vpcmpeqb	k5, zmm30, zmm29	 # AVX512BW
	vpcmpeqb	k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpeqb	k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpeqb	k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpeqb	k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpeqb	k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpeqb	k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpeqb	k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpcmpeqw	k5, zmm30, zmm29	 # AVX512BW
	vpcmpeqw	k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpeqw	k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpeqw	k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpeqw	k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpeqw	k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpeqw	k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpeqw	k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpcmpgtb	k5, zmm30, zmm29	 # AVX512BW
	vpcmpgtb	k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpgtb	k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpgtb	k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpgtb	k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpgtb	k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpgtb	k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpgtb	k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpcmpgtw	k5, zmm30, zmm29	 # AVX512BW
	vpcmpgtw	k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpgtw	k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpgtw	k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpgtw	k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpgtw	k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpgtw	k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpgtw	k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpblendmw	zmm30, zmm29, zmm28	 # AVX512BW
	vpblendmw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpblendmw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpblendmw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpblendmw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpblendmw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpblendmw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpblendmw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpblendmw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
vpextrb eax, xmm29, 0xab # AVX512BW
vpextrb rax, xmm29, 123 # AVX512BW
vpextrb r8, xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rcx], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rdx+127], xmm29, 123 # AVX512BW Disp8
vpextrb BYTE PTR [rdx+128], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rdx-128], xmm29, 123 # AVX512BW Disp8
vpextrb BYTE PTR [rdx-129], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rcx], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rdx+254], xmm29, 123 # AVX512BW Disp8
vpextrw WORD PTR [rdx+256], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rdx-256], xmm29, 123 # AVX512BW Disp8
vpextrw WORD PTR [rdx-258], xmm29, 123 # AVX512BW
vpextrw eax, xmm30, 0xab # AVX512BW
vpextrw rax, xmm30, 123 # AVX512BW
vpextrw r8, xmm30, 123 # AVX512BW
vpinsrb xmm30, xmm29, eax, 0xab # AVX512BW
vpinsrb xmm30, xmm29, rax, 123 # AVX512BW
vpinsrb xmm30, xmm29, ebp, 123 # AVX512BW
vpinsrb xmm30, xmm29, r13, 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rcx], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rdx+127], 123 # AVX512BW Disp8
vpinsrb xmm30, xmm29, BYTE PTR [rdx+128], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rdx-128], 123 # AVX512BW Disp8
vpinsrb xmm30, xmm29, BYTE PTR [rdx-129], 123 # AVX512BW
vpinsrw xmm30, xmm29, eax, 0xab # AVX512BW
vpinsrw xmm30, xmm29, rax, 123 # AVX512BW
vpinsrw xmm30, xmm29, ebp, 123 # AVX512BW
vpinsrw xmm30, xmm29, r13, 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rcx], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rdx+254], 123 # AVX512BW Disp8
vpinsrw xmm30, xmm29, WORD PTR [rdx+256], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rdx-256], 123 # AVX512BW Disp8
vpinsrw xmm30, xmm29, WORD PTR [rdx-258], 123 # AVX512BW
vpmaddubsw zmm30, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaddwd zmm30, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxsb zmm30, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxsw zmm30, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxub zmm30, zmm29, zmm28 # AVX512BW
vpmaxub zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxub zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxuw zmm30, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminsb zmm30, zmm29, zmm28 # AVX512BW
vpminsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminsw zmm30, zmm29, zmm28 # AVX512BW
vpminsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminub zmm30, zmm29, zmm28 # AVX512BW
vpminub zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminub zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminub zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminub zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminuw zmm30, zmm29, zmm28 # AVX512BW
vpminuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovsxbw zmm30, ymm29 # AVX512BW
vpmovsxbw zmm30{k7}, ymm29 # AVX512BW
vpmovsxbw zmm30{k7}{z}, ymm29 # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rcx] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rdx+4064] # AVX512BW Disp8
vpmovsxbw zmm30, YMMWORD PTR [rdx+4096] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rdx-4096] # AVX512BW Disp8
vpmovsxbw zmm30, YMMWORD PTR [rdx-4128] # AVX512BW
vpmovzxbw zmm30, ymm29 # AVX512BW
vpmovzxbw zmm30{k7}, ymm29 # AVX512BW
vpmovzxbw zmm30{k7}{z}, ymm29 # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rcx] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rdx+4064] # AVX512BW Disp8
vpmovzxbw zmm30, YMMWORD PTR [rdx+4096] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rdx-4096] # AVX512BW Disp8
vpmovzxbw zmm30, YMMWORD PTR [rdx-4128] # AVX512BW
vpmulhrsw zmm30, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmulhuw zmm30, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmulhw zmm30, zmm29, zmm28 # AVX512BW
vpmulhw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmullw zmm30, zmm29, zmm28 # AVX512BW
vpmullw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmullw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsadbw zmm30, zmm29, zmm28 # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpshufb zmm30, zmm29, zmm28 # AVX512BW
vpshufb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpshufb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpshufhw zmm30, zmm29, 0xab # AVX512BW
vpshufhw zmm30{k7}, zmm29, 0xab # AVX512BW
vpshufhw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpshufhw zmm30, zmm29, 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpshufhw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpshufhw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpshuflw zmm30, zmm29, 0xab # AVX512BW
vpshuflw zmm30{k7}, zmm29, 0xab # AVX512BW
vpshuflw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpshuflw zmm30, zmm29, 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpshuflw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpshuflw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, xmm28 # AVX512BW
vpsllw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsllw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsllw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsraw zmm30, zmm29, xmm28 # AVX512BW
vpsraw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsraw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsraw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsraw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsrlw zmm30, zmm29, xmm28 # AVX512BW
vpsrlw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsrlw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsrldq zmm30, zmm29, 0xab # AVX512BW
vpsrldq zmm30, zmm29, 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsrldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsrldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsrlw zmm30, zmm29, 0xab # AVX512BW
vpsrlw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsrlw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsrlw zmm30, zmm29, 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsrlw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsrlw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsraw zmm30, zmm29, 0xab # AVX512BW
vpsraw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsraw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsraw zmm30, zmm29, 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsraw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsraw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsrlvw zmm30, zmm29, zmm28 # AVX512BW
vpsrlvw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsrlvw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsravw zmm30, zmm29, zmm28 # AVX512BW
vpsravw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsravw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubb zmm30, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsb zmm30, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsw zmm30, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusb zmm30, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusw zmm30, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubw zmm30, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhbw zmm30, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhwd zmm30, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklbw zmm30, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklwd zmm30, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovwb ymm30, zmm29 # AVX512BW
vpmovwb ymm30{k7}, zmm29 # AVX512BW
vpmovwb ymm30{k7}{z}, zmm29 # AVX512BW
vpmovswb ymm30, zmm29 # AVX512BW
vpmovswb ymm30{k7}, zmm29 # AVX512BW
vpmovswb ymm30{k7}{z}, zmm29 # AVX512BW
vpmovuswb ymm30, zmm29 # AVX512BW
vpmovuswb ymm30{k7}, zmm29 # AVX512BW
vpmovuswb ymm30{k7}{z}, zmm29 # AVX512BW
vdbpsadbw zmm30, zmm29, zmm28, 0xab # AVX512BW
vdbpsadbw zmm30{k7}, zmm29, zmm28, 0xab # AVX512BW
vdbpsadbw zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512BW
vdbpsadbw zmm30, zmm29, zmm28, 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpermw zmm30, zmm29, zmm28 # AVX512BW
vpermw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpermw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpermw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpermw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpermt2w zmm30, zmm29, zmm28 # AVX512BW
vpermt2w zmm30{k7}, zmm29, zmm28 # AVX512BW
vpermt2w zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpslldq zmm30, zmm29, 0xab # AVX512BW
vpslldq zmm30, zmm29, 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsllw zmm30, zmm29, 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllvw zmm30, zmm29, zmm28 # AVX512BW
vpsllvw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsllvw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vmovdqu8 zmm30, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rcx] # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vmovdqu8 zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vmovdqu8 zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vmovdqu16 zmm30, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rcx] # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vmovdqu16 zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vmovdqu16 zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
kandq k5, k6, k7 # AVX512BW
kandd k5, k6, k7 # AVX512BW
kandnq k5, k6, k7 # AVX512BW
kandnd k5, k6, k7 # AVX512BW
korq k5, k6, k7 # AVX512BW
kord k5, k6, k7 # AVX512BW
kxnorq k5, k6, k7 # AVX512BW
kxnord k5, k6, k7 # AVX512BW
kxorq k5, k6, k7 # AVX512BW
kxord k5, k6, k7 # AVX512BW
knotq k5, k6 # AVX512BW
knotd k5, k6 # AVX512BW
kortestq k5, k6 # AVX512BW
kortestd k5, k6 # AVX512BW
ktestq k5, k6 # AVX512BW
ktestd k5, k6 # AVX512BW
kshiftrq k5, k6, 0xab # AVX512BW
kshiftrq k5, k6, 123 # AVX512BW
kshiftrd k5, k6, 0xab # AVX512BW
kshiftrd k5, k6, 123 # AVX512BW
kshiftlq k5, k6, 0xab # AVX512BW
kshiftlq k5, k6, 123 # AVX512BW
kshiftld k5, k6, 0xab # AVX512BW
kshiftld k5, k6, 123 # AVX512BW
kmovq k5, k6 # AVX512BW
kmovq k5, QWORD PTR [rcx] # AVX512BW
kmovq k5, QWORD PTR [rax+r14*8+0x1234] # AVX512BW
kmovd k5, k6 # AVX512BW
kmovd k5, DWORD PTR [rcx] # AVX512BW
kmovd k5, DWORD PTR [rax+r14*8+0x1234] # AVX512BW
kmovq QWORD PTR [rcx], k5 # AVX512BW
kmovq QWORD PTR [rax+r14*8+0x1234], k5 # AVX512BW
kmovd DWORD PTR [rcx], k5 # AVX512BW
kmovd DWORD PTR [rax+r14*8+0x1234], k5 # AVX512BW
kmovq k5, rax # AVX512BW
kmovq k5, r8 # AVX512BW
kmovd k5, eax # AVX512BW
kmovd k5, ebp # AVX512BW
kmovd k5, r13d # AVX512BW
kmovq rax, k5 # AVX512BW
kmovq r8, k5 # AVX512BW
kmovd eax, k5 # AVX512BW
kmovd ebp, k5 # AVX512BW
kmovd r13d, k5 # AVX512BW
kaddq k5, k6, k7 # AVX512BW
kaddd k5, k6, k7 # AVX512BW
kunpckwd k5, k6, k7 # AVX512BW
kunpckdq k5, k6, k7 # AVX512BW
vpmovwb YMMWORD PTR [rcx], zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rdx+4064], zmm30 # AVX512BW Disp8
vpmovwb YMMWORD PTR [rdx+4096], zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rdx-4096], zmm30 # AVX512BW Disp8
vpmovwb YMMWORD PTR [rdx-4128], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rcx], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rdx+4064], zmm30 # AVX512BW Disp8
vpmovswb YMMWORD PTR [rdx+4096], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rdx-4096], zmm30 # AVX512BW Disp8
vpmovswb YMMWORD PTR [rdx-4128], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rcx], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rdx+4064], zmm30 # AVX512BW Disp8
vpmovuswb YMMWORD PTR [rdx+4096], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rdx-4096], zmm30 # AVX512BW Disp8
vpmovuswb YMMWORD PTR [rdx-4128], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rcx], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rdx+8128], zmm30 # AVX512BW Disp8
vmovdqu8 ZMMWORD PTR [rdx+8192], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rdx-8192], zmm30 # AVX512BW Disp8
vmovdqu8 ZMMWORD PTR [rdx-8256], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rcx], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rdx+8128], zmm30 # AVX512BW Disp8
vmovdqu16 ZMMWORD PTR [rdx+8192], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rdx-8192], zmm30 # AVX512BW Disp8
vmovdqu16 ZMMWORD PTR [rdx-8256], zmm30 # AVX512BW
vpermi2w zmm30, zmm29, zmm28 # AVX512BW
vpermi2w zmm30{k7}, zmm29, zmm28 # AVX512BW
vpermi2w zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vptestmb k5, zmm30, zmm29 # AVX512BW
vptestmb k5{k7}, zmm30, zmm29 # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestmb k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestmb k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vptestmw k5, zmm30, zmm29 # AVX512BW
vptestmw k5{k7}, zmm30, zmm29 # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestmw k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestmw k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovb2m k5, zmm30 # AVX512BW
vpmovw2m k5, zmm30 # AVX512BW
vpmovm2b zmm30, k5 # AVX512BW
vpmovm2w zmm30, k5 # AVX512BW
vptestnmb k5, zmm29, zmm28 # AVX512BW
vptestnmb k5{k7}, zmm29, zmm28 # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestnmb k5, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestnmb k5, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vptestnmw k5, zmm29, zmm28 # AVX512BW
vptestnmw k5{k7}, zmm29, zmm28 # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestnmw k5, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestnmw k5, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpb k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpb k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpb k5, zmm30, zmm29, 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpb k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpb k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpcmpw k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpw k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpw k5, zmm30, zmm29, 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpw k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpw k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpcmpub k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpub k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpub k5, zmm30, zmm29, 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpub k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpub k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpcmpuw k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpuw k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpuw k5, zmm30, zmm29, 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
# ----------------------------------------------------------------------
# tactcomplabs/xbgas-binutils-gdb (107,668)
# gas/testsuite/gas/i386/avx.s
# ----------------------------------------------------------------------
# Check AVX instructions
.allow_index_reg
.text
_start:
# Tests for op
vzeroall
vzeroupper
# Tests for op mem64
vldmxcsr (%ecx)
vstmxcsr (%ecx)
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd (%ecx),%ymm4,%ymm6
vmaskmovpd %ymm4,%ymm6,(%ecx)
vmaskmovps (%ecx),%ymm4,%ymm6
vmaskmovps %ymm4,%ymm6,(%ecx)
# Tests for op imm8, ymm/mem256, ymm
vpermilpd $7,%ymm6,%ymm2
vpermilpd $7,(%ecx),%ymm6
vpermilps $7,%ymm6,%ymm2
vpermilps $7,(%ecx),%ymm6
vroundpd $7,%ymm6,%ymm2
vroundpd $7,(%ecx),%ymm6
vroundps $7,%ymm6,%ymm2
vroundps $7,(%ecx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vaddpd %ymm4,%ymm6,%ymm2
vaddpd (%ecx),%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddps (%ecx),%ymm6,%ymm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubpd (%ecx),%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaddsubps (%ecx),%ymm6,%ymm2
vandnpd %ymm4,%ymm6,%ymm2
vandnpd (%ecx),%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandnps (%ecx),%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandpd (%ecx),%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vandps (%ecx),%ymm6,%ymm2
vdivpd %ymm4,%ymm6,%ymm2
vdivpd (%ecx),%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivps (%ecx),%ymm6,%ymm2
vhaddpd %ymm4,%ymm6,%ymm2
vhaddpd (%ecx),%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhaddps (%ecx),%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubpd (%ecx),%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vhsubps (%ecx),%ymm6,%ymm2
vmaxpd %ymm4,%ymm6,%ymm2
vmaxpd (%ecx),%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxps (%ecx),%ymm6,%ymm2
vminpd %ymm4,%ymm6,%ymm2
vminpd (%ecx),%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminps (%ecx),%ymm6,%ymm2
vmulpd %ymm4,%ymm6,%ymm2
vmulpd (%ecx),%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulps (%ecx),%ymm6,%ymm2
vorpd %ymm4,%ymm6,%ymm2
vorpd (%ecx),%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vorps (%ecx),%ymm6,%ymm2
vpermilpd %ymm4,%ymm6,%ymm2
vpermilpd (%ecx),%ymm6,%ymm2
vpermilps %ymm4,%ymm6,%ymm2
vpermilps (%ecx),%ymm6,%ymm2
vsubpd %ymm4,%ymm6,%ymm2
vsubpd (%ecx),%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubps (%ecx),%ymm6,%ymm2
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhpd (%ecx),%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpckhps (%ecx),%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklpd (%ecx),%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vunpcklps (%ecx),%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorpd (%ecx),%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vxorps (%ecx),%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqpd (%ecx),%ymm6,%ymm2
vcmpeq_oqpd %ymm4,%ymm6,%ymm2
vcmpeq_oqpd (%ecx),%ymm6,%ymm2
vcmpltpd %ymm4,%ymm6,%ymm2
vcmpltpd (%ecx),%ymm6,%ymm2
vcmplt_ospd %ymm4,%ymm6,%ymm2
vcmplt_ospd (%ecx),%ymm6,%ymm2
vcmplepd %ymm4,%ymm6,%ymm2
vcmplepd (%ecx),%ymm6,%ymm2
vcmple_ospd %ymm4,%ymm6,%ymm2
vcmple_ospd (%ecx),%ymm6,%ymm2
vcmpunordpd %ymm4,%ymm6,%ymm2
vcmpunordpd (%ecx),%ymm6,%ymm2
vcmpunord_qpd %ymm4,%ymm6,%ymm2
vcmpunord_qpd (%ecx),%ymm6,%ymm2
vcmpneqpd %ymm4,%ymm6,%ymm2
vcmpneqpd (%ecx),%ymm6,%ymm2
vcmpneq_uqpd %ymm4,%ymm6,%ymm2
vcmpneq_uqpd (%ecx),%ymm6,%ymm2
vcmpnltpd %ymm4,%ymm6,%ymm2
vcmpnltpd (%ecx),%ymm6,%ymm2
vcmpnlt_uspd %ymm4,%ymm6,%ymm2
vcmpnlt_uspd (%ecx),%ymm6,%ymm2
vcmpnlepd %ymm4,%ymm6,%ymm2
vcmpnlepd (%ecx),%ymm6,%ymm2
vcmpnle_uspd %ymm4,%ymm6,%ymm2
vcmpnle_uspd (%ecx),%ymm6,%ymm2
vcmpordpd %ymm4,%ymm6,%ymm2
vcmpordpd (%ecx),%ymm6,%ymm2
vcmpord_qpd %ymm4,%ymm6,%ymm2
vcmpord_qpd (%ecx),%ymm6,%ymm2
vcmpeq_uqpd %ymm4,%ymm6,%ymm2
vcmpeq_uqpd (%ecx),%ymm6,%ymm2
vcmpngepd %ymm4,%ymm6,%ymm2
vcmpngepd (%ecx),%ymm6,%ymm2
vcmpnge_uspd %ymm4,%ymm6,%ymm2
vcmpnge_uspd (%ecx),%ymm6,%ymm2
vcmpngtpd %ymm4,%ymm6,%ymm2
vcmpngtpd (%ecx),%ymm6,%ymm2
vcmpngt_uspd %ymm4,%ymm6,%ymm2
vcmpngt_uspd (%ecx),%ymm6,%ymm2
vcmpfalsepd %ymm4,%ymm6,%ymm2
vcmpfalsepd (%ecx),%ymm6,%ymm2
vcmpfalse_oqpd %ymm4,%ymm6,%ymm2
vcmpfalse_oqpd (%ecx),%ymm6,%ymm2
vcmpneq_oqpd %ymm4,%ymm6,%ymm2
vcmpneq_oqpd (%ecx),%ymm6,%ymm2
vcmpgepd %ymm4,%ymm6,%ymm2
vcmpgepd (%ecx),%ymm6,%ymm2
vcmpge_ospd %ymm4,%ymm6,%ymm2
vcmpge_ospd (%ecx),%ymm6,%ymm2
vcmpgtpd %ymm4,%ymm6,%ymm2
vcmpgtpd (%ecx),%ymm6,%ymm2
vcmpgt_ospd %ymm4,%ymm6,%ymm2
vcmpgt_ospd (%ecx),%ymm6,%ymm2
vcmptruepd %ymm4,%ymm6,%ymm2
vcmptruepd (%ecx),%ymm6,%ymm2
vcmptrue_uqpd %ymm4,%ymm6,%ymm2
vcmptrue_uqpd (%ecx),%ymm6,%ymm2
vcmpeq_ospd %ymm4,%ymm6,%ymm2
vcmpeq_ospd (%ecx),%ymm6,%ymm2
vcmplt_oqpd %ymm4,%ymm6,%ymm2
vcmplt_oqpd (%ecx),%ymm6,%ymm2
vcmple_oqpd %ymm4,%ymm6,%ymm2
vcmple_oqpd (%ecx),%ymm6,%ymm2
vcmpunord_spd %ymm4,%ymm6,%ymm2
vcmpunord_spd (%ecx),%ymm6,%ymm2
vcmpneq_uspd %ymm4,%ymm6,%ymm2
vcmpneq_uspd (%ecx),%ymm6,%ymm2
vcmpnlt_uqpd %ymm4,%ymm6,%ymm2
vcmpnlt_uqpd (%ecx),%ymm6,%ymm2
vcmpnle_uqpd %ymm4,%ymm6,%ymm2
vcmpnle_uqpd (%ecx),%ymm6,%ymm2
vcmpord_spd %ymm4,%ymm6,%ymm2
vcmpord_spd (%ecx),%ymm6,%ymm2
vcmpeq_uspd %ymm4,%ymm6,%ymm2
vcmpeq_uspd (%ecx),%ymm6,%ymm2
vcmpnge_uqpd %ymm4,%ymm6,%ymm2
vcmpnge_uqpd (%ecx),%ymm6,%ymm2
vcmpngt_uqpd %ymm4,%ymm6,%ymm2
vcmpngt_uqpd (%ecx),%ymm6,%ymm2
vcmpfalse_ospd %ymm4,%ymm6,%ymm2
vcmpfalse_ospd (%ecx),%ymm6,%ymm2
vcmpneq_ospd %ymm4,%ymm6,%ymm2
vcmpneq_ospd (%ecx),%ymm6,%ymm2
vcmpge_oqpd %ymm4,%ymm6,%ymm2
vcmpge_oqpd (%ecx),%ymm6,%ymm2
vcmpgt_oqpd %ymm4,%ymm6,%ymm2
vcmpgt_oqpd (%ecx),%ymm6,%ymm2
vcmptrue_uspd %ymm4,%ymm6,%ymm2
vcmptrue_uspd (%ecx),%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqps (%ecx),%ymm6,%ymm2
vcmpeq_oqps %ymm4,%ymm6,%ymm2
vcmpeq_oqps (%ecx),%ymm6,%ymm2
vcmpltps %ymm4,%ymm6,%ymm2
vcmpltps (%ecx),%ymm6,%ymm2
vcmplt_osps %ymm4,%ymm6,%ymm2
vcmplt_osps (%ecx),%ymm6,%ymm2
vcmpleps %ymm4,%ymm6,%ymm2
vcmpleps (%ecx),%ymm6,%ymm2
vcmple_osps %ymm4,%ymm6,%ymm2
vcmple_osps (%ecx),%ymm6,%ymm2
vcmpunordps %ymm4,%ymm6,%ymm2
vcmpunordps (%ecx),%ymm6,%ymm2
vcmpunord_qps %ymm4,%ymm6,%ymm2
vcmpunord_qps (%ecx),%ymm6,%ymm2
vcmpneqps %ymm4,%ymm6,%ymm2
vcmpneqps (%ecx),%ymm6,%ymm2
vcmpneq_uqps %ymm4,%ymm6,%ymm2
vcmpneq_uqps (%ecx),%ymm6,%ymm2
vcmpnltps %ymm4,%ymm6,%ymm2
vcmpnltps (%ecx),%ymm6,%ymm2
vcmpnlt_usps %ymm4,%ymm6,%ymm2
vcmpnlt_usps (%ecx),%ymm6,%ymm2
vcmpnleps %ymm4,%ymm6,%ymm2
vcmpnleps (%ecx),%ymm6,%ymm2
vcmpnle_usps %ymm4,%ymm6,%ymm2
vcmpnle_usps (%ecx),%ymm6,%ymm2
vcmpordps %ymm4,%ymm6,%ymm2
vcmpordps (%ecx),%ymm6,%ymm2
vcmpord_qps %ymm4,%ymm6,%ymm2
vcmpord_qps (%ecx),%ymm6,%ymm2
vcmpeq_uqps %ymm4,%ymm6,%ymm2
vcmpeq_uqps (%ecx),%ymm6,%ymm2
vcmpngeps %ymm4,%ymm6,%ymm2
vcmpngeps (%ecx),%ymm6,%ymm2
vcmpnge_usps %ymm4,%ymm6,%ymm2
vcmpnge_usps (%ecx),%ymm6,%ymm2
vcmpngtps %ymm4,%ymm6,%ymm2
vcmpngtps (%ecx),%ymm6,%ymm2
vcmpngt_usps %ymm4,%ymm6,%ymm2
vcmpngt_usps (%ecx),%ymm6,%ymm2
vcmpfalseps %ymm4,%ymm6,%ymm2
vcmpfalseps (%ecx),%ymm6,%ymm2
vcmpfalse_oqps %ymm4,%ymm6,%ymm2
vcmpfalse_oqps (%ecx),%ymm6,%ymm2
vcmpneq_oqps %ymm4,%ymm6,%ymm2
vcmpneq_oqps (%ecx),%ymm6,%ymm2
vcmpgeps %ymm4,%ymm6,%ymm2
vcmpgeps (%ecx),%ymm6,%ymm2
vcmpge_osps %ymm4,%ymm6,%ymm2
vcmpge_osps (%ecx),%ymm6,%ymm2
vcmpgtps %ymm4,%ymm6,%ymm2
vcmpgtps (%ecx),%ymm6,%ymm2
vcmpgt_osps %ymm4,%ymm6,%ymm2
vcmpgt_osps (%ecx),%ymm6,%ymm2
vcmptrueps %ymm4,%ymm6,%ymm2
vcmptrueps (%ecx),%ymm6,%ymm2
vcmptrue_uqps %ymm4,%ymm6,%ymm2
vcmptrue_uqps (%ecx),%ymm6,%ymm2
vcmpeq_osps %ymm4,%ymm6,%ymm2
vcmpeq_osps (%ecx),%ymm6,%ymm2
vcmplt_oqps %ymm4,%ymm6,%ymm2
vcmplt_oqps (%ecx),%ymm6,%ymm2
vcmple_oqps %ymm4,%ymm6,%ymm2
vcmple_oqps (%ecx),%ymm6,%ymm2
vcmpunord_sps %ymm4,%ymm6,%ymm2
vcmpunord_sps (%ecx),%ymm6,%ymm2
vcmpneq_usps %ymm4,%ymm6,%ymm2
vcmpneq_usps (%ecx),%ymm6,%ymm2
vcmpnlt_uqps %ymm4,%ymm6,%ymm2
vcmpnlt_uqps (%ecx),%ymm6,%ymm2
vcmpnle_uqps %ymm4,%ymm6,%ymm2
vcmpnle_uqps (%ecx),%ymm6,%ymm2
vcmpord_sps %ymm4,%ymm6,%ymm2
vcmpord_sps (%ecx),%ymm6,%ymm2
vcmpeq_usps %ymm4,%ymm6,%ymm2
vcmpeq_usps (%ecx),%ymm6,%ymm2
vcmpnge_uqps %ymm4,%ymm6,%ymm2
vcmpnge_uqps (%ecx),%ymm6,%ymm2
vcmpngt_uqps %ymm4,%ymm6,%ymm2
vcmpngt_uqps (%ecx),%ymm6,%ymm2
vcmpfalse_osps %ymm4,%ymm6,%ymm2
vcmpfalse_osps (%ecx),%ymm6,%ymm2
vcmpneq_osps %ymm4,%ymm6,%ymm2
vcmpneq_osps (%ecx),%ymm6,%ymm2
vcmpge_oqps %ymm4,%ymm6,%ymm2
vcmpge_oqps (%ecx),%ymm6,%ymm2
vcmpgt_oqps %ymm4,%ymm6,%ymm2
vcmpgt_oqps (%ecx),%ymm6,%ymm2
vcmptrue_usps %ymm4,%ymm6,%ymm2
vcmptrue_usps (%ecx),%ymm6,%ymm2
vgf2p8mulb %ymm4, %ymm5, %ymm6
vgf2p8mulb (%ecx), %ymm5, %ymm6
vgf2p8mulb -123456(%esp,%esi,8), %ymm5, %ymm6
vgf2p8mulb 4064(%edx), %ymm5, %ymm6
vgf2p8mulb 4096(%edx), %ymm5, %ymm6
vgf2p8mulb -4096(%edx), %ymm5, %ymm6
vgf2p8mulb -4128(%edx), %ymm5, %ymm6
# Tests for op ymm/mem256, xmm
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqy (%ecx),%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psy (%ecx),%xmm4
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqy (%ecx),%xmm4
# Tests for op ymm/mem256, ymm
vcvtdq2ps %ymm4,%ymm6
vcvtdq2ps (%ecx),%ymm4
vcvtps2dq %ymm4,%ymm6
vcvtps2dq (%ecx),%ymm4
vcvttps2dq %ymm4,%ymm6
vcvttps2dq (%ecx),%ymm4
vmovapd %ymm4,%ymm6
vmovapd (%ecx),%ymm4
vmovaps %ymm4,%ymm6
vmovaps (%ecx),%ymm4
vmovdqa %ymm4,%ymm6
vmovdqa (%ecx),%ymm4
vmovdqu %ymm4,%ymm6
vmovdqu (%ecx),%ymm4
vmovddup %ymm4,%ymm6
vmovddup (%ecx),%ymm4
vmovshdup %ymm4,%ymm6
vmovshdup (%ecx),%ymm4
vmovsldup %ymm4,%ymm6
vmovsldup (%ecx),%ymm4
vmovupd %ymm4,%ymm6
vmovupd (%ecx),%ymm4
vmovups %ymm4,%ymm6
vmovups (%ecx),%ymm4
vptest %ymm4,%ymm6
vptest (%ecx),%ymm4
vrcpps %ymm4,%ymm6
vrcpps (%ecx),%ymm4
vrsqrtps %ymm4,%ymm6
vrsqrtps (%ecx),%ymm4
vsqrtpd %ymm4,%ymm6
vsqrtpd (%ecx),%ymm4
vsqrtps %ymm4,%ymm6
vsqrtps (%ecx),%ymm4
vtestpd %ymm4,%ymm6
vtestpd (%ecx),%ymm4
vtestps %ymm4,%ymm6
vtestps (%ecx),%ymm4
# Tests for op ymm, ymm/mem256
vmovapd %ymm4,%ymm6
vmovapd %ymm4,(%ecx)
vmovaps %ymm4,%ymm6
vmovaps %ymm4,(%ecx)
vmovdqa %ymm4,%ymm6
vmovdqa %ymm4,(%ecx)
vmovdqu %ymm4,%ymm6
vmovdqu %ymm4,(%ecx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%ecx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%ecx)
# Tests for op mem256, ymm
vlddqu (%ecx),%ymm4
# Tests for op ymm, mem256
vmovntdq %ymm4,(%ecx)
vmovntpd %ymm4,(%ecx)
vmovntps %ymm4,(%ecx)
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendpd $7,(%ecx),%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vblendps $7,(%ecx),%ymm6,%ymm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmppd $7,(%ecx),%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpps $7,(%ecx),%ymm6,%ymm2
vdpps $7,%ymm4,%ymm6,%ymm2
vdpps $7,(%ecx),%ymm6,%ymm2
vperm2f128 $7,%ymm4,%ymm6,%ymm2
vperm2f128 $7,(%ecx),%ymm6,%ymm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufpd $7,(%ecx),%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vshufps $7,(%ecx),%ymm6,%ymm2
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, (%ecx), %ymm5, %ymm6
vgf2p8affineqb $123, -123456(%esp,%esi,8), %ymm5, %ymm6
vgf2p8affineqb $123, 4064(%edx), %ymm5, %ymm6
vgf2p8affineqb $123, 4096(%edx), %ymm5, %ymm6
vgf2p8affineqb $123, -4096(%edx), %ymm5, %ymm6
vgf2p8affineqb $123, -4128(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, (%ecx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4064(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4096(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4096(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4128(%edx), %ymm5, %ymm6
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd %ymm4,%ymm6,%ymm2,%ymm7
vblendvpd %ymm4,(%ecx),%ymm2,%ymm7
vblendvps %ymm4,%ymm6,%ymm2,%ymm7
vblendvps %ymm4,(%ecx),%ymm2,%ymm7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 $7,%xmm4,%ymm4,%ymm6
vinsertf128 $7,(%ecx),%ymm4,%ymm6
# Tests for op imm8, ymm, xmm/mem128
vextractf128 $7,%ymm4,%xmm4
vextractf128 $7,%ymm4,(%ecx)
# Tests for op mem128, ymm
vbroadcastf128 (%ecx),%ymm4
# Tests for op xmm/mem128, xmm
vcvtdq2ps %xmm4,%xmm6
vcvtdq2ps (%ecx),%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqx (%ecx),%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psx (%ecx),%xmm4
vcvtps2dq %xmm4,%xmm6
vcvtps2dq (%ecx),%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqx (%ecx),%xmm4
vcvttps2dq %xmm4,%xmm6
vcvttps2dq (%ecx),%xmm4
vmovapd %xmm4,%xmm6
vmovapd (%ecx),%xmm4
vmovaps %xmm4,%xmm6
vmovaps (%ecx),%xmm4
vmovdqa %xmm4,%xmm6
vmovdqa (%ecx),%xmm4
vmovdqu %xmm4,%xmm6
vmovdqu (%ecx),%xmm4
vmovshdup %xmm4,%xmm6
vmovshdup (%ecx),%xmm4
vmovsldup %xmm4,%xmm6
vmovsldup (%ecx),%xmm4
vmovupd %xmm4,%xmm6
vmovupd (%ecx),%xmm4
vmovups %xmm4,%xmm6
vmovups (%ecx),%xmm4
vpabsb %xmm4,%xmm6
vpabsb (%ecx),%xmm4
vpabsw %xmm4,%xmm6
vpabsw (%ecx),%xmm4
vpabsd %xmm4,%xmm6
vpabsd (%ecx),%xmm4
vphminposuw %xmm4,%xmm6
vphminposuw (%ecx),%xmm4
vptest %xmm4,%xmm6
vptest (%ecx),%xmm4
vtestps %xmm4,%xmm6
vtestps (%ecx),%xmm4
vtestpd %xmm4,%xmm6
vtestpd (%ecx),%xmm4
vrcpps %xmm4,%xmm6
vrcpps (%ecx),%xmm4
vrsqrtps %xmm4,%xmm6
vrsqrtps (%ecx),%xmm4
vsqrtpd %xmm4,%xmm6
vsqrtpd (%ecx),%xmm4
vsqrtps %xmm4,%xmm6
vsqrtps (%ecx),%xmm4
vaesimc %xmm4,%xmm6
vaesimc (%ecx),%xmm4
# Tests for op xmm, xmm/mem128
vmovapd %xmm4,%xmm6
vmovapd %xmm4,(%ecx)
vmovaps %xmm4,%xmm6
vmovaps %xmm4,(%ecx)
vmovdqa %xmm4,%xmm6
vmovdqa %xmm4,(%ecx)
vmovdqu %xmm4,%xmm6
vmovdqu %xmm4,(%ecx)
vmovupd %xmm4,%xmm6
vmovupd %xmm4,(%ecx)
vmovups %xmm4,%xmm6
vmovups %xmm4,(%ecx)
# Tests for op mem128, xmm
vlddqu (%ecx),%xmm4
vmovntdqa (%ecx),%xmm4
# Tests for op xmm, mem128
vmovntdq %xmm4,(%ecx)
vmovntpd %xmm4,(%ecx)
vmovntps %xmm4,(%ecx)
# Tests for op xmm/mem128, ymm
vcvtdq2pd %xmm4,%ymm4
vcvtdq2pd (%ecx),%ymm4
vcvtps2pd %xmm4,%ymm4
vcvtps2pd (%ecx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vaddpd %xmm4,%xmm6,%xmm2
vaddpd (%ecx),%xmm6,%xmm7
vaddps %xmm4,%xmm6,%xmm2
vaddps (%ecx),%xmm6,%xmm7
vaddsubpd %xmm4,%xmm6,%xmm2
vaddsubpd (%ecx),%xmm6,%xmm7
vaddsubps %xmm4,%xmm6,%xmm2
vaddsubps (%ecx),%xmm6,%xmm7
vandnpd %xmm4,%xmm6,%xmm2
vandnpd (%ecx),%xmm6,%xmm7
vandnps %xmm4,%xmm6,%xmm2
vandnps (%ecx),%xmm6,%xmm7
vandpd %xmm4,%xmm6,%xmm2
vandpd (%ecx),%xmm6,%xmm7
vandps %xmm4,%xmm6,%xmm2
vandps (%ecx),%xmm6,%xmm7
vdivpd %xmm4,%xmm6,%xmm2
vdivpd (%ecx),%xmm6,%xmm7
vdivps %xmm4,%xmm6,%xmm2
vdivps (%ecx),%xmm6,%xmm7
vhaddpd %xmm4,%xmm6,%xmm2
vhaddpd (%ecx),%xmm6,%xmm7
vhaddps %xmm4,%xmm6,%xmm2
vhaddps (%ecx),%xmm6,%xmm7
vhsubpd %xmm4,%xmm6,%xmm2
vhsubpd (%ecx),%xmm6,%xmm7
vhsubps %xmm4,%xmm6,%xmm2
vhsubps (%ecx),%xmm6,%xmm7
vmaxpd %xmm4,%xmm6,%xmm2
vmaxpd (%ecx),%xmm6,%xmm7
vmaxps %xmm4,%xmm6,%xmm2
vmaxps (%ecx),%xmm6,%xmm7
vminpd %xmm4,%xmm6,%xmm2
vminpd (%ecx),%xmm6,%xmm7
vminps %xmm4,%xmm6,%xmm2
vminps (%ecx),%xmm6,%xmm7
vmulpd %xmm4,%xmm6,%xmm2
vmulpd (%ecx),%xmm6,%xmm7
vmulps %xmm4,%xmm6,%xmm2
vmulps (%ecx),%xmm6,%xmm7
vorpd %xmm4,%xmm6,%xmm2
vorpd (%ecx),%xmm6,%xmm7
vorps %xmm4,%xmm6,%xmm2
vorps (%ecx),%xmm6,%xmm7
vpacksswb %xmm4,%xmm6,%xmm2
vpacksswb (%ecx),%xmm6,%xmm7
vpackssdw %xmm4,%xmm6,%xmm2
vpackssdw (%ecx),%xmm6,%xmm7
vpackuswb %xmm4,%xmm6,%xmm2
vpackuswb (%ecx),%xmm6,%xmm7
vpackusdw %xmm4,%xmm6,%xmm2
vpackusdw (%ecx),%xmm6,%xmm7
vpaddb %xmm4,%xmm6,%xmm2
vpaddb (%ecx),%xmm6,%xmm7
vpaddw %xmm4,%xmm6,%xmm2
vpaddw (%ecx),%xmm6,%xmm7
vpaddd %xmm4,%xmm6,%xmm2
vpaddd (%ecx),%xmm6,%xmm7
vpaddq %xmm4,%xmm6,%xmm2
vpaddq (%ecx),%xmm6,%xmm7
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsb (%ecx),%xmm6,%xmm7
vpaddsw %xmm4,%xmm6,%xmm2
vpaddsw (%ecx),%xmm6,%xmm7
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusb (%ecx),%xmm6,%xmm7
vpaddusw %xmm4,%xmm6,%xmm2
vpaddusw (%ecx),%xmm6,%xmm7
vpand %xmm4,%xmm6,%xmm2
vpand (%ecx),%xmm6,%xmm7
vpandn %xmm4,%xmm6,%xmm2
vpandn (%ecx),%xmm6,%xmm7
vpavgb %xmm4,%xmm6,%xmm2
vpavgb (%ecx),%xmm6,%xmm7
vpavgw %xmm4,%xmm6,%xmm2
vpavgw (%ecx),%xmm6,%xmm7
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq (%ecx),%xmm6,%xmm7
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq (%ecx),%xmm6,%xmm7
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq (%ecx),%xmm6,%xmm7
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqhqdq (%ecx),%xmm6,%xmm7
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqb (%ecx),%xmm6,%xmm7
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpeqw (%ecx),%xmm6,%xmm7
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqd (%ecx),%xmm6,%xmm7
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqq (%ecx),%xmm6,%xmm7
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtb (%ecx),%xmm6,%xmm7
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpgtw (%ecx),%xmm6,%xmm7
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtd (%ecx),%xmm6,%xmm7
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtq (%ecx),%xmm6,%xmm7
vpermilpd %xmm4,%xmm6,%xmm2
vpermilpd (%ecx),%xmm6,%xmm7
vpermilps %xmm4,%xmm6,%xmm2
vpermilps (%ecx),%xmm6,%xmm7
vphaddw %xmm4,%xmm6,%xmm2
vphaddw (%ecx),%xmm6,%xmm7
vphaddd %xmm4,%xmm6,%xmm2
vphaddd (%ecx),%xmm6,%xmm7
vphaddsw %xmm4,%xmm6,%xmm2
vphaddsw (%ecx),%xmm6,%xmm7
vphsubw %xmm4,%xmm6,%xmm2
vphsubw (%ecx),%xmm6,%xmm7
vphsubd %xmm4,%xmm6,%xmm2
vphsubd (%ecx),%xmm6,%xmm7
vphsubsw %xmm4,%xmm6,%xmm2
vphsubsw (%ecx),%xmm6,%xmm7
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaddwd (%ecx),%xmm6,%xmm7
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddubsw (%ecx),%xmm6,%xmm7
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsb (%ecx),%xmm6,%xmm7
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxsw (%ecx),%xmm6,%xmm7
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsd (%ecx),%xmm6,%xmm7
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxub (%ecx),%xmm6,%xmm7
vpmaxuw %xmm4,%xmm6,%xmm2
vpmaxuw (%ecx),%xmm6,%xmm7
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxud (%ecx),%xmm6,%xmm7
vpminsb %xmm4,%xmm6,%xmm2
vpminsb (%ecx),%xmm6,%xmm7
vpminsw %xmm4,%xmm6,%xmm2
vpminsw (%ecx),%xmm6,%xmm7
vpminsd %xmm4,%xmm6,%xmm2
vpminsd (%ecx),%xmm6,%xmm7
vpminub %xmm4,%xmm6,%xmm2
vpminub (%ecx),%xmm6,%xmm7
vpminuw %xmm4,%xmm6,%xmm2
vpminuw (%ecx),%xmm6,%xmm7
vpminud %xmm4,%xmm6,%xmm2
vpminud (%ecx),%xmm6,%xmm7
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhuw (%ecx),%xmm6,%xmm7
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhrsw (%ecx),%xmm6,%xmm7
vpmulhw %xmm4,%xmm6,%xmm2
vpmulhw (%ecx),%xmm6,%xmm7
vpmullw %xmm4,%xmm6,%xmm2
vpmullw (%ecx),%xmm6,%xmm7
vpmulld %xmm4,%xmm6,%xmm2
vpmulld (%ecx),%xmm6,%xmm7
vpmuludq %xmm4,%xmm6,%xmm2
vpmuludq (%ecx),%xmm6,%xmm7
vpmuldq %xmm4,%xmm6,%xmm2
vpmuldq (%ecx),%xmm6,%xmm7
vpor %xmm4,%xmm6,%xmm2
vpor (%ecx),%xmm6,%xmm7
vpsadbw %xmm4,%xmm6,%xmm2
vpsadbw (%ecx),%xmm6,%xmm7
vpshufb %xmm4,%xmm6,%xmm2
vpshufb (%ecx),%xmm6,%xmm7
vpsignb %xmm4,%xmm6,%xmm2
vpsignb (%ecx),%xmm6,%xmm7
vpsignw %xmm4,%xmm6,%xmm2
vpsignw (%ecx),%xmm6,%xmm7
vpsignd %xmm4,%xmm6,%xmm2
vpsignd (%ecx),%xmm6,%xmm7
vpsllw %xmm4,%xmm6,%xmm2
vpsllw (%ecx),%xmm6,%xmm7
vpslld %xmm4,%xmm6,%xmm2
vpslld (%ecx),%xmm6,%xmm7
vpsllq %xmm4,%xmm6,%xmm2
vpsllq (%ecx),%xmm6,%xmm7
vpsraw %xmm4,%xmm6,%xmm2
vpsraw (%ecx),%xmm6,%xmm7
vpsrad %xmm4,%xmm6,%xmm2
vpsrad (%ecx),%xmm6,%xmm7
vpsrlw %xmm4,%xmm6,%xmm2
vpsrlw (%ecx),%xmm6,%xmm7
vpsrld %xmm4,%xmm6,%xmm2
vpsrld (%ecx),%xmm6,%xmm7
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlq (%ecx),%xmm6,%xmm7
vpsubb %xmm4,%xmm6,%xmm2
vpsubb (%ecx),%xmm6,%xmm7
vpsubw %xmm4,%xmm6,%xmm2
vpsubw (%ecx),%xmm6,%xmm7
vpsubd %xmm4,%xmm6,%xmm2
vpsubd (%ecx),%xmm6,%xmm7
vpsubq %xmm4,%xmm6,%xmm2
vpsubq (%ecx),%xmm6,%xmm7
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsb (%ecx),%xmm6,%xmm7
vpsubsw %xmm4,%xmm6,%xmm2
vpsubsw (%ecx),%xmm6,%xmm7
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusb (%ecx),%xmm6,%xmm7
vpsubusw %xmm4,%xmm6,%xmm2
vpsubusw (%ecx),%xmm6,%xmm7
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhbw (%ecx),%xmm6,%xmm7
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpckhwd (%ecx),%xmm6,%xmm7
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhdq (%ecx),%xmm6,%xmm7
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhqdq (%ecx),%xmm6,%xmm7
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpcklbw (%ecx),%xmm6,%xmm7
vpunpcklwd %xmm4,%xmm6,%xmm2
vpunpcklwd (%ecx),%xmm6,%xmm7
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpckldq (%ecx),%xmm6,%xmm7
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklqdq (%ecx),%xmm6,%xmm7
vpxor %xmm4,%xmm6,%xmm2
vpxor (%ecx),%xmm6,%xmm7
vsubpd %xmm4,%xmm6,%xmm2
vsubpd (%ecx),%xmm6,%xmm7
vsubps %xmm4,%xmm6,%xmm2
vsubps (%ecx),%xmm6,%xmm7
vunpckhpd %xmm4,%xmm6,%xmm2
vunpckhpd (%ecx),%xmm6,%xmm7
vunpckhps %xmm4,%xmm6,%xmm2
vunpckhps (%ecx),%xmm6,%xmm7
vunpcklpd %xmm4,%xmm6,%xmm2
vunpcklpd (%ecx),%xmm6,%xmm7
vunpcklps %xmm4,%xmm6,%xmm2
vunpcklps (%ecx),%xmm6,%xmm7
vxorpd %xmm4,%xmm6,%xmm2
vxorpd (%ecx),%xmm6,%xmm7
vxorps %xmm4,%xmm6,%xmm2
vxorps (%ecx),%xmm6,%xmm7
vaesenc %xmm4,%xmm6,%xmm2
vaesenc (%ecx),%xmm6,%xmm7
vaesenclast %xmm4,%xmm6,%xmm2
vaesenclast (%ecx),%xmm6,%xmm7
vaesdec %xmm4,%xmm6,%xmm2
vaesdec (%ecx),%xmm6,%xmm7
vaesdeclast %xmm4,%xmm6,%xmm2
vaesdeclast (%ecx),%xmm6,%xmm7
vcmpeqpd %xmm4,%xmm6,%xmm2
vcmpeqpd (%ecx),%xmm6,%xmm7
vcmpltpd %xmm4,%xmm6,%xmm2
vcmpltpd (%ecx),%xmm6,%xmm7
vcmplepd %xmm4,%xmm6,%xmm2
vcmplepd (%ecx),%xmm6,%xmm7
vcmpunordpd %xmm4,%xmm6,%xmm2
vcmpunordpd (%ecx),%xmm6,%xmm7
vcmpneqpd %xmm4,%xmm6,%xmm2
vcmpneqpd (%ecx),%xmm6,%xmm7
vcmpnltpd %xmm4,%xmm6,%xmm2
vcmpnltpd (%ecx),%xmm6,%xmm7
vcmpnlepd %xmm4,%xmm6,%xmm2
vcmpnlepd (%ecx),%xmm6,%xmm7
vcmpordpd %xmm4,%xmm6,%xmm2
vcmpordpd (%ecx),%xmm6,%xmm7
vcmpeq_uqpd %xmm4,%xmm6,%xmm2
vcmpeq_uqpd (%ecx),%xmm6,%xmm7
vcmpngepd %xmm4,%xmm6,%xmm2
vcmpngepd (%ecx),%xmm6,%xmm7
vcmpngtpd %xmm4,%xmm6,%xmm2
vcmpngtpd (%ecx),%xmm6,%xmm7
vcmpfalsepd %xmm4,%xmm6,%xmm2
vcmpfalsepd (%ecx),%xmm6,%xmm7
vcmpneq_oqpd %xmm4,%xmm6,%xmm2
vcmpneq_oqpd (%ecx),%xmm6,%xmm7
vcmpgepd %xmm4,%xmm6,%xmm2
vcmpgepd (%ecx),%xmm6,%xmm7
vcmpgtpd %xmm4,%xmm6,%xmm2
vcmpgtpd (%ecx),%xmm6,%xmm7
vcmptruepd %xmm4,%xmm6,%xmm2
vcmptruepd (%ecx),%xmm6,%xmm7
vcmpeq_ospd %xmm4,%xmm6,%xmm2
vcmpeq_ospd (%ecx),%xmm6,%xmm7
vcmplt_oqpd %xmm4,%xmm6,%xmm2
vcmplt_oqpd (%ecx),%xmm6,%xmm7
vcmple_oqpd %xmm4,%xmm6,%xmm2
vcmple_oqpd (%ecx),%xmm6,%xmm7
vcmpunord_spd %xmm4,%xmm6,%xmm2
vcmpunord_spd (%ecx),%xmm6,%xmm7
vcmpneq_uspd %xmm4,%xmm6,%xmm2
vcmpneq_uspd (%ecx),%xmm6,%xmm7
vcmpnlt_uqpd %xmm4,%xmm6,%xmm2
vcmpnlt_uqpd (%ecx),%xmm6,%xmm7
vcmpnle_uqpd %xmm4,%xmm6,%xmm2
vcmpnle_uqpd (%ecx),%xmm6,%xmm7
vcmpord_spd %xmm4,%xmm6,%xmm2
vcmpord_spd (%ecx),%xmm6,%xmm7
vcmpeq_uspd %xmm4,%xmm6,%xmm2
vcmpeq_uspd (%ecx),%xmm6,%xmm7
vcmpnge_uqpd %xmm4,%xmm6,%xmm2
vcmpnge_uqpd (%ecx),%xmm6,%xmm7
vcmpngt_uqpd %xmm4,%xmm6,%xmm2
vcmpngt_uqpd (%ecx),%xmm6,%xmm7
vcmpfalse_ospd %xmm4,%xmm6,%xmm2
vcmpfalse_ospd (%ecx),%xmm6,%xmm7
vcmpneq_ospd %xmm4,%xmm6,%xmm2
vcmpneq_ospd (%ecx),%xmm6,%xmm7
vcmpge_oqpd %xmm4,%xmm6,%xmm2
vcmpge_oqpd (%ecx),%xmm6,%xmm7
vcmpgt_oqpd %xmm4,%xmm6,%xmm2
vcmpgt_oqpd (%ecx),%xmm6,%xmm7
vcmptrue_uspd %xmm4,%xmm6,%xmm2
vcmptrue_uspd (%ecx),%xmm6,%xmm7
vcmpeqps %xmm4,%xmm6,%xmm2
vcmpeqps (%ecx),%xmm6,%xmm7
vcmpltps %xmm4,%xmm6,%xmm2
vcmpltps (%ecx),%xmm6,%xmm7
vcmpleps %xmm4,%xmm6,%xmm2
vcmpleps (%ecx),%xmm6,%xmm7
vcmpunordps %xmm4,%xmm6,%xmm2
vcmpunordps (%ecx),%xmm6,%xmm7
vcmpneqps %xmm4,%xmm6,%xmm2
vcmpneqps (%ecx),%xmm6,%xmm7
vcmpnltps %xmm4,%xmm6,%xmm2
vcmpnltps (%ecx),%xmm6,%xmm7
vcmpnleps %xmm4,%xmm6,%xmm2
vcmpnleps (%ecx),%xmm6,%xmm7
vcmpordps %xmm4,%xmm6,%xmm2
vcmpordps (%ecx),%xmm6,%xmm7
vcmpeq_uqps %xmm4,%xmm6,%xmm2
vcmpeq_uqps (%ecx),%xmm6,%xmm7
vcmpngeps %xmm4,%xmm6,%xmm2
vcmpngeps (%ecx),%xmm6,%xmm7
vcmpngtps %xmm4,%xmm6,%xmm2
vcmpngtps (%ecx),%xmm6,%xmm7
vcmpfalseps %xmm4,%xmm6,%xmm2
vcmpfalseps (%ecx),%xmm6,%xmm7
vcmpneq_oqps %xmm4,%xmm6,%xmm2
vcmpneq_oqps (%ecx),%xmm6,%xmm7
vcmpgeps %xmm4,%xmm6,%xmm2
vcmpgeps (%ecx),%xmm6,%xmm7
vcmpgtps %xmm4,%xmm6,%xmm2
vcmpgtps (%ecx),%xmm6,%xmm7
vcmptrueps %xmm4,%xmm6,%xmm2
vcmptrueps (%ecx),%xmm6,%xmm7
vcmpeq_osps %xmm4,%xmm6,%xmm2
vcmpeq_osps (%ecx),%xmm6,%xmm7
vcmplt_oqps %xmm4,%xmm6,%xmm2
vcmplt_oqps (%ecx),%xmm6,%xmm7
vcmple_oqps %xmm4,%xmm6,%xmm2
vcmple_oqps (%ecx),%xmm6,%xmm7
vcmpunord_sps %xmm4,%xmm6,%xmm2
vcmpunord_sps (%ecx),%xmm6,%xmm7
vcmpneq_usps %xmm4,%xmm6,%xmm2
vcmpneq_usps (%ecx),%xmm6,%xmm7
vcmpnlt_uqps %xmm4,%xmm6,%xmm2
vcmpnlt_uqps (%ecx),%xmm6,%xmm7
vcmpnle_uqps %xmm4,%xmm6,%xmm2
vcmpnle_uqps (%ecx),%xmm6,%xmm7
vcmpord_sps %xmm4,%xmm6,%xmm2
vcmpord_sps (%ecx),%xmm6,%xmm7
vcmpeq_usps %xmm4,%xmm6,%xmm2
vcmpeq_usps (%ecx),%xmm6,%xmm7
vcmpnge_uqps %xmm4,%xmm6,%xmm2
vcmpnge_uqps (%ecx),%xmm6,%xmm7
vcmpngt_uqps %xmm4,%xmm6,%xmm2
vcmpngt_uqps (%ecx),%xmm6,%xmm7
vcmpfalse_osps %xmm4,%xmm6,%xmm2
vcmpfalse_osps (%ecx),%xmm6,%xmm7
vcmpneq_osps %xmm4,%xmm6,%xmm2
vcmpneq_osps (%ecx),%xmm6,%xmm7
vcmpge_oqps %xmm4,%xmm6,%xmm2
vcmpge_oqps (%ecx),%xmm6,%xmm7
vcmpgt_oqps %xmm4,%xmm6,%xmm2
vcmpgt_oqps (%ecx),%xmm6,%xmm7
vcmptrue_usps %xmm4,%xmm6,%xmm2
vcmptrue_usps (%ecx),%xmm6,%xmm7
vgf2p8mulb %xmm4, %xmm5, %xmm6
vgf2p8mulb (%ecx), %xmm5, %xmm6
vgf2p8mulb -123456(%esp,%esi,8), %xmm5, %xmm6
vgf2p8mulb 2032(%edx), %xmm5, %xmm6
vgf2p8mulb 2048(%edx), %xmm5, %xmm6
vgf2p8mulb -2048(%edx), %xmm5, %xmm6
vgf2p8mulb -2064(%edx), %xmm5, %xmm6
# Tests for op mem128, xmm, xmm
vmaskmovps (%ecx),%xmm4,%xmm6
vmaskmovpd (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist $7,%xmm4,%xmm6
vaeskeygenassist $7,(%ecx),%xmm6
vpcmpestri $7,%xmm4,%xmm6
vpcmpestri $7,(%ecx),%xmm6
vpcmpestrm $7,%xmm4,%xmm6
vpcmpestrm $7,(%ecx),%xmm6
vpcmpistri $7,%xmm4,%xmm6
vpcmpistri $7,(%ecx),%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpcmpistrm $7,(%ecx),%xmm6
vpermilpd $7,%xmm4,%xmm6
vpermilpd $7,(%ecx),%xmm6
vpermilps $7,%xmm4,%xmm6
vpermilps $7,(%ecx),%xmm6
vpshufd $7,%xmm4,%xmm6
vpshufd $7,(%ecx),%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshufhw $7,(%ecx),%xmm6
vpshuflw $7,%xmm4,%xmm6
vpshuflw $7,(%ecx),%xmm6
vroundpd $7,%xmm4,%xmm6
vroundpd $7,(%ecx),%xmm6
vroundps $7,%xmm4,%xmm6
vroundps $7,(%ecx),%xmm6
# Tests for op xmm, xmm, mem128
vmaskmovps %xmm4,%xmm6,(%ecx)
vmaskmovpd %xmm4,%xmm6,(%ecx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd $7,%xmm4,%xmm6,%xmm2
vblendpd $7,(%ecx),%xmm6,%xmm2
vblendps $7,%xmm4,%xmm6,%xmm2
vblendps $7,(%ecx),%xmm6,%xmm2
vcmppd $7,%xmm4,%xmm6,%xmm2
vcmppd $7,(%ecx),%xmm6,%xmm2
vcmpps $7,%xmm4,%xmm6,%xmm2
vcmpps $7,(%ecx),%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdppd $7,(%ecx),%xmm6,%xmm2
vdpps $7,%xmm4,%xmm6,%xmm2
vdpps $7,(%ecx),%xmm6,%xmm2
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmpsadbw $7,(%ecx),%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpalignr $7,(%ecx),%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpblendw $7,(%ecx),%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpclmulqdq $7,(%ecx),%xmm6,%xmm2
vshufpd $7,%xmm4,%xmm6,%xmm2
vshufpd $7,(%ecx),%xmm6,%xmm2
vshufps $7,%xmm4,%xmm6,%xmm2
vshufps $7,(%ecx),%xmm6,%xmm2
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, (%ecx), %xmm5, %xmm6
vgf2p8affineqb $123, -123456(%esp,%esi,8), %xmm5, %xmm6
vgf2p8affineqb $123, 2032(%edx), %xmm5, %xmm6
vgf2p8affineqb $123, 2048(%edx), %xmm5, %xmm6
vgf2p8affineqb $123, -2048(%edx), %xmm5, %xmm6
vgf2p8affineqb $123, -2064(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, (%ecx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2032(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2048(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2048(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2064(%edx), %xmm5, %xmm6
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd %xmm4,%xmm6,%xmm2,%xmm7
vblendvpd %xmm4,(%ecx),%xmm2,%xmm7
vblendvps %xmm4,%xmm6,%xmm2,%xmm7
vblendvps %xmm4,(%ecx),%xmm2,%xmm7
vpblendvb %xmm4,%xmm6,%xmm2,%xmm7
vpblendvb %xmm4,(%ecx),%xmm2,%xmm7
# Tests for op mem64, ymm
vbroadcastsd (%ecx),%ymm4
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%ecx),%xmm4
vcvtdq2pd %xmm4,%xmm6
vcvtdq2pd (%ecx),%xmm4
vcvtps2pd %xmm4,%xmm6
vcvtps2pd (%ecx),%xmm4
vmovddup %xmm4,%xmm6
vmovddup (%ecx),%xmm4
vpmovsxbw %xmm4,%xmm6
vpmovsxbw (%ecx),%xmm4
vpmovsxwd %xmm4,%xmm6
vpmovsxwd (%ecx),%xmm4
vpmovsxdq %xmm4,%xmm6
vpmovsxdq (%ecx),%xmm4
vpmovzxbw %xmm4,%xmm6
vpmovzxbw (%ecx),%xmm4
vpmovzxwd %xmm4,%xmm6
vpmovzxwd (%ecx),%xmm4
vpmovzxdq %xmm4,%xmm6
vpmovzxdq (%ecx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%ecx),%xmm4
# Tests for op mem64, xmm
vmovsd (%ecx),%xmm4
# Tests for op xmm, mem64
vmovlpd %xmm4,(%ecx)
vmovlps %xmm4,(%ecx)
vmovhpd %xmm4,(%ecx)
vmovhps %xmm4,(%ecx)
vmovsd %xmm4,(%ecx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovq %xmm4,(%ecx)
vmovq (%ecx),%xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%ecx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%ecx),%ecx
# Tests for op mem64, xmm, xmm
vmovlpd (%ecx),%xmm4,%xmm6
vmovlps (%ecx),%xmm4,%xmm6
vmovhpd (%ecx),%xmm4,%xmm6
vmovhps (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%ecx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%ecx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%ecx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%ecx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%ecx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%ecx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%ecx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%ecx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%ecx),%xmm6,%xmm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%ecx),%xmm6,%xmm2
vcmpeq_oqsd %xmm4,%xmm6,%xmm2
vcmpeq_oqsd (%ecx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%ecx),%xmm6,%xmm2
vcmplt_ossd %xmm4,%xmm6,%xmm2
vcmplt_ossd (%ecx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%ecx),%xmm6,%xmm2
vcmple_ossd %xmm4,%xmm6,%xmm2
vcmple_ossd (%ecx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%ecx),%xmm6,%xmm2
vcmpunord_qsd %xmm4,%xmm6,%xmm2
vcmpunord_qsd (%ecx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%ecx),%xmm6,%xmm2
vcmpneq_uqsd %xmm4,%xmm6,%xmm2
vcmpneq_uqsd (%ecx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%ecx),%xmm6,%xmm2
vcmpnlt_ussd %xmm4,%xmm6,%xmm2
vcmpnlt_ussd (%ecx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%ecx),%xmm6,%xmm2
vcmpnle_ussd %xmm4,%xmm6,%xmm2
vcmpnle_ussd (%ecx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%ecx),%xmm6,%xmm2
vcmpord_qsd %xmm4,%xmm6,%xmm2
vcmpord_qsd (%ecx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%ecx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%ecx),%xmm6,%xmm2
vcmpnge_ussd %xmm4,%xmm6,%xmm2
vcmpnge_ussd (%ecx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%ecx),%xmm6,%xmm2
vcmpngt_ussd %xmm4,%xmm6,%xmm2
vcmpngt_ussd (%ecx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%ecx),%xmm6,%xmm2
vcmpfalse_oqsd %xmm4,%xmm6,%xmm2
vcmpfalse_oqsd (%ecx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%ecx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%ecx),%xmm6,%xmm2
vcmpge_ossd %xmm4,%xmm6,%xmm2
vcmpge_ossd (%ecx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%ecx),%xmm6,%xmm2
vcmpgt_ossd %xmm4,%xmm6,%xmm2
vcmpgt_ossd (%ecx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%ecx),%xmm6,%xmm2
vcmptrue_uqsd %xmm4,%xmm6,%xmm2
vcmptrue_uqsd (%ecx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%ecx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%ecx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%ecx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%ecx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%ecx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%ecx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%ecx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%ecx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%ecx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%ecx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%ecx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%ecx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%ecx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%ecx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%ecx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%ecx),%xmm6,%xmm2
# Tests for op mem64
vldmxcsr (%ecx)
vstmxcsr (%ecx)
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%ecx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%ecx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%ecx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%ecx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%ecx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%ecx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%ecx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%ecx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%ecx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%ecx),%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%ecx),%xmm6,%xmm2
vcmpeq_oqss %xmm4,%xmm6,%xmm2
vcmpeq_oqss (%ecx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%ecx),%xmm6,%xmm2
vcmplt_osss %xmm4,%xmm6,%xmm2
vcmplt_osss (%ecx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%ecx),%xmm6,%xmm2
vcmple_osss %xmm4,%xmm6,%xmm2
vcmple_osss (%ecx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%ecx),%xmm6,%xmm2
vcmpunord_qss %xmm4,%xmm6,%xmm2
vcmpunord_qss (%ecx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%ecx),%xmm6,%xmm2
vcmpneq_uqss %xmm4,%xmm6,%xmm2
vcmpneq_uqss (%ecx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%ecx),%xmm6,%xmm2
vcmpnlt_usss %xmm4,%xmm6,%xmm2
vcmpnlt_usss (%ecx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%ecx),%xmm6,%xmm2
vcmpnle_usss %xmm4,%xmm6,%xmm2
vcmpnle_usss (%ecx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%ecx),%xmm6,%xmm2
vcmpord_qss %xmm4,%xmm6,%xmm2
vcmpord_qss (%ecx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%ecx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%ecx),%xmm6,%xmm2
vcmpnge_usss %xmm4,%xmm6,%xmm2
vcmpnge_usss (%ecx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%ecx),%xmm6,%xmm2
vcmpngt_usss %xmm4,%xmm6,%xmm2
vcmpngt_usss (%ecx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%ecx),%xmm6,%xmm2
vcmpfalse_oqss %xmm4,%xmm6,%xmm2
vcmpfalse_oqss (%ecx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%ecx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%ecx),%xmm6,%xmm2
vcmpge_osss %xmm4,%xmm6,%xmm2
vcmpge_osss (%ecx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%ecx),%xmm6,%xmm2
vcmpgt_osss %xmm4,%xmm6,%xmm2
vcmpgt_osss (%ecx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%ecx),%xmm6,%xmm2
vcmptrue_uqss %xmm4,%xmm6,%xmm2
vcmptrue_uqss (%ecx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%ecx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%ecx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%ecx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%ecx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%ecx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%ecx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%ecx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%ecx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%ecx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%ecx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%ecx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%ecx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%ecx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%ecx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%ecx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%ecx),%xmm6,%xmm2
# Tests for op mem32, ymm
vbroadcastss (%ecx),%ymm4
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%ecx),%xmm4
vpmovsxbd %xmm4,%xmm6
vpmovsxbd (%ecx),%xmm4
vpmovsxwq %xmm4,%xmm6
vpmovsxwq (%ecx),%xmm4
vpmovzxbd %xmm4,%xmm6
vpmovzxbd (%ecx),%xmm4
vpmovzxwq %xmm4,%xmm6
vpmovzxwq (%ecx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%ecx),%xmm4
# Tests for op mem32, xmm
vbroadcastss (%ecx),%xmm4
vmovss (%ecx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%ecx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd %xmm4,%ecx
vmovd %xmm4,(%ecx)
vmovd %ecx,%xmm4
vmovd (%ecx),%xmm4
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%ecx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%ecx),%ecx
# Tests for op imm8, xmm, regq/mem32
vextractps $7,%xmm4,(%ecx)
# Tests for op imm8, xmm, regl/mem32
vpextrd $7,%xmm4,%ecx
vpextrd $7,%xmm4,(%ecx)
vextractps $7,%xmm4,%ecx
vextractps $7,%xmm4,(%ecx)
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd $7,%ecx,%xmm4,%xmm6
vpinsrd $7,(%ecx),%xmm4,%xmm6
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sd (%ecx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ss (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%ecx),%xmm6,%xmm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vinsertps $7,(%ecx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/m16, xmm
vpmovsxbq %xmm4,%xmm6
vpmovsxbq (%ecx),%xmm4
vpmovzxbq %xmm4,%xmm6
vpmovzxbq (%ecx),%xmm4
# Tests for op imm8, xmm, regl/mem16
vpextrw $7,%xmm4,%ecx
vpextrw $7,%xmm4,(%ecx)
# Tests for op imm8, xmm, regq/mem16
vpextrw $7,%xmm4,(%ecx)
# Tests for op imm8, regl/mem16, xmm, xmm
vpinsrw $7,%ecx,%xmm4,%xmm6
vpinsrw $7,(%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm, regl/mem8
vpextrb $7,%xmm4,%ecx
vpextrb $7,%xmm4,(%ecx)
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb $7,%ecx,%xmm4,%xmm6
vpinsrb $7,(%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm, regq/mem8
vpextrb $7,%xmm4,(%ecx)
# Tests for op xmm, xmm
vmaskmovdqu %xmm4,%xmm6
vmovq %xmm4,%xmm6
# Tests for op xmm, regl
vmovmskpd %xmm4,%ecx
vmovmskps %xmm4,%ecx
vpmovmskb %xmm4,%ecx
# Tests for op xmm, xmm, xmm
vmovhlps %xmm4,%xmm6,%xmm2
vmovlhps %xmm4,%xmm6,%xmm2
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
# Tests for op imm8, xmm, xmm
vpslld $7,%xmm4,%xmm6
vpslldq $7,%xmm4,%xmm6
vpsllq $7,%xmm4,%xmm6
vpsllw $7,%xmm4,%xmm6
vpsrad $7,%xmm4,%xmm6
vpsraw $7,%xmm4,%xmm6
vpsrld $7,%xmm4,%xmm6
vpsrldq $7,%xmm4,%xmm6
vpsrlq $7,%xmm4,%xmm6
vpsrlw $7,%xmm4,%xmm6
# Tests for op imm8, xmm, regl
vpextrw $7,%xmm4,%ecx
# Tests for op ymm, regl
vmovmskpd %ymm4,%ecx
vmovmskps %ymm4,%ecx
# Default instructions without suffixes.
vcvtpd2dq %xmm4,%xmm6
vcvtpd2dq %ymm4,%xmm6
vcvtpd2ps %xmm4,%xmm6
vcvtpd2ps %ymm4,%xmm6
vcvttpd2dq %xmm4,%xmm6
vcvttpd2dq %ymm4,%xmm6
# Tests with different memory and register operands.
vldmxcsr 0x1234
vmovdqa 0x1234,%xmm0
vmovdqa %xmm0,0x1234
vmovd %xmm0,0x1234
vcvtsd2si 0x1234,%eax
vcvtdq2pd 0x1234,%ymm0
vcvtpd2psy 0x1234,%xmm0
vpavgb 0x1234,%xmm0,%xmm7
vaeskeygenassist $7,0x1234,%xmm0
vpextrb $7,%xmm0,0x1234
vcvtsi2sdl 0x1234,%xmm0,%xmm7
vpclmulqdq $7,0x1234,%xmm0,%xmm7
vblendvps %xmm0,0x1234,%xmm4,%xmm6
vpinsrb $7,0x1234,%xmm0,%xmm7
vmovdqa 0x1234,%ymm0
vmovdqa %ymm0,0x1234
vpermilpd 0x1234,%ymm0,%ymm7
vroundpd $7,0x1234,%ymm0
vextractf128 $7,%ymm0,0x1234
vperm2f128 $7,0x1234,%ymm0,%ymm7
vblendvpd %ymm0,0x1234,%ymm4,%ymm6
vldmxcsr (%ebp)
vmovdqa (%ebp),%xmm0
vmovdqa %xmm0,(%ebp)
vmovd %xmm0,(%ebp)
vcvtsd2si (%ebp),%eax
vcvtdq2pd (%ebp),%ymm0
vcvtpd2psy (%ebp),%xmm0
vpavgb (%ebp),%xmm0,%xmm7
vaeskeygenassist $7,(%ebp),%xmm0
vpextrb $7,%xmm0,(%ebp)
vcvtsi2sdl (%ebp),%xmm0,%xmm7
vpclmulqdq $7,(%ebp),%xmm0,%xmm7
vblendvps %xmm0,(%ebp),%xmm4,%xmm6
vpinsrb $7,(%ebp),%xmm0,%xmm7
vmovdqa (%ebp),%ymm0
vmovdqa %ymm0,(%ebp)
vpermilpd (%ebp),%ymm0,%ymm7
vroundpd $7,(%ebp),%ymm0
vextractf128 $7,%ymm0,(%ebp)
vperm2f128 $7,(%ebp),%ymm0,%ymm7
vblendvpd %ymm0,(%ebp),%ymm4,%ymm6
vldmxcsr (%esp)
vmovdqa (%esp),%xmm0
vmovdqa %xmm0,(%esp)
vmovd %xmm0,(%esp)
vcvtsd2si (%esp),%eax
vcvtdq2pd (%esp),%ymm0
vcvtpd2psy (%esp),%xmm0
vpavgb (%esp),%xmm0,%xmm7
vaeskeygenassist $7,(%esp),%xmm0
vpextrb $7,%xmm0,(%esp)
vcvtsi2sdl (%esp),%xmm0,%xmm7
vpclmulqdq $7,(%esp),%xmm0,%xmm7
vblendvps %xmm0,(%esp),%xmm4,%xmm6
vpinsrb $7,(%esp),%xmm0,%xmm7
vmovdqa (%esp),%ymm0
vmovdqa %ymm0,(%esp)
vpermilpd (%esp),%ymm0,%ymm7
vroundpd $7,(%esp),%ymm0
vextractf128 $7,%ymm0,(%esp)
vperm2f128 $7,(%esp),%ymm0,%ymm7
vblendvpd %ymm0,(%esp),%ymm4,%ymm6
vldmxcsr 0x99(%ebp)
vmovdqa 0x99(%ebp),%xmm0
vmovdqa %xmm0,0x99(%ebp)
vmovd %xmm0,0x99(%ebp)
vcvtsd2si 0x99(%ebp),%eax
vcvtdq2pd 0x99(%ebp),%ymm0
vcvtpd2psy 0x99(%ebp),%xmm0
vpavgb 0x99(%ebp),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%ebp),%xmm0
vpextrb $7,%xmm0,0x99(%ebp)
vcvtsi2sdl 0x99(%ebp),%xmm0,%xmm7
vpclmulqdq $7,0x99(%ebp),%xmm0,%xmm7
vblendvps %xmm0,0x99(%ebp),%xmm4,%xmm6
vpinsrb $7,0x99(%ebp),%xmm0,%xmm7
vmovdqa 0x99(%ebp),%ymm0
vmovdqa %ymm0,0x99(%ebp)
vpermilpd 0x99(%ebp),%ymm0,%ymm7
vroundpd $7,0x99(%ebp),%ymm0
vextractf128 $7,%ymm0,0x99(%ebp)
vperm2f128 $7,0x99(%ebp),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%ebp),%ymm4,%ymm6
vldmxcsr 0x99(,%eiz)
vmovdqa 0x99(,%eiz),%xmm0
vmovdqa %xmm0,0x99(,%eiz)
vmovd %xmm0,0x99(,%eiz)
vcvtsd2si 0x99(,%eiz),%eax
vcvtdq2pd 0x99(,%eiz),%ymm0
vcvtpd2psy 0x99(,%eiz),%xmm0
vpavgb 0x99(,%eiz),%xmm0,%xmm7
vaeskeygenassist $7,0x99(,%eiz),%xmm0
vpextrb $7,%xmm0,0x99(,%eiz)
vcvtsi2sdl 0x99(,%eiz),%xmm0,%xmm7
vpclmulqdq $7,0x99(,%eiz),%xmm0,%xmm7
vblendvps %xmm0,0x99(,%eiz),%xmm4,%xmm6
vpinsrb $7,0x99(,%eiz),%xmm0,%xmm7
vmovdqa 0x99(,%eiz),%ymm0
vmovdqa %ymm0,0x99(,%eiz)
vpermilpd 0x99(,%eiz),%ymm0,%ymm7
vroundpd $7,0x99(,%eiz),%ymm0
vextractf128 $7,%ymm0,0x99(,%eiz)
vperm2f128 $7,0x99(,%eiz),%ymm0,%ymm7
vblendvpd %ymm0,0x99(,%eiz),%ymm4,%ymm6
vldmxcsr 0x99(,%eiz,2)
vmovdqa 0x99(,%eiz,2),%xmm0
vmovdqa %xmm0,0x99(,%eiz,2)
vmovd %xmm0,0x99(,%eiz,2)
vcvtsd2si 0x99(,%eiz,2),%eax
vcvtdq2pd 0x99(,%eiz,2),%ymm0
vcvtpd2psy 0x99(,%eiz,2),%xmm0
vpavgb 0x99(,%eiz,2),%xmm0,%xmm7
vaeskeygenassist $7,0x99(,%eiz,2),%xmm0
vpextrb $7,%xmm0,0x99(,%eiz,2)
vcvtsi2sdl 0x99(,%eiz,2),%xmm0,%xmm7
vpclmulqdq $7,0x99(,%eiz,2),%xmm0,%xmm7
vblendvps %xmm0,0x99(,%eiz,2),%xmm4,%xmm6
vpinsrb $7,0x99(,%eiz,2),%xmm0,%xmm7
vmovdqa 0x99(,%eiz,2),%ymm0
vmovdqa %ymm0,0x99(,%eiz,2)
vpermilpd 0x99(,%eiz,2),%ymm0,%ymm7
vroundpd $7,0x99(,%eiz,2),%ymm0
vextractf128 $7,%ymm0,0x99(,%eiz,2)
vperm2f128 $7,0x99(,%eiz,2),%ymm0,%ymm7
vblendvpd %ymm0,0x99(,%eiz,2),%ymm4,%ymm6
vldmxcsr 0x99(%eax,%eiz)
vmovdqa 0x99(%eax,%eiz),%xmm0
vmovdqa %xmm0,0x99(%eax,%eiz)
vmovd %xmm0,0x99(%eax,%eiz)
vcvtsd2si 0x99(%eax,%eiz),%eax
vcvtdq2pd 0x99(%eax,%eiz),%ymm0
vcvtpd2psy 0x99(%eax,%eiz),%xmm0
vpavgb 0x99(%eax,%eiz),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%eax,%eiz),%xmm0
vpextrb $7,%xmm0,0x99(%eax,%eiz)
vcvtsi2sdl 0x99(%eax,%eiz),%xmm0,%xmm7
vpclmulqdq $7,0x99(%eax,%eiz),%xmm0,%xmm7
vblendvps %xmm0,0x99(%eax,%eiz),%xmm4,%xmm6
vpinsrb $7,0x99(%eax,%eiz),%xmm0,%xmm7
vmovdqa 0x99(%eax,%eiz),%ymm0
vmovdqa %ymm0,0x99(%eax,%eiz)
vpermilpd 0x99(%eax,%eiz),%ymm0,%ymm7
vroundpd $7,0x99(%eax,%eiz),%ymm0
vextractf128 $7,%ymm0,0x99(%eax,%eiz)
vperm2f128 $7,0x99(%eax,%eiz),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%eax,%eiz),%ymm4,%ymm6
vldmxcsr 0x99(%eax,%eiz,2)
vmovdqa 0x99(%eax,%eiz,2),%xmm0
vmovdqa %xmm0,0x99(%eax,%eiz,2)
vmovd %xmm0,0x99(%eax,%eiz,2)
vcvtsd2si 0x99(%eax,%eiz,2),%eax
vcvtdq2pd 0x99(%eax,%eiz,2),%ymm0
vcvtpd2psy 0x99(%eax,%eiz,2),%xmm0
vpavgb 0x99(%eax,%eiz,2),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%eax,%eiz,2),%xmm0
vpextrb $7,%xmm0,0x99(%eax,%eiz,2)
vcvtsi2sdl 0x99(%eax,%eiz,2),%xmm0,%xmm7
vpclmulqdq $7,0x99(%eax,%eiz,2),%xmm0,%xmm7
vblendvps %xmm0,0x99(%eax,%eiz,2),%xmm4,%xmm6
vpinsrb $7,0x99(%eax,%eiz,2),%xmm0,%xmm7
vmovdqa 0x99(%eax,%eiz,2),%ymm0
vmovdqa %ymm0,0x99(%eax,%eiz,2)
vpermilpd 0x99(%eax,%eiz,2),%ymm0,%ymm7
vroundpd $7,0x99(%eax,%eiz,2),%ymm0
vextractf128 $7,%ymm0,0x99(%eax,%eiz,2)
vperm2f128 $7,0x99(%eax,%eiz,2),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%eax,%eiz,2),%ymm4,%ymm6
vldmxcsr 0x99(%eax,%ebx,4)
vmovdqa 0x99(%eax,%ebx,4),%xmm0
vmovdqa %xmm0,0x99(%eax,%ebx,4)
vmovd %xmm0,0x99(%eax,%ebx,4)
vcvtsd2si 0x99(%eax,%ebx,4),%eax
vcvtdq2pd 0x99(%eax,%ebx,4),%ymm0
vcvtpd2psy 0x99(%eax,%ebx,4),%xmm0
vpavgb 0x99(%eax,%ebx,4),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%eax,%ebx,4),%xmm0
vpextrb $7,%xmm0,0x99(%eax,%ebx,4)
vcvtsi2sdl 0x99(%eax,%ebx,4),%xmm0,%xmm7
vpclmulqdq $7,0x99(%eax,%ebx,4),%xmm0,%xmm7
vblendvps %xmm0,0x99(%eax,%ebx,4),%xmm4,%xmm6
vpinsrb $7,0x99(%eax,%ebx,4),%xmm0,%xmm7
vmovdqa 0x99(%eax,%ebx,4),%ymm0
vmovdqa %ymm0,0x99(%eax,%ebx,4)
vpermilpd 0x99(%eax,%ebx,4),%ymm0,%ymm7
vroundpd $7,0x99(%eax,%ebx,4),%ymm0
vextractf128 $7,%ymm0,0x99(%eax,%ebx,4)
vperm2f128 $7,0x99(%eax,%ebx,4),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%eax,%ebx,4),%ymm4,%ymm6
vldmxcsr 0x99(%esp,%ecx,8)
vmovdqa 0x99(%esp,%ecx,8),%xmm0
vmovdqa %xmm0,0x99(%esp,%ecx,8)
vmovd %xmm0,0x99(%esp,%ecx,8)
vcvtsd2si 0x99(%esp,%ecx,8),%eax
vcvtdq2pd 0x99(%esp,%ecx,8),%ymm0
vcvtpd2psy 0x99(%esp,%ecx,8),%xmm0
vpavgb 0x99(%esp,%ecx,8),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%esp,%ecx,8),%xmm0
vpextrb $7,%xmm0,0x99(%esp,%ecx,8)
vcvtsi2sdl 0x99(%esp,%ecx,8),%xmm0,%xmm7
vpclmulqdq $7,0x99(%esp,%ecx,8),%xmm0,%xmm7
vblendvps %xmm0,0x99(%esp,%ecx,8),%xmm4,%xmm6
vpinsrb $7,0x99(%esp,%ecx,8),%xmm0,%xmm7
vmovdqa 0x99(%esp,%ecx,8),%ymm0
vmovdqa %ymm0,0x99(%esp,%ecx,8)
vpermilpd 0x99(%esp,%ecx,8),%ymm0,%ymm7
vroundpd $7,0x99(%esp,%ecx,8),%ymm0
vextractf128 $7,%ymm0,0x99(%esp,%ecx,8)
vperm2f128 $7,0x99(%esp,%ecx,8),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%esp,%ecx,8),%ymm4,%ymm6
vldmxcsr 0x99(%ebp,%edx,1)
vmovdqa 0x99(%ebp,%edx,1),%xmm0
vmovdqa %xmm0,0x99(%ebp,%edx,1)
vmovd %xmm0,0x99(%ebp,%edx,1)
vcvtsd2si 0x99(%ebp,%edx,1),%eax
vcvtdq2pd 0x99(%ebp,%edx,1),%ymm0
vcvtpd2psy 0x99(%ebp,%edx,1),%xmm0
vpavgb 0x99(%ebp,%edx,1),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%ebp,%edx,1),%xmm0
vpextrb $7,%xmm0,0x99(%ebp,%edx,1)
vcvtsi2sdl 0x99(%ebp,%edx,1),%xmm0,%xmm7
vpclmulqdq $7,0x99(%ebp,%edx,1),%xmm0,%xmm7
vblendvps %xmm0,0x99(%ebp,%edx,1),%xmm4,%xmm6
vpinsrb $7,0x99(%ebp,%edx,1),%xmm0,%xmm7
vmovdqa 0x99(%ebp,%edx,1),%ymm0
vmovdqa %ymm0,0x99(%ebp,%edx,1)
vpermilpd 0x99(%ebp,%edx,1),%ymm0,%ymm7
vroundpd $7,0x99(%ebp,%edx,1),%ymm0
vextractf128 $7,%ymm0,0x99(%ebp,%edx,1)
vperm2f128 $7,0x99(%ebp,%edx,1),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%ebp,%edx,1),%ymm4,%ymm6
# Tests for all register operands.
vmovmskpd %xmm0,%eax
vpslld $7,%xmm0,%xmm7
vmovmskps %ymm0,%eax
.intel_syntax noprefix
# Tests for op mem32 (MXCSR is a 32-bit register; operands below use DWORD PTR)
vldmxcsr DWORD PTR [ecx]
vldmxcsr [ecx]
vstmxcsr DWORD PTR [ecx]
vstmxcsr [ecx]
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd ymm6,ymm4,YMMWORD PTR [ecx]
vmaskmovpd YMMWORD PTR [ecx],ymm6,ymm4
vmaskmovpd ymm6,ymm4,[ecx]
vmaskmovpd [ecx],ymm6,ymm4
vmaskmovps ymm6,ymm4,YMMWORD PTR [ecx]
vmaskmovps YMMWORD PTR [ecx],ymm6,ymm4
vmaskmovps ymm6,ymm4,[ecx]
vmaskmovps [ecx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermilpd ymm2,ymm6,7
vpermilpd ymm6,YMMWORD PTR [ecx],7
vpermilpd ymm6,[ecx],7
vpermilps ymm2,ymm6,7
vpermilps ymm6,YMMWORD PTR [ecx],7
vpermilps ymm6,[ecx],7
vroundpd ymm2,ymm6,7
vroundpd ymm6,YMMWORD PTR [ecx],7
vroundpd ymm6,[ecx],7
vroundps ymm2,ymm6,7
vroundps ymm6,YMMWORD PTR [ecx],7
vroundps ymm6,[ecx],7
# Tests for op ymm/mem256, ymm, ymm
vaddpd ymm2,ymm6,ymm4
vaddpd ymm2,ymm6,YMMWORD PTR [ecx]
vaddpd ymm2,ymm6,[ecx]
vaddps ymm2,ymm6,ymm4
vaddps ymm2,ymm6,YMMWORD PTR [ecx]
vaddps ymm2,ymm6,[ecx]
vaddsubpd ymm2,ymm6,ymm4
vaddsubpd ymm2,ymm6,YMMWORD PTR [ecx]
vaddsubpd ymm2,ymm6,[ecx]
vaddsubps ymm2,ymm6,ymm4
vaddsubps ymm2,ymm6,YMMWORD PTR [ecx]
vaddsubps ymm2,ymm6,[ecx]
vandnpd ymm2,ymm6,ymm4
vandnpd ymm2,ymm6,YMMWORD PTR [ecx]
vandnpd ymm2,ymm6,[ecx]
vandnps ymm2,ymm6,ymm4
vandnps ymm2,ymm6,YMMWORD PTR [ecx]
vandnps ymm2,ymm6,[ecx]
vandpd ymm2,ymm6,ymm4
vandpd ymm2,ymm6,YMMWORD PTR [ecx]
vandpd ymm2,ymm6,[ecx]
vandps ymm2,ymm6,ymm4
vandps ymm2,ymm6,YMMWORD PTR [ecx]
vandps ymm2,ymm6,[ecx]
vdivpd ymm2,ymm6,ymm4
vdivpd ymm2,ymm6,YMMWORD PTR [ecx]
vdivpd ymm2,ymm6,[ecx]
vdivps ymm2,ymm6,ymm4
vdivps ymm2,ymm6,YMMWORD PTR [ecx]
vdivps ymm2,ymm6,[ecx]
vhaddpd ymm2,ymm6,ymm4
vhaddpd ymm2,ymm6,YMMWORD PTR [ecx]
vhaddpd ymm2,ymm6,[ecx]
vhaddps ymm2,ymm6,ymm4
vhaddps ymm2,ymm6,YMMWORD PTR [ecx]
vhaddps ymm2,ymm6,[ecx]
vhsubpd ymm2,ymm6,ymm4
vhsubpd ymm2,ymm6,YMMWORD PTR [ecx]
vhsubpd ymm2,ymm6,[ecx]
vhsubps ymm2,ymm6,ymm4
vhsubps ymm2,ymm6,YMMWORD PTR [ecx]
vhsubps ymm2,ymm6,[ecx]
vmaxpd ymm2,ymm6,ymm4
vmaxpd ymm2,ymm6,YMMWORD PTR [ecx]
vmaxpd ymm2,ymm6,[ecx]
vmaxps ymm2,ymm6,ymm4
vmaxps ymm2,ymm6,YMMWORD PTR [ecx]
vmaxps ymm2,ymm6,[ecx]
vminpd ymm2,ymm6,ymm4
vminpd ymm2,ymm6,YMMWORD PTR [ecx]
vminpd ymm2,ymm6,[ecx]
vminps ymm2,ymm6,ymm4
vminps ymm2,ymm6,YMMWORD PTR [ecx]
vminps ymm2,ymm6,[ecx]
vmulpd ymm2,ymm6,ymm4
vmulpd ymm2,ymm6,YMMWORD PTR [ecx]
vmulpd ymm2,ymm6,[ecx]
vmulps ymm2,ymm6,ymm4
vmulps ymm2,ymm6,YMMWORD PTR [ecx]
vmulps ymm2,ymm6,[ecx]
vorpd ymm2,ymm6,ymm4
vorpd ymm2,ymm6,YMMWORD PTR [ecx]
vorpd ymm2,ymm6,[ecx]
vorps ymm2,ymm6,ymm4
vorps ymm2,ymm6,YMMWORD PTR [ecx]
vorps ymm2,ymm6,[ecx]
vpermilpd ymm2,ymm6,ymm4
vpermilpd ymm2,ymm6,YMMWORD PTR [ecx]
vpermilpd ymm2,ymm6,[ecx]
vpermilps ymm2,ymm6,ymm4
vpermilps ymm2,ymm6,YMMWORD PTR [ecx]
vpermilps ymm2,ymm6,[ecx]
vsubpd ymm2,ymm6,ymm4
vsubpd ymm2,ymm6,YMMWORD PTR [ecx]
vsubpd ymm2,ymm6,[ecx]
vsubps ymm2,ymm6,ymm4
vsubps ymm2,ymm6,YMMWORD PTR [ecx]
vsubps ymm2,ymm6,[ecx]
vunpckhpd ymm2,ymm6,ymm4
vunpckhpd ymm2,ymm6,YMMWORD PTR [ecx]
vunpckhpd ymm2,ymm6,[ecx]
vunpckhps ymm2,ymm6,ymm4
vunpckhps ymm2,ymm6,YMMWORD PTR [ecx]
vunpckhps ymm2,ymm6,[ecx]
vunpcklpd ymm2,ymm6,ymm4
vunpcklpd ymm2,ymm6,YMMWORD PTR [ecx]
vunpcklpd ymm2,ymm6,[ecx]
vunpcklps ymm2,ymm6,ymm4
vunpcklps ymm2,ymm6,YMMWORD PTR [ecx]
vunpcklps ymm2,ymm6,[ecx]
vxorpd ymm2,ymm6,ymm4
vxorpd ymm2,ymm6,YMMWORD PTR [ecx]
vxorpd ymm2,ymm6,[ecx]
vxorps ymm2,ymm6,ymm4
vxorps ymm2,ymm6,YMMWORD PTR [ecx]
vxorps ymm2,ymm6,[ecx]
vcmpeqpd ymm2,ymm6,ymm4
vcmpeqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeqpd ymm2,ymm6,[ecx]
vcmpltpd ymm2,ymm6,ymm4
vcmpltpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpltpd ymm2,ymm6,[ecx]
vcmplepd ymm2,ymm6,ymm4
vcmplepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmplepd ymm2,ymm6,[ecx]
vcmpunordpd ymm2,ymm6,ymm4
vcmpunordpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunordpd ymm2,ymm6,[ecx]
vcmpneqpd ymm2,ymm6,ymm4
vcmpneqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneqpd ymm2,ymm6,[ecx]
vcmpnltpd ymm2,ymm6,ymm4
vcmpnltpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnltpd ymm2,ymm6,[ecx]
vcmpnlepd ymm2,ymm6,ymm4
vcmpnlepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnlepd ymm2,ymm6,[ecx]
vcmpordpd ymm2,ymm6,ymm4
vcmpordpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpordpd ymm2,ymm6,[ecx]
vcmpeq_uqpd ymm2,ymm6,ymm4
vcmpeq_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_uqpd ymm2,ymm6,[ecx]
vcmpngepd ymm2,ymm6,ymm4
vcmpngepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngepd ymm2,ymm6,[ecx]
vcmpngtpd ymm2,ymm6,ymm4
vcmpngtpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngtpd ymm2,ymm6,[ecx]
vcmpfalsepd ymm2,ymm6,ymm4
vcmpfalsepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalsepd ymm2,ymm6,[ecx]
vcmpneq_oqpd ymm2,ymm6,ymm4
vcmpneq_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_oqpd ymm2,ymm6,[ecx]
vcmpgepd ymm2,ymm6,ymm4
vcmpgepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgepd ymm2,ymm6,[ecx]
vcmpgtpd ymm2,ymm6,ymm4
vcmpgtpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgtpd ymm2,ymm6,[ecx]
vcmptruepd ymm2,ymm6,ymm4
vcmptruepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmptruepd ymm2,ymm6,[ecx]
vcmpeq_ospd ymm2,ymm6,ymm4
vcmpeq_ospd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_ospd ymm2,ymm6,[ecx]
vcmplt_oqpd ymm2,ymm6,ymm4
vcmplt_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmplt_oqpd ymm2,ymm6,[ecx]
vcmple_oqpd ymm2,ymm6,ymm4
vcmple_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmple_oqpd ymm2,ymm6,[ecx]
vcmpunord_spd ymm2,ymm6,ymm4
vcmpunord_spd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunord_spd ymm2,ymm6,[ecx]
vcmpneq_uspd ymm2,ymm6,ymm4
vcmpneq_uspd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_uspd ymm2,ymm6,[ecx]
vcmpnlt_uqpd ymm2,ymm6,ymm4
vcmpnlt_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnlt_uqpd ymm2,ymm6,[ecx]
vcmpnle_uqpd ymm2,ymm6,ymm4
vcmpnle_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnle_uqpd ymm2,ymm6,[ecx]
vcmpord_spd ymm2,ymm6,ymm4
vcmpord_spd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpord_spd ymm2,ymm6,[ecx]
vcmpeq_uspd ymm2,ymm6,ymm4
vcmpeq_uspd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_uspd ymm2,ymm6,[ecx]
vcmpnge_uqpd ymm2,ymm6,ymm4
vcmpnge_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnge_uqpd ymm2,ymm6,[ecx]
vcmpngt_uqpd ymm2,ymm6,ymm4
vcmpngt_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngt_uqpd ymm2,ymm6,[ecx]
vcmpfalse_ospd ymm2,ymm6,ymm4
vcmpfalse_ospd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalse_ospd ymm2,ymm6,[ecx]
vcmpneq_ospd ymm2,ymm6,ymm4
vcmpneq_ospd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_ospd ymm2,ymm6,[ecx]
vcmpge_oqpd ymm2,ymm6,ymm4
vcmpge_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpge_oqpd ymm2,ymm6,[ecx]
vcmpgt_oqpd ymm2,ymm6,ymm4
vcmpgt_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgt_oqpd ymm2,ymm6,[ecx]
vcmptrue_uspd ymm2,ymm6,ymm4
vcmptrue_uspd ymm2,ymm6,YMMWORD PTR [ecx]
vcmptrue_uspd ymm2,ymm6,[ecx]
vcmpeqps ymm2,ymm6,ymm4
vcmpeqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeqps ymm2,ymm6,[ecx]
vcmpltps ymm2,ymm6,ymm4
vcmpltps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpltps ymm2,ymm6,[ecx]
vcmpleps ymm2,ymm6,ymm4
vcmpleps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpleps ymm2,ymm6,[ecx]
vcmpunordps ymm2,ymm6,ymm4
vcmpunordps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunordps ymm2,ymm6,[ecx]
vcmpneqps ymm2,ymm6,ymm4
vcmpneqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneqps ymm2,ymm6,[ecx]
vcmpnltps ymm2,ymm6,ymm4
vcmpnltps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnltps ymm2,ymm6,[ecx]
vcmpnleps ymm2,ymm6,ymm4
vcmpnleps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnleps ymm2,ymm6,[ecx]
vcmpordps ymm2,ymm6,ymm4
vcmpordps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpordps ymm2,ymm6,[ecx]
vcmpeq_uqps ymm2,ymm6,ymm4
vcmpeq_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_uqps ymm2,ymm6,[ecx]
vcmpngeps ymm2,ymm6,ymm4
vcmpngeps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngeps ymm2,ymm6,[ecx]
vcmpngtps ymm2,ymm6,ymm4
vcmpngtps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngtps ymm2,ymm6,[ecx]
vcmpfalseps ymm2,ymm6,ymm4
vcmpfalseps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalseps ymm2,ymm6,[ecx]
vcmpneq_oqps ymm2,ymm6,ymm4
vcmpneq_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_oqps ymm2,ymm6,[ecx]
vcmpgeps ymm2,ymm6,ymm4
vcmpgeps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgeps ymm2,ymm6,[ecx]
vcmpgtps ymm2,ymm6,ymm4
vcmpgtps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgtps ymm2,ymm6,[ecx]
vcmptrueps ymm2,ymm6,ymm4
vcmptrueps ymm2,ymm6,YMMWORD PTR [ecx]
vcmptrueps ymm2,ymm6,[ecx]
vcmpeq_osps ymm2,ymm6,ymm4
vcmpeq_osps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_osps ymm2,ymm6,[ecx]
vcmplt_oqps ymm2,ymm6,ymm4
vcmplt_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmplt_oqps ymm2,ymm6,[ecx]
vcmple_oqps ymm2,ymm6,ymm4
vcmple_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmple_oqps ymm2,ymm6,[ecx]
vcmpunord_sps ymm2,ymm6,ymm4
vcmpunord_sps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunord_sps ymm2,ymm6,[ecx]
vcmpneq_usps ymm2,ymm6,ymm4
vcmpneq_usps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_usps ymm2,ymm6,[ecx]
vcmpnlt_uqps ymm2,ymm6,ymm4
vcmpnlt_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnlt_uqps ymm2,ymm6,[ecx]
vcmpnle_uqps ymm2,ymm6,ymm4
vcmpnle_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnle_uqps ymm2,ymm6,[ecx]
vcmpord_sps ymm2,ymm6,ymm4
vcmpord_sps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpord_sps ymm2,ymm6,[ecx]
vcmpeq_usps ymm2,ymm6,ymm4
vcmpeq_usps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_usps ymm2,ymm6,[ecx]
vcmpnge_uqps ymm2,ymm6,ymm4
vcmpnge_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnge_uqps ymm2,ymm6,[ecx]
vcmpngt_uqps ymm2,ymm6,ymm4
vcmpngt_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngt_uqps ymm2,ymm6,[ecx]
vcmpfalse_osps ymm2,ymm6,ymm4
vcmpfalse_osps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalse_osps ymm2,ymm6,[ecx]
vcmpneq_osps ymm2,ymm6,ymm4
vcmpneq_osps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_osps ymm2,ymm6,[ecx]
vcmpge_oqps ymm2,ymm6,ymm4
vcmpge_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpge_oqps ymm2,ymm6,[ecx]
vcmpgt_oqps ymm2,ymm6,ymm4
vcmpgt_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgt_oqps ymm2,ymm6,[ecx]
vcmptrue_usps ymm2,ymm6,ymm4
vcmptrue_usps ymm2,ymm6,YMMWORD PTR [ecx]
vcmptrue_usps ymm2,ymm6,[ecx]
vgf2p8mulb ymm6, ymm5, ymm4
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [ecx]
vgf2p8mulb ymm6, ymm5, [ecx]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx+4064]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx+4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx-4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx-4128]
# Tests for op ymm/mem256, xmm
vcvtpd2dq xmm4,ymm4
vcvtpd2dq xmm4,YMMWORD PTR [ecx]
vcvtpd2ps xmm4,ymm4
vcvtpd2ps xmm4,YMMWORD PTR [ecx]
vcvttpd2dq xmm4,ymm4
vcvttpd2dq xmm4,YMMWORD PTR [ecx]
# Tests for op ymm/mem256, ymm
vcvtdq2ps ymm6,ymm4
vcvtdq2ps ymm4,YMMWORD PTR [ecx]
vcvtdq2ps ymm4,[ecx]
vcvtps2dq ymm6,ymm4
vcvtps2dq ymm4,YMMWORD PTR [ecx]
vcvtps2dq ymm4,[ecx]
vcvttps2dq ymm6,ymm4
vcvttps2dq ymm4,YMMWORD PTR [ecx]
vcvttps2dq ymm4,[ecx]
vmovapd ymm6,ymm4
vmovapd ymm4,YMMWORD PTR [ecx]
vmovapd ymm4,[ecx]
vmovaps ymm6,ymm4
vmovaps ymm4,YMMWORD PTR [ecx]
vmovaps ymm4,[ecx]
vmovdqa ymm6,ymm4
vmovdqa ymm4,YMMWORD PTR [ecx]
vmovdqa ymm4,[ecx]
vmovdqu ymm6,ymm4
vmovdqu ymm4,YMMWORD PTR [ecx]
vmovdqu ymm4,[ecx]
vmovddup ymm6,ymm4
vmovddup ymm4,YMMWORD PTR [ecx]
vmovddup ymm4,[ecx]
vmovshdup ymm6,ymm4
vmovshdup ymm4,YMMWORD PTR [ecx]
vmovshdup ymm4,[ecx]
vmovsldup ymm6,ymm4
vmovsldup ymm4,YMMWORD PTR [ecx]
vmovsldup ymm4,[ecx]
vmovupd ymm6,ymm4
vmovupd ymm4,YMMWORD PTR [ecx]
vmovupd ymm4,[ecx]
vmovups ymm6,ymm4
vmovups ymm4,YMMWORD PTR [ecx]
vmovups ymm4,[ecx]
vptest ymm6,ymm4
vptest ymm4,YMMWORD PTR [ecx]
vptest ymm4,[ecx]
vrcpps ymm6,ymm4
vrcpps ymm4,YMMWORD PTR [ecx]
vrcpps ymm4,[ecx]
vrsqrtps ymm6,ymm4
vrsqrtps ymm4,YMMWORD PTR [ecx]
vrsqrtps ymm4,[ecx]
vsqrtpd ymm6,ymm4
vsqrtpd ymm4,YMMWORD PTR [ecx]
vsqrtpd ymm4,[ecx]
vsqrtps ymm6,ymm4
vsqrtps ymm4,YMMWORD PTR [ecx]
vsqrtps ymm4,[ecx]
vtestpd ymm6,ymm4
vtestpd ymm4,YMMWORD PTR [ecx]
vtestpd ymm4,[ecx]
vtestps ymm6,ymm4
vtestps ymm4,YMMWORD PTR [ecx]
vtestps ymm4,[ecx]
# Tests for op ymm, ymm/mem256
vmovapd ymm6,ymm4
vmovapd YMMWORD PTR [ecx],ymm4
vmovapd [ecx],ymm4
vmovaps ymm6,ymm4
vmovaps YMMWORD PTR [ecx],ymm4
vmovaps [ecx],ymm4
vmovdqa ymm6,ymm4
vmovdqa YMMWORD PTR [ecx],ymm4
vmovdqa [ecx],ymm4
vmovdqu ymm6,ymm4
vmovdqu YMMWORD PTR [ecx],ymm4
vmovdqu [ecx],ymm4
vmovupd ymm6,ymm4
vmovupd YMMWORD PTR [ecx],ymm4
vmovupd [ecx],ymm4
vmovups ymm6,ymm4
vmovups YMMWORD PTR [ecx],ymm4
vmovups [ecx],ymm4
# Tests for op mem256, ymm
vlddqu ymm4,YMMWORD PTR [ecx]
vlddqu ymm4,[ecx]
# Tests for op ymm, mem256
vmovntdq YMMWORD PTR [ecx],ymm4
vmovntdq [ecx],ymm4
vmovntpd YMMWORD PTR [ecx],ymm4
vmovntpd [ecx],ymm4
vmovntps YMMWORD PTR [ecx],ymm4
vmovntps [ecx],ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd ymm2,ymm6,ymm4,7
vblendpd ymm2,ymm6,YMMWORD PTR [ecx],7
vblendpd ymm2,ymm6,[ecx],7
vblendps ymm2,ymm6,ymm4,7
vblendps ymm2,ymm6,YMMWORD PTR [ecx],7
vblendps ymm2,ymm6,[ecx],7
vcmppd ymm2,ymm6,ymm4,7
vcmppd ymm2,ymm6,YMMWORD PTR [ecx],7
vcmppd ymm2,ymm6,[ecx],7
vcmpps ymm2,ymm6,ymm4,7
vcmpps ymm2,ymm6,YMMWORD PTR [ecx],7
vcmpps ymm2,ymm6,[ecx],7
vdpps ymm2,ymm6,ymm4,7
vdpps ymm2,ymm6,YMMWORD PTR [ecx],7
vdpps ymm2,ymm6,[ecx],7
vperm2f128 ymm2,ymm6,ymm4,7
vperm2f128 ymm2,ymm6,YMMWORD PTR [ecx],7
vperm2f128 ymm2,ymm6,[ecx],7
vshufpd ymm2,ymm6,ymm4,7
vshufpd ymm2,ymm6,YMMWORD PTR [ecx],7
vshufpd ymm2,ymm6,[ecx],7
vshufps ymm2,ymm6,ymm4,7
vshufps ymm2,ymm6,YMMWORD PTR [ecx],7
vshufps ymm2,ymm6,[ecx],7
vgf2p8affineqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineqb ymm6, ymm5, ymm4, 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [ecx], 123
vgf2p8affineqb ymm6, ymm5, [ecx], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx+4064], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx+4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx-4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx-4128], 123
vgf2p8affineinvqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineinvqb ymm6, ymm5, ymm4, 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [ecx], 123
vgf2p8affineinvqb ymm6, ymm5, [ecx], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx+4064], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx+4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx-4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx-4128], 123
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd ymm7,ymm2,ymm6,ymm4
vblendvpd ymm7,ymm2,YMMWORD PTR [ecx],ymm4
vblendvpd ymm7,ymm2,[ecx],ymm4
vblendvps ymm7,ymm2,ymm6,ymm4
vblendvps ymm7,ymm2,YMMWORD PTR [ecx],ymm4
vblendvps ymm7,ymm2,[ecx],ymm4
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 ymm6,ymm4,xmm4,7
vinsertf128 ymm6,ymm4,XMMWORD PTR [ecx],7
vinsertf128 ymm6,ymm4,[ecx],7
# Tests for op imm8, ymm, xmm/mem128
vextractf128 xmm4,ymm4,7
vextractf128 XMMWORD PTR [ecx],ymm4,7
vextractf128 [ecx],ymm4,7
# Tests for op mem128, ymm
vbroadcastf128 ymm4,XMMWORD PTR [ecx]
vbroadcastf128 ymm4,[ecx]
# Tests for op xmm/mem128, xmm
vcvtdq2ps xmm6,xmm4
vcvtdq2ps xmm4,XMMWORD PTR [ecx]
vcvtdq2ps xmm4,[ecx]
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm4,XMMWORD PTR [ecx]
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm4,XMMWORD PTR [ecx]
vcvtps2dq xmm6,xmm4
vcvtps2dq xmm4,XMMWORD PTR [ecx]
vcvtps2dq xmm4,[ecx]
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm4,XMMWORD PTR [ecx]
vcvttps2dq xmm6,xmm4
vcvttps2dq xmm4,XMMWORD PTR [ecx]
vcvttps2dq xmm4,[ecx]
vmovapd xmm6,xmm4
vmovapd xmm4,XMMWORD PTR [ecx]
vmovapd xmm4,[ecx]
vmovaps xmm6,xmm4
vmovaps xmm4,XMMWORD PTR [ecx]
vmovaps xmm4,[ecx]
vmovdqa xmm6,xmm4
vmovdqa xmm4,XMMWORD PTR [ecx]
vmovdqa xmm4,[ecx]
vmovdqu xmm6,xmm4
vmovdqu xmm4,XMMWORD PTR [ecx]
vmovdqu xmm4,[ecx]
vmovshdup xmm6,xmm4
vmovshdup xmm4,XMMWORD PTR [ecx]
vmovshdup xmm4,[ecx]
vmovsldup xmm6,xmm4
vmovsldup xmm4,XMMWORD PTR [ecx]
vmovsldup xmm4,[ecx]
vmovupd xmm6,xmm4
vmovupd xmm4,XMMWORD PTR [ecx]
vmovupd xmm4,[ecx]
vmovups xmm6,xmm4
vmovups xmm4,XMMWORD PTR [ecx]
vmovups xmm4,[ecx]
vpabsb xmm6,xmm4
vpabsb xmm4,XMMWORD PTR [ecx]
vpabsb xmm4,[ecx]
vpabsw xmm6,xmm4
vpabsw xmm4,XMMWORD PTR [ecx]
vpabsw xmm4,[ecx]
vpabsd xmm6,xmm4
vpabsd xmm4,XMMWORD PTR [ecx]
vpabsd xmm4,[ecx]
vphminposuw xmm6,xmm4
vphminposuw xmm4,XMMWORD PTR [ecx]
vphminposuw xmm4,[ecx]
vptest xmm6,xmm4
vptest xmm4,XMMWORD PTR [ecx]
vptest xmm4,[ecx]
vtestps xmm6,xmm4
vtestps xmm4,XMMWORD PTR [ecx]
vtestps xmm4,[ecx]
vtestpd xmm6,xmm4
vtestpd xmm4,XMMWORD PTR [ecx]
vtestpd xmm4,[ecx]
vrcpps xmm6,xmm4
vrcpps xmm4,XMMWORD PTR [ecx]
vrcpps xmm4,[ecx]
vrsqrtps xmm6,xmm4
vrsqrtps xmm4,XMMWORD PTR [ecx]
vrsqrtps xmm4,[ecx]
vsqrtpd xmm6,xmm4
vsqrtpd xmm4,XMMWORD PTR [ecx]
vsqrtpd xmm4,[ecx]
vsqrtps xmm6,xmm4
vsqrtps xmm4,XMMWORD PTR [ecx]
vsqrtps xmm4,[ecx]
vaesimc xmm6,xmm4
vaesimc xmm4,XMMWORD PTR [ecx]
vaesimc xmm4,[ecx]
# Tests for op xmm, xmm/mem128
vmovapd xmm6,xmm4
vmovapd XMMWORD PTR [ecx],xmm4
vmovapd [ecx],xmm4
vmovaps xmm6,xmm4
vmovaps XMMWORD PTR [ecx],xmm4
vmovaps [ecx],xmm4
vmovdqa xmm6,xmm4
vmovdqa XMMWORD PTR [ecx],xmm4
vmovdqa [ecx],xmm4
vmovdqu xmm6,xmm4
vmovdqu XMMWORD PTR [ecx],xmm4
vmovdqu [ecx],xmm4
vmovupd xmm6,xmm4
vmovupd XMMWORD PTR [ecx],xmm4
vmovupd [ecx],xmm4
vmovups xmm6,xmm4
vmovups XMMWORD PTR [ecx],xmm4
vmovups [ecx],xmm4
# Tests for op mem128, xmm
vlddqu xmm4,XMMWORD PTR [ecx]
vlddqu xmm4,[ecx]
vmovntdqa xmm4,XMMWORD PTR [ecx]
vmovntdqa xmm4,[ecx]
# Tests for op xmm, mem128
vmovntdq XMMWORD PTR [ecx],xmm4
vmovntdq [ecx],xmm4
vmovntpd XMMWORD PTR [ecx],xmm4
vmovntpd [ecx],xmm4
vmovntps XMMWORD PTR [ecx],xmm4
vmovntps [ecx],xmm4
# Tests for op xmm/mem128, ymm
vcvtdq2pd ymm4,xmm4
vcvtdq2pd ymm4,XMMWORD PTR [ecx]
vcvtdq2pd ymm4,[ecx]
vcvtps2pd ymm4,xmm4
vcvtps2pd ymm4,XMMWORD PTR [ecx]
vcvtps2pd ymm4,[ecx]
# Tests for op xmm/mem128, xmm, xmm
vaddpd xmm2,xmm6,xmm4
vaddpd xmm7,xmm6,XMMWORD PTR [ecx]
vaddpd xmm7,xmm6,[ecx]
vaddps xmm2,xmm6,xmm4
vaddps xmm7,xmm6,XMMWORD PTR [ecx]
vaddps xmm7,xmm6,[ecx]
vaddsubpd xmm2,xmm6,xmm4
vaddsubpd xmm7,xmm6,XMMWORD PTR [ecx]
vaddsubpd xmm7,xmm6,[ecx]
vaddsubps xmm2,xmm6,xmm4
vaddsubps xmm7,xmm6,XMMWORD PTR [ecx]
vaddsubps xmm7,xmm6,[ecx]
vandnpd xmm2,xmm6,xmm4
vandnpd xmm7,xmm6,XMMWORD PTR [ecx]
vandnpd xmm7,xmm6,[ecx]
vandnps xmm2,xmm6,xmm4
vandnps xmm7,xmm6,XMMWORD PTR [ecx]
vandnps xmm7,xmm6,[ecx]
vandpd xmm2,xmm6,xmm4
vandpd xmm7,xmm6,XMMWORD PTR [ecx]
vandpd xmm7,xmm6,[ecx]
vandps xmm2,xmm6,xmm4
vandps xmm7,xmm6,XMMWORD PTR [ecx]
vandps xmm7,xmm6,[ecx]
vdivpd xmm2,xmm6,xmm4
vdivpd xmm7,xmm6,XMMWORD PTR [ecx]
vdivpd xmm7,xmm6,[ecx]
vdivps xmm2,xmm6,xmm4
vdivps xmm7,xmm6,XMMWORD PTR [ecx]
vdivps xmm7,xmm6,[ecx]
vhaddpd xmm2,xmm6,xmm4
vhaddpd xmm7,xmm6,XMMWORD PTR [ecx]
vhaddpd xmm7,xmm6,[ecx]
vhaddps xmm2,xmm6,xmm4
vhaddps xmm7,xmm6,XMMWORD PTR [ecx]
vhaddps xmm7,xmm6,[ecx]
vhsubpd xmm2,xmm6,xmm4
vhsubpd xmm7,xmm6,XMMWORD PTR [ecx]
vhsubpd xmm7,xmm6,[ecx]
vhsubps xmm2,xmm6,xmm4
vhsubps xmm7,xmm6,XMMWORD PTR [ecx]
vhsubps xmm7,xmm6,[ecx]
vmaxpd xmm2,xmm6,xmm4
vmaxpd xmm7,xmm6,XMMWORD PTR [ecx]
vmaxpd xmm7,xmm6,[ecx]
vmaxps xmm2,xmm6,xmm4
vmaxps xmm7,xmm6,XMMWORD PTR [ecx]
vmaxps xmm7,xmm6,[ecx]
vminpd xmm2,xmm6,xmm4
vminpd xmm7,xmm6,XMMWORD PTR [ecx]
vminpd xmm7,xmm6,[ecx]
vminps xmm2,xmm6,xmm4
vminps xmm7,xmm6,XMMWORD PTR [ecx]
vminps xmm7,xmm6,[ecx]
vmulpd xmm2,xmm6,xmm4
vmulpd xmm7,xmm6,XMMWORD PTR [ecx]
vmulpd xmm7,xmm6,[ecx]
vmulps xmm2,xmm6,xmm4
vmulps xmm7,xmm6,XMMWORD PTR [ecx]
vmulps xmm7,xmm6,[ecx]
vorpd xmm2,xmm6,xmm4
vorpd xmm7,xmm6,XMMWORD PTR [ecx]
vorpd xmm7,xmm6,[ecx]
vorps xmm2,xmm6,xmm4
vorps xmm7,xmm6,XMMWORD PTR [ecx]
vorps xmm7,xmm6,[ecx]
vpacksswb xmm2,xmm6,xmm4
vpacksswb xmm7,xmm6,XMMWORD PTR [ecx]
vpacksswb xmm7,xmm6,[ecx]
vpackssdw xmm2,xmm6,xmm4
vpackssdw xmm7,xmm6,XMMWORD PTR [ecx]
vpackssdw xmm7,xmm6,[ecx]
vpackuswb xmm2,xmm6,xmm4
vpackuswb xmm7,xmm6,XMMWORD PTR [ecx]
vpackuswb xmm7,xmm6,[ecx]
vpackusdw xmm2,xmm6,xmm4
vpackusdw xmm7,xmm6,XMMWORD PTR [ecx]
vpackusdw xmm7,xmm6,[ecx]
vpaddb xmm2,xmm6,xmm4
vpaddb xmm7,xmm6,XMMWORD PTR [ecx]
vpaddb xmm7,xmm6,[ecx]
vpaddw xmm2,xmm6,xmm4
vpaddw xmm7,xmm6,XMMWORD PTR [ecx]
vpaddw xmm7,xmm6,[ecx]
vpaddd xmm2,xmm6,xmm4
vpaddd xmm7,xmm6,XMMWORD PTR [ecx]
vpaddd xmm7,xmm6,[ecx]
vpaddq xmm2,xmm6,xmm4
vpaddq xmm7,xmm6,XMMWORD PTR [ecx]
vpaddq xmm7,xmm6,[ecx]
vpaddsb xmm2,xmm6,xmm4
vpaddsb xmm7,xmm6,XMMWORD PTR [ecx]
vpaddsb xmm7,xmm6,[ecx]
vpaddsw xmm2,xmm6,xmm4
vpaddsw xmm7,xmm6,XMMWORD PTR [ecx]
vpaddsw xmm7,xmm6,[ecx]
vpaddusb xmm2,xmm6,xmm4
vpaddusb xmm7,xmm6,XMMWORD PTR [ecx]
vpaddusb xmm7,xmm6,[ecx]
vpaddusw xmm2,xmm6,xmm4
vpaddusw xmm7,xmm6,XMMWORD PTR [ecx]
vpaddusw xmm7,xmm6,[ecx]
vpand xmm2,xmm6,xmm4
vpand xmm7,xmm6,XMMWORD PTR [ecx]
vpand xmm7,xmm6,[ecx]
vpandn xmm2,xmm6,xmm4
vpandn xmm7,xmm6,XMMWORD PTR [ecx]
vpandn xmm7,xmm6,[ecx]
vpavgb xmm2,xmm6,xmm4
vpavgb xmm7,xmm6,XMMWORD PTR [ecx]
vpavgb xmm7,xmm6,[ecx]
vpavgw xmm2,xmm6,xmm4
vpavgw xmm7,xmm6,XMMWORD PTR [ecx]
vpavgw xmm7,xmm6,[ecx]
vpclmullqlqdq xmm2,xmm6,xmm4
vpclmullqlqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmullqlqdq xmm7,xmm6,[ecx]
vpclmulhqlqdq xmm2,xmm6,xmm4
vpclmulhqlqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmulhqlqdq xmm7,xmm6,[ecx]
vpclmullqhqdq xmm2,xmm6,xmm4
vpclmullqhqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmullqhqdq xmm7,xmm6,[ecx]
vpclmulhqhqdq xmm2,xmm6,xmm4
vpclmulhqhqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmulhqhqdq xmm7,xmm6,[ecx]
vpcmpeqb xmm2,xmm6,xmm4
vpcmpeqb xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqb xmm7,xmm6,[ecx]
vpcmpeqw xmm2,xmm6,xmm4
vpcmpeqw xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqw xmm7,xmm6,[ecx]
vpcmpeqd xmm2,xmm6,xmm4
vpcmpeqd xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqd xmm7,xmm6,[ecx]
vpcmpeqq xmm2,xmm6,xmm4
vpcmpeqq xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqq xmm7,xmm6,[ecx]
vpcmpgtb xmm2,xmm6,xmm4
vpcmpgtb xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtb xmm7,xmm6,[ecx]
vpcmpgtw xmm2,xmm6,xmm4
vpcmpgtw xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtw xmm7,xmm6,[ecx]
vpcmpgtd xmm2,xmm6,xmm4
vpcmpgtd xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtd xmm7,xmm6,[ecx]
vpcmpgtq xmm2,xmm6,xmm4
vpcmpgtq xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtq xmm7,xmm6,[ecx]
vpermilpd xmm2,xmm6,xmm4
vpermilpd xmm7,xmm6,XMMWORD PTR [ecx]
vpermilpd xmm7,xmm6,[ecx]
vpermilps xmm2,xmm6,xmm4
vpermilps xmm7,xmm6,XMMWORD PTR [ecx]
vpermilps xmm7,xmm6,[ecx]
vphaddw xmm2,xmm6,xmm4
vphaddw xmm7,xmm6,XMMWORD PTR [ecx]
vphaddw xmm7,xmm6,[ecx]
vphaddd xmm2,xmm6,xmm4
vphaddd xmm7,xmm6,XMMWORD PTR [ecx]
vphaddd xmm7,xmm6,[ecx]
vphaddsw xmm2,xmm6,xmm4
vphaddsw xmm7,xmm6,XMMWORD PTR [ecx]
vphaddsw xmm7,xmm6,[ecx]
vphsubw xmm2,xmm6,xmm4
vphsubw xmm7,xmm6,XMMWORD PTR [ecx]
vphsubw xmm7,xmm6,[ecx]
vphsubd xmm2,xmm6,xmm4
vphsubd xmm7,xmm6,XMMWORD PTR [ecx]
vphsubd xmm7,xmm6,[ecx]
vphsubsw xmm2,xmm6,xmm4
vphsubsw xmm7,xmm6,XMMWORD PTR [ecx]
vphsubsw xmm7,xmm6,[ecx]
vpmaddwd xmm2,xmm6,xmm4
vpmaddwd xmm7,xmm6,XMMWORD PTR [ecx]
vpmaddwd xmm7,xmm6,[ecx]
vpmaddubsw xmm2,xmm6,xmm4
vpmaddubsw xmm7,xmm6,XMMWORD PTR [ecx]
vpmaddubsw xmm7,xmm6,[ecx]
vpmaxsb xmm2,xmm6,xmm4
vpmaxsb xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxsb xmm7,xmm6,[ecx]
vpmaxsw xmm2,xmm6,xmm4
vpmaxsw xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxsw xmm7,xmm6,[ecx]
vpmaxsd xmm2,xmm6,xmm4
vpmaxsd xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxsd xmm7,xmm6,[ecx]
vpmaxub xmm2,xmm6,xmm4
vpmaxub xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxub xmm7,xmm6,[ecx]
vpmaxuw xmm2,xmm6,xmm4
vpmaxuw xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxuw xmm7,xmm6,[ecx]
vpmaxud xmm2,xmm6,xmm4
vpmaxud xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxud xmm7,xmm6,[ecx]
vpminsb xmm2,xmm6,xmm4
vpminsb xmm7,xmm6,XMMWORD PTR [ecx]
vpminsb xmm7,xmm6,[ecx]
vpminsw xmm2,xmm6,xmm4
vpminsw xmm7,xmm6,XMMWORD PTR [ecx]
vpminsw xmm7,xmm6,[ecx]
vpminsd xmm2,xmm6,xmm4
vpminsd xmm7,xmm6,XMMWORD PTR [ecx]
vpminsd xmm7,xmm6,[ecx]
vpminub xmm2,xmm6,xmm4
vpminub xmm7,xmm6,XMMWORD PTR [ecx]
vpminub xmm7,xmm6,[ecx]
vpminuw xmm2,xmm6,xmm4
vpminuw xmm7,xmm6,XMMWORD PTR [ecx]
vpminuw xmm7,xmm6,[ecx]
vpminud xmm2,xmm6,xmm4
vpminud xmm7,xmm6,XMMWORD PTR [ecx]
vpminud xmm7,xmm6,[ecx]
vpmulhuw xmm2,xmm6,xmm4
vpmulhuw xmm7,xmm6,XMMWORD PTR [ecx]
vpmulhuw xmm7,xmm6,[ecx]
vpmulhrsw xmm2,xmm6,xmm4
vpmulhrsw xmm7,xmm6,XMMWORD PTR [ecx]
vpmulhrsw xmm7,xmm6,[ecx]
vpmulhw xmm2,xmm6,xmm4
vpmulhw xmm7,xmm6,XMMWORD PTR [ecx]
vpmulhw xmm7,xmm6,[ecx]
vpmullw xmm2,xmm6,xmm4
vpmullw xmm7,xmm6,XMMWORD PTR [ecx]
vpmullw xmm7,xmm6,[ecx]
vpmulld xmm2,xmm6,xmm4
vpmulld xmm7,xmm6,XMMWORD PTR [ecx]
vpmulld xmm7,xmm6,[ecx]
vpmuludq xmm2,xmm6,xmm4
vpmuludq xmm7,xmm6,XMMWORD PTR [ecx]
vpmuludq xmm7,xmm6,[ecx]
vpmuldq xmm2,xmm6,xmm4
vpmuldq xmm7,xmm6,XMMWORD PTR [ecx]
vpmuldq xmm7,xmm6,[ecx]
vpor xmm2,xmm6,xmm4
vpor xmm7,xmm6,XMMWORD PTR [ecx]
vpor xmm7,xmm6,[ecx]
vpsadbw xmm2,xmm6,xmm4
vpsadbw xmm7,xmm6,XMMWORD PTR [ecx]
vpsadbw xmm7,xmm6,[ecx]
vpshufb xmm2,xmm6,xmm4
vpshufb xmm7,xmm6,XMMWORD PTR [ecx]
vpshufb xmm7,xmm6,[ecx]
vpsignb xmm2,xmm6,xmm4
vpsignb xmm7,xmm6,XMMWORD PTR [ecx]
vpsignb xmm7,xmm6,[ecx]
vpsignw xmm2,xmm6,xmm4
vpsignw xmm7,xmm6,XMMWORD PTR [ecx]
vpsignw xmm7,xmm6,[ecx]
vpsignd xmm2,xmm6,xmm4
vpsignd xmm7,xmm6,XMMWORD PTR [ecx]
vpsignd xmm7,xmm6,[ecx]
vpsllw xmm2,xmm6,xmm4
vpsllw xmm7,xmm6,XMMWORD PTR [ecx]
vpsllw xmm7,xmm6,[ecx]
vpslld xmm2,xmm6,xmm4
vpslld xmm7,xmm6,XMMWORD PTR [ecx]
vpslld xmm7,xmm6,[ecx]
vpsllq xmm2,xmm6,xmm4
vpsllq xmm7,xmm6,XMMWORD PTR [ecx]
vpsllq xmm7,xmm6,[ecx]
vpsraw xmm2,xmm6,xmm4
vpsraw xmm7,xmm6,XMMWORD PTR [ecx]
vpsraw xmm7,xmm6,[ecx]
vpsrad xmm2,xmm6,xmm4
vpsrad xmm7,xmm6,XMMWORD PTR [ecx]
vpsrad xmm7,xmm6,[ecx]
vpsrlw xmm2,xmm6,xmm4
vpsrlw xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlw xmm7,xmm6,[ecx]
vpsrld xmm2,xmm6,xmm4
vpsrld xmm7,xmm6,XMMWORD PTR [ecx]
vpsrld xmm7,xmm6,[ecx]
vpsrlq xmm2,xmm6,xmm4
vpsrlq xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlq xmm7,xmm6,[ecx]
vpsubb xmm2,xmm6,xmm4
vpsubb xmm7,xmm6,XMMWORD PTR [ecx]
vpsubb xmm7,xmm6,[ecx]
vpsubw xmm2,xmm6,xmm4
vpsubw xmm7,xmm6,XMMWORD PTR [ecx]
vpsubw xmm7,xmm6,[ecx]
vpsubd xmm2,xmm6,xmm4
vpsubd xmm7,xmm6,XMMWORD PTR [ecx]
vpsubd xmm7,xmm6,[ecx]
vpsubq xmm2,xmm6,xmm4
vpsubq xmm7,xmm6,XMMWORD PTR [ecx]
vpsubq xmm7,xmm6,[ecx]
vpsubsb xmm2,xmm6,xmm4
vpsubsb xmm7,xmm6,XMMWORD PTR [ecx]
vpsubsb xmm7,xmm6,[ecx]
vpsubsw xmm2,xmm6,xmm4
vpsubsw xmm7,xmm6,XMMWORD PTR [ecx]
vpsubsw xmm7,xmm6,[ecx]
vpsubusb xmm2,xmm6,xmm4
vpsubusb xmm7,xmm6,XMMWORD PTR [ecx]
vpsubusb xmm7,xmm6,[ecx]
vpsubusw xmm2,xmm6,xmm4
vpsubusw xmm7,xmm6,XMMWORD PTR [ecx]
vpsubusw xmm7,xmm6,[ecx]
vpunpckhbw xmm2,xmm6,xmm4
vpunpckhbw xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhbw xmm7,xmm6,[ecx]
vpunpckhwd xmm2,xmm6,xmm4
vpunpckhwd xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhwd xmm7,xmm6,[ecx]
vpunpckhdq xmm2,xmm6,xmm4
vpunpckhdq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhdq xmm7,xmm6,[ecx]
vpunpckhqdq xmm2,xmm6,xmm4
vpunpckhqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhqdq xmm7,xmm6,[ecx]
vpunpcklbw xmm2,xmm6,xmm4
vpunpcklbw xmm7,xmm6,XMMWORD PTR [ecx]
vpunpcklbw xmm7,xmm6,[ecx]
vpunpcklwd xmm2,xmm6,xmm4
vpunpcklwd xmm7,xmm6,XMMWORD PTR [ecx]
vpunpcklwd xmm7,xmm6,[ecx]
vpunpckldq xmm2,xmm6,xmm4
vpunpckldq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckldq xmm7,xmm6,[ecx]
vpunpcklqdq xmm2,xmm6,xmm4
vpunpcklqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpcklqdq xmm7,xmm6,[ecx]
vpxor xmm2,xmm6,xmm4
vpxor xmm7,xmm6,XMMWORD PTR [ecx]
vpxor xmm7,xmm6,[ecx]
vsubpd xmm2,xmm6,xmm4
vsubpd xmm7,xmm6,XMMWORD PTR [ecx]
vsubpd xmm7,xmm6,[ecx]
vsubps xmm2,xmm6,xmm4
vsubps xmm7,xmm6,XMMWORD PTR [ecx]
vsubps xmm7,xmm6,[ecx]
vunpckhpd xmm2,xmm6,xmm4
vunpckhpd xmm7,xmm6,XMMWORD PTR [ecx]
vunpckhpd xmm7,xmm6,[ecx]
vunpckhps xmm2,xmm6,xmm4
vunpckhps xmm7,xmm6,XMMWORD PTR [ecx]
vunpckhps xmm7,xmm6,[ecx]
vunpcklpd xmm2,xmm6,xmm4
vunpcklpd xmm7,xmm6,XMMWORD PTR [ecx]
vunpcklpd xmm7,xmm6,[ecx]
vunpcklps xmm2,xmm6,xmm4
vunpcklps xmm7,xmm6,XMMWORD PTR [ecx]
vunpcklps xmm7,xmm6,[ecx]
vxorpd xmm2,xmm6,xmm4
vxorpd xmm7,xmm6,XMMWORD PTR [ecx]
vxorpd xmm7,xmm6,[ecx]
vxorps xmm2,xmm6,xmm4
vxorps xmm7,xmm6,XMMWORD PTR [ecx]
vxorps xmm7,xmm6,[ecx]
vaesenc xmm2,xmm6,xmm4
vaesenc xmm7,xmm6,XMMWORD PTR [ecx]
vaesenc xmm7,xmm6,[ecx]
vaesenclast xmm2,xmm6,xmm4
vaesenclast xmm7,xmm6,XMMWORD PTR [ecx]
vaesenclast xmm7,xmm6,[ecx]
vaesdec xmm2,xmm6,xmm4
vaesdec xmm7,xmm6,XMMWORD PTR [ecx]
vaesdec xmm7,xmm6,[ecx]
vaesdeclast xmm2,xmm6,xmm4
vaesdeclast xmm7,xmm6,XMMWORD PTR [ecx]
vaesdeclast xmm7,xmm6,[ecx]
vcmpeqpd xmm2,xmm6,xmm4
vcmpeqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeqpd xmm7,xmm6,[ecx]
vcmpltpd xmm2,xmm6,xmm4
vcmpltpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpltpd xmm7,xmm6,[ecx]
vcmplepd xmm2,xmm6,xmm4
vcmplepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmplepd xmm7,xmm6,[ecx]
vcmpunordpd xmm2,xmm6,xmm4
vcmpunordpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunordpd xmm7,xmm6,[ecx]
vcmpneqpd xmm2,xmm6,xmm4
vcmpneqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneqpd xmm7,xmm6,[ecx]
vcmpnltpd xmm2,xmm6,xmm4
vcmpnltpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnltpd xmm7,xmm6,[ecx]
vcmpnlepd xmm2,xmm6,xmm4
vcmpnlepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnlepd xmm7,xmm6,[ecx]
vcmpordpd xmm2,xmm6,xmm4
vcmpordpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpordpd xmm7,xmm6,[ecx]
vcmpeq_uqpd xmm2,xmm6,xmm4
vcmpeq_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_uqpd xmm7,xmm6,[ecx]
vcmpngepd xmm2,xmm6,xmm4
vcmpngepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngepd xmm7,xmm6,[ecx]
vcmpngtpd xmm2,xmm6,xmm4
vcmpngtpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngtpd xmm7,xmm6,[ecx]
vcmpfalsepd xmm2,xmm6,xmm4
vcmpfalsepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalsepd xmm7,xmm6,[ecx]
vcmpneq_oqpd xmm2,xmm6,xmm4
vcmpneq_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_oqpd xmm7,xmm6,[ecx]
vcmpgepd xmm2,xmm6,xmm4
vcmpgepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgepd xmm7,xmm6,[ecx]
vcmpgtpd xmm2,xmm6,xmm4
vcmpgtpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgtpd xmm7,xmm6,[ecx]
vcmptruepd xmm2,xmm6,xmm4
vcmptruepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmptruepd xmm7,xmm6,[ecx]
vcmpeq_ospd xmm2,xmm6,xmm4
vcmpeq_ospd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_ospd xmm7,xmm6,[ecx]
vcmplt_oqpd xmm2,xmm6,xmm4
vcmplt_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmplt_oqpd xmm7,xmm6,[ecx]
vcmple_oqpd xmm2,xmm6,xmm4
vcmple_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmple_oqpd xmm7,xmm6,[ecx]
vcmpunord_spd xmm2,xmm6,xmm4
vcmpunord_spd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunord_spd xmm7,xmm6,[ecx]
vcmpneq_uspd xmm2,xmm6,xmm4
vcmpneq_uspd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_uspd xmm7,xmm6,[ecx]
vcmpnlt_uqpd xmm2,xmm6,xmm4
vcmpnlt_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnlt_uqpd xmm7,xmm6,[ecx]
vcmpnle_uqpd xmm2,xmm6,xmm4
vcmpnle_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnle_uqpd xmm7,xmm6,[ecx]
vcmpord_spd xmm2,xmm6,xmm4
vcmpord_spd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpord_spd xmm7,xmm6,[ecx]
vcmpeq_uspd xmm2,xmm6,xmm4
vcmpeq_uspd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_uspd xmm7,xmm6,[ecx]
vcmpnge_uqpd xmm2,xmm6,xmm4
vcmpnge_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnge_uqpd xmm7,xmm6,[ecx]
vcmpngt_uqpd xmm2,xmm6,xmm4
vcmpngt_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngt_uqpd xmm7,xmm6,[ecx]
vcmpfalse_ospd xmm2,xmm6,xmm4
vcmpfalse_ospd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalse_ospd xmm7,xmm6,[ecx]
vcmpneq_ospd xmm2,xmm6,xmm4
vcmpneq_ospd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_ospd xmm7,xmm6,[ecx]
vcmpge_oqpd xmm2,xmm6,xmm4
vcmpge_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpge_oqpd xmm7,xmm6,[ecx]
vcmpgt_oqpd xmm2,xmm6,xmm4
vcmpgt_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgt_oqpd xmm7,xmm6,[ecx]
vcmptrue_uspd xmm2,xmm6,xmm4
vcmptrue_uspd xmm7,xmm6,XMMWORD PTR [ecx]
vcmptrue_uspd xmm7,xmm6,[ecx]
vcmpeqps xmm2,xmm6,xmm4
vcmpeqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeqps xmm7,xmm6,[ecx]
vcmpltps xmm2,xmm6,xmm4
vcmpltps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpltps xmm7,xmm6,[ecx]
vcmpleps xmm2,xmm6,xmm4
vcmpleps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpleps xmm7,xmm6,[ecx]
vcmpunordps xmm2,xmm6,xmm4
vcmpunordps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunordps xmm7,xmm6,[ecx]
vcmpneqps xmm2,xmm6,xmm4
vcmpneqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneqps xmm7,xmm6,[ecx]
vcmpnltps xmm2,xmm6,xmm4
vcmpnltps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnltps xmm7,xmm6,[ecx]
vcmpnleps xmm2,xmm6,xmm4
vcmpnleps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnleps xmm7,xmm6,[ecx]
vcmpordps xmm2,xmm6,xmm4
vcmpordps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpordps xmm7,xmm6,[ecx]
vcmpeq_uqps xmm2,xmm6,xmm4
vcmpeq_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_uqps xmm7,xmm6,[ecx]
vcmpngeps xmm2,xmm6,xmm4
vcmpngeps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngeps xmm7,xmm6,[ecx]
vcmpngtps xmm2,xmm6,xmm4
vcmpngtps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngtps xmm7,xmm6,[ecx]
vcmpfalseps xmm2,xmm6,xmm4
vcmpfalseps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalseps xmm7,xmm6,[ecx]
vcmpneq_oqps xmm2,xmm6,xmm4
vcmpneq_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_oqps xmm7,xmm6,[ecx]
vcmpgeps xmm2,xmm6,xmm4
vcmpgeps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgeps xmm7,xmm6,[ecx]
vcmpgtps xmm2,xmm6,xmm4
vcmpgtps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgtps xmm7,xmm6,[ecx]
vcmptrueps xmm2,xmm6,xmm4
vcmptrueps xmm7,xmm6,XMMWORD PTR [ecx]
vcmptrueps xmm7,xmm6,[ecx]
vcmpeq_osps xmm2,xmm6,xmm4
vcmpeq_osps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_osps xmm7,xmm6,[ecx]
vcmplt_oqps xmm2,xmm6,xmm4
vcmplt_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmplt_oqps xmm7,xmm6,[ecx]
vcmple_oqps xmm2,xmm6,xmm4
vcmple_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmple_oqps xmm7,xmm6,[ecx]
vcmpunord_sps xmm2,xmm6,xmm4
vcmpunord_sps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunord_sps xmm7,xmm6,[ecx]
vcmpneq_usps xmm2,xmm6,xmm4
vcmpneq_usps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_usps xmm7,xmm6,[ecx]
vcmpnlt_uqps xmm2,xmm6,xmm4
vcmpnlt_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnlt_uqps xmm7,xmm6,[ecx]
vcmpnle_uqps xmm2,xmm6,xmm4
vcmpnle_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnle_uqps xmm7,xmm6,[ecx]
vcmpord_sps xmm2,xmm6,xmm4
vcmpord_sps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpord_sps xmm7,xmm6,[ecx]
vcmpeq_usps xmm2,xmm6,xmm4
vcmpeq_usps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_usps xmm7,xmm6,[ecx]
vcmpnge_uqps xmm2,xmm6,xmm4
vcmpnge_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnge_uqps xmm7,xmm6,[ecx]
vcmpngt_uqps xmm2,xmm6,xmm4
vcmpngt_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngt_uqps xmm7,xmm6,[ecx]
vcmpfalse_osps xmm2,xmm6,xmm4
vcmpfalse_osps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalse_osps xmm7,xmm6,[ecx]
vcmpneq_osps xmm2,xmm6,xmm4
vcmpneq_osps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_osps xmm7,xmm6,[ecx]
vcmpge_oqps xmm2,xmm6,xmm4
vcmpge_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpge_oqps xmm7,xmm6,[ecx]
vcmpgt_oqps xmm2,xmm6,xmm4
vcmpgt_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgt_oqps xmm7,xmm6,[ecx]
vcmptrue_usps xmm2,xmm6,xmm4
vcmptrue_usps xmm7,xmm6,XMMWORD PTR [ecx]
vcmptrue_usps xmm7,xmm6,[ecx]
vgf2p8mulb xmm6, xmm5, xmm4
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [ecx]
vgf2p8mulb xmm6, xmm5, [ecx]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx+2032]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx+2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx-2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx-2064]
# Tests for op mem128, xmm, xmm
vmaskmovps xmm6,xmm4,XMMWORD PTR [ecx]
vmaskmovps xmm6,xmm4,[ecx]
vmaskmovpd xmm6,xmm4,XMMWORD PTR [ecx]
vmaskmovpd xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist xmm6,xmm4,7
vaeskeygenassist xmm6,XMMWORD PTR [ecx],7
vaeskeygenassist xmm6,[ecx],7
vpcmpestri xmm6,xmm4,7
vpcmpestri xmm6,XMMWORD PTR [ecx],7
vpcmpestri xmm6,[ecx],7
vpcmpestrm xmm6,xmm4,7
vpcmpestrm xmm6,XMMWORD PTR [ecx],7
vpcmpestrm xmm6,[ecx],7
vpcmpistri xmm6,xmm4,7
vpcmpistri xmm6,XMMWORD PTR [ecx],7
vpcmpistri xmm6,[ecx],7
vpcmpistrm xmm6,xmm4,7
vpcmpistrm xmm6,XMMWORD PTR [ecx],7
vpcmpistrm xmm6,[ecx],7
vpermilpd xmm6,xmm4,7
vpermilpd xmm6,XMMWORD PTR [ecx],7
vpermilpd xmm6,[ecx],7
vpermilps xmm6,xmm4,7
vpermilps xmm6,XMMWORD PTR [ecx],7
vpermilps xmm6,[ecx],7
vpshufd xmm6,xmm4,7
vpshufd xmm6,XMMWORD PTR [ecx],7
vpshufd xmm6,[ecx],7
vpshufhw xmm6,xmm4,7
vpshufhw xmm6,XMMWORD PTR [ecx],7
vpshufhw xmm6,[ecx],7
vpshuflw xmm6,xmm4,7
vpshuflw xmm6,XMMWORD PTR [ecx],7
vpshuflw xmm6,[ecx],7
vroundpd xmm6,xmm4,7
vroundpd xmm6,XMMWORD PTR [ecx],7
vroundpd xmm6,[ecx],7
vroundps xmm6,xmm4,7
vroundps xmm6,XMMWORD PTR [ecx],7
vroundps xmm6,[ecx],7
# Tests for op xmm, xmm, mem128
vmaskmovps XMMWORD PTR [ecx],xmm6,xmm4
vmaskmovps [ecx],xmm6,xmm4
vmaskmovpd XMMWORD PTR [ecx],xmm6,xmm4
vmaskmovpd [ecx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd xmm2,xmm6,xmm4,7
vblendpd xmm2,xmm6,XMMWORD PTR [ecx],7
vblendpd xmm2,xmm6,[ecx],7
vblendps xmm2,xmm6,xmm4,7
vblendps xmm2,xmm6,XMMWORD PTR [ecx],7
vblendps xmm2,xmm6,[ecx],7
vcmppd xmm2,xmm6,xmm4,7
vcmppd xmm2,xmm6,XMMWORD PTR [ecx],7
vcmppd xmm2,xmm6,[ecx],7
vcmpps xmm2,xmm6,xmm4,7
vcmpps xmm2,xmm6,XMMWORD PTR [ecx],7
vcmpps xmm2,xmm6,[ecx],7
vdppd xmm2,xmm6,xmm4,7
vdppd xmm2,xmm6,XMMWORD PTR [ecx],7
vdppd xmm2,xmm6,[ecx],7
vdpps xmm2,xmm6,xmm4,7
vdpps xmm2,xmm6,XMMWORD PTR [ecx],7
vdpps xmm2,xmm6,[ecx],7
vmpsadbw xmm2,xmm6,xmm4,7
vmpsadbw xmm2,xmm6,XMMWORD PTR [ecx],7
vmpsadbw xmm2,xmm6,[ecx],7
vpalignr xmm2,xmm6,xmm4,7
vpalignr xmm2,xmm6,XMMWORD PTR [ecx],7
vpalignr xmm2,xmm6,[ecx],7
vpblendw xmm2,xmm6,xmm4,7
vpblendw xmm2,xmm6,XMMWORD PTR [ecx],7
vpblendw xmm2,xmm6,[ecx],7
vpclmulqdq xmm2,xmm6,xmm4,7
vpclmulqdq xmm2,xmm6,XMMWORD PTR [ecx],7
vpclmulqdq xmm2,xmm6,[ecx],7
vshufpd xmm2,xmm6,xmm4,7
vshufpd xmm2,xmm6,XMMWORD PTR [ecx],7
vshufpd xmm2,xmm6,[ecx],7
vshufps xmm2,xmm6,xmm4,7
vshufps xmm2,xmm6,XMMWORD PTR [ecx],7
vshufps xmm2,xmm6,[ecx],7
vgf2p8affineqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineqb xmm6, xmm5, xmm4, 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [ecx], 123
vgf2p8affineqb xmm6, xmm5, [ecx], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx+2032], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx+2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx-2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx-2064], 123
vgf2p8affineinvqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineinvqb xmm6, xmm5, xmm4, 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [ecx], 123
vgf2p8affineinvqb xmm6, xmm5, [ecx], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx+2032], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx+2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx-2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx-2064], 123
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd xmm7,xmm2,xmm6,xmm4
vblendvpd xmm7,xmm2,XMMWORD PTR [ecx],xmm4
vblendvpd xmm7,xmm2,[ecx],xmm4
vblendvps xmm7,xmm2,xmm6,xmm4
vblendvps xmm7,xmm2,XMMWORD PTR [ecx],xmm4
vblendvps xmm7,xmm2,[ecx],xmm4
vpblendvb xmm7,xmm2,xmm6,xmm4
vpblendvb xmm7,xmm2,XMMWORD PTR [ecx],xmm4
vpblendvb xmm7,xmm2,[ecx],xmm4
# Tests for op mem64, ymm
vbroadcastsd ymm4,QWORD PTR [ecx]
vbroadcastsd ymm4,[ecx]
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [ecx]
vcomisd xmm4,[ecx]
vcvtdq2pd xmm6,xmm4
vcvtdq2pd xmm4,QWORD PTR [ecx]
vcvtdq2pd xmm4,[ecx]
vcvtps2pd xmm6,xmm4
vcvtps2pd xmm4,QWORD PTR [ecx]
vcvtps2pd xmm4,[ecx]
vmovddup xmm6,xmm4
vmovddup xmm4,QWORD PTR [ecx]
vmovddup xmm4,[ecx]
vpmovsxbw xmm6,xmm4
vpmovsxbw xmm4,QWORD PTR [ecx]
vpmovsxbw xmm4,[ecx]
vpmovsxwd xmm6,xmm4
vpmovsxwd xmm4,QWORD PTR [ecx]
vpmovsxwd xmm4,[ecx]
vpmovsxdq xmm6,xmm4
vpmovsxdq xmm4,QWORD PTR [ecx]
vpmovsxdq xmm4,[ecx]
vpmovzxbw xmm6,xmm4
vpmovzxbw xmm4,QWORD PTR [ecx]
vpmovzxbw xmm4,[ecx]
vpmovzxwd xmm6,xmm4
vpmovzxwd xmm4,QWORD PTR [ecx]
vpmovzxwd xmm4,[ecx]
vpmovzxdq xmm6,xmm4
vpmovzxdq xmm4,QWORD PTR [ecx]
vpmovzxdq xmm4,[ecx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [ecx]
vucomisd xmm4,[ecx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [ecx]
vmovsd xmm4,[ecx]
# Tests for op xmm, mem64
vmovlpd QWORD PTR [ecx],xmm4
vmovlpd [ecx],xmm4
vmovlps QWORD PTR [ecx],xmm4
vmovlps [ecx],xmm4
vmovhpd QWORD PTR [ecx],xmm4
vmovhpd [ecx],xmm4
vmovhps QWORD PTR [ecx],xmm4
vmovhps [ecx],xmm4
vmovsd QWORD PTR [ecx],xmm4
vmovsd [ecx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovq QWORD PTR [ecx],xmm4
vmovq xmm4,QWORD PTR [ecx]
vmovq [ecx],xmm4
vmovq xmm4,[ecx]
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [ecx]
vcvtsd2si ecx,[ecx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [ecx]
vcvttsd2si ecx,[ecx]
# Tests for op mem64, xmm, xmm
vmovlpd xmm6,xmm4,QWORD PTR [ecx]
vmovlpd xmm6,xmm4,[ecx]
vmovlps xmm6,xmm4,QWORD PTR [ecx]
vmovlps xmm6,xmm4,[ecx]
vmovhpd xmm6,xmm4,QWORD PTR [ecx]
vmovhpd xmm6,xmm4,[ecx]
vmovhps xmm6,xmm4,QWORD PTR [ecx]
vmovhps xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [ecx],7
vcmpsd xmm2,xmm6,[ecx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [ecx],7
vroundsd xmm2,xmm6,[ecx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [ecx]
vaddsd xmm2,xmm6,[ecx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [ecx]
vcvtsd2ss xmm2,xmm6,[ecx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [ecx]
vdivsd xmm2,xmm6,[ecx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [ecx]
vmaxsd xmm2,xmm6,[ecx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [ecx]
vminsd xmm2,xmm6,[ecx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [ecx]
vmulsd xmm2,xmm6,[ecx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [ecx]
vsqrtsd xmm2,xmm6,[ecx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [ecx]
vsubsd xmm2,xmm6,[ecx]
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeqsd xmm2,xmm6,[ecx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpltsd xmm2,xmm6,[ecx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [ecx]
vcmplesd xmm2,xmm6,[ecx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpunordsd xmm2,xmm6,[ecx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneqsd xmm2,xmm6,[ecx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnltsd xmm2,xmm6,[ecx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlesd xmm2,xmm6,[ecx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpordsd xmm2,xmm6,[ecx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_uqsd xmm2,xmm6,[ecx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [ecx]
vcmpngesd xmm2,xmm6,[ecx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngtsd xmm2,xmm6,[ecx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalsesd xmm2,xmm6,[ecx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_oqsd xmm2,xmm6,[ecx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [ecx]
vcmpgesd xmm2,xmm6,[ecx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgtsd xmm2,xmm6,[ecx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [ecx]
vcmptruesd xmm2,xmm6,[ecx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ossd xmm2,xmm6,[ecx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmplt_oqsd xmm2,xmm6,[ecx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmple_oqsd xmm2,xmm6,[ecx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpunord_ssd xmm2,xmm6,[ecx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ussd xmm2,xmm6,[ecx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlt_uqsd xmm2,xmm6,[ecx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnle_uqsd xmm2,xmm6,[ecx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpord_ssd xmm2,xmm6,[ecx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ussd xmm2,xmm6,[ecx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnge_uqsd xmm2,xmm6,[ecx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngt_uqsd xmm2,xmm6,[ecx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalse_ossd xmm2,xmm6,[ecx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ossd xmm2,xmm6,[ecx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpge_oqsd xmm2,xmm6,[ecx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgt_oqsd xmm2,xmm6,[ecx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmptrue_ussd xmm2,xmm6,[ecx]
# Tests for op mem32 (vldmxcsr/vstmxcsr access the 32-bit MXCSR image)
vldmxcsr DWORD PTR [ecx]
vldmxcsr [ecx]
vstmxcsr DWORD PTR [ecx]
vstmxcsr [ecx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [ecx]
vaddss xmm2,xmm6,[ecx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [ecx]
vcvtss2sd xmm2,xmm6,[ecx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [ecx]
vdivss xmm2,xmm6,[ecx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [ecx]
vmaxss xmm2,xmm6,[ecx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [ecx]
vminss xmm2,xmm6,[ecx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [ecx]
vmulss xmm2,xmm6,[ecx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [ecx]
vrcpss xmm2,xmm6,[ecx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [ecx]
vrsqrtss xmm2,xmm6,[ecx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [ecx]
vsqrtss xmm2,xmm6,[ecx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [ecx]
vsubss xmm2,xmm6,[ecx]
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeqss xmm2,xmm6,[ecx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [ecx]
vcmpltss xmm2,xmm6,[ecx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [ecx]
vcmpless xmm2,xmm6,[ecx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [ecx]
vcmpunordss xmm2,xmm6,[ecx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneqss xmm2,xmm6,[ecx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [ecx]
vcmpnltss xmm2,xmm6,[ecx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [ecx]
vcmpnless xmm2,xmm6,[ecx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [ecx]
vcmpordss xmm2,xmm6,[ecx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_uqss xmm2,xmm6,[ecx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [ecx]
vcmpngess xmm2,xmm6,[ecx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [ecx]
vcmpngtss xmm2,xmm6,[ecx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [ecx]
vcmpfalsess xmm2,xmm6,[ecx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_oqss xmm2,xmm6,[ecx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [ecx]
vcmpgess xmm2,xmm6,[ecx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [ecx]
vcmpgtss xmm2,xmm6,[ecx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [ecx]
vcmptruess xmm2,xmm6,[ecx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_osss xmm2,xmm6,[ecx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmplt_oqss xmm2,xmm6,[ecx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmple_oqss xmm2,xmm6,[ecx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpunord_sss xmm2,xmm6,[ecx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_usss xmm2,xmm6,[ecx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnlt_uqss xmm2,xmm6,[ecx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnle_uqss xmm2,xmm6,[ecx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpord_sss xmm2,xmm6,[ecx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_usss xmm2,xmm6,[ecx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnge_uqss xmm2,xmm6,[ecx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpngt_uqss xmm2,xmm6,[ecx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpfalse_osss xmm2,xmm6,[ecx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_osss xmm2,xmm6,[ecx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpge_oqss xmm2,xmm6,[ecx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpgt_oqss xmm2,xmm6,[ecx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [ecx]
vcmptrue_usss xmm2,xmm6,[ecx]
# Tests for op mem32, ymm
vbroadcastss ymm4,DWORD PTR [ecx]
vbroadcastss ymm4,[ecx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [ecx]
vcomiss xmm4,[ecx]
vpmovsxbd xmm6,xmm4
vpmovsxbd xmm4,DWORD PTR [ecx]
vpmovsxbd xmm4,[ecx]
vpmovsxwq xmm6,xmm4
vpmovsxwq xmm4,DWORD PTR [ecx]
vpmovsxwq xmm4,[ecx]
vpmovzxbd xmm6,xmm4
vpmovzxbd xmm4,DWORD PTR [ecx]
vpmovzxbd xmm4,[ecx]
vpmovzxwq xmm6,xmm4
vpmovzxwq xmm4,DWORD PTR [ecx]
vpmovzxwq xmm4,[ecx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [ecx]
vucomiss xmm4,[ecx]
# Tests for op mem32, xmm
vbroadcastss xmm4,DWORD PTR [ecx]
vbroadcastss xmm4,[ecx]
vmovss xmm4,DWORD PTR [ecx]
vmovss xmm4,[ecx]
# Tests for op xmm, mem32
vmovss DWORD PTR [ecx],xmm4
vmovss [ecx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd ecx,xmm4
vmovd DWORD PTR [ecx],xmm4
vmovd xmm4,ecx
vmovd xmm4,DWORD PTR [ecx]
vmovd [ecx],xmm4
vmovd xmm4,[ecx]
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [ecx]
vcvtss2si ecx,[ecx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [ecx]
vcvttss2si ecx,[ecx]
# Tests for op imm8, xmm, regq/mem32
vextractps DWORD PTR [ecx],xmm4,7
vextractps [ecx],xmm4,7
# Tests for op imm8, xmm, regl/mem32
vpextrd ecx,xmm4,7
vpextrd DWORD PTR [ecx],xmm4,7
vpextrd [ecx],xmm4,7
vextractps ecx,xmm4,7
vextractps DWORD PTR [ecx],xmm4,7
vextractps [ecx],xmm4,7
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd xmm6,xmm4,ecx,7
vpinsrd xmm6,xmm4,DWORD PTR [ecx],7
vpinsrd xmm6,xmm4,[ecx],7
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2sd xmm6,xmm4,[ecx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2ss xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [ecx],7
vcmpss xmm2,xmm6,[ecx],7
vinsertps xmm2,xmm6,xmm4,7
vinsertps xmm2,xmm6,DWORD PTR [ecx],7
vinsertps xmm2,xmm6,[ecx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [ecx],7
vroundss xmm2,xmm6,[ecx],7
# Tests for op xmm/mem16, xmm
vpmovsxbq xmm6,xmm4
vpmovsxbq xmm4,WORD PTR [ecx]
vpmovsxbq xmm4,[ecx]
vpmovzxbq xmm6,xmm4
vpmovzxbq xmm4,WORD PTR [ecx]
vpmovzxbq xmm4,[ecx]
# Tests for op imm8, xmm, regl/mem16
vpextrw ecx,xmm4,7
vpextrw WORD PTR [ecx],xmm4,7
vpextrw [ecx],xmm4,7
# Tests for op imm8, xmm, regq/mem16
vpextrw WORD PTR [ecx],xmm4,7
vpextrw [ecx],xmm4,7
# Tests for op imm8, regl/mem16, xmm, xmm
vpinsrw xmm6,xmm4,ecx,7
vpinsrw xmm6,xmm4,WORD PTR [ecx],7
vpinsrw xmm6,xmm4,[ecx],7
# Tests for op imm8, xmm, regl/mem8
vpextrb ecx,xmm4,7
vpextrb BYTE PTR [ecx],xmm4,7
vpextrb [ecx],xmm4,7
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb xmm6,xmm4,ecx,7
vpinsrb xmm6,xmm4,BYTE PTR [ecx],7
vpinsrb xmm6,xmm4,[ecx],7
# Tests for op imm8, xmm, regq/mem8
vpextrb BYTE PTR [ecx],xmm4,7
vpextrb [ecx],xmm4,7
# Tests for op xmm, xmm
vmaskmovdqu xmm6,xmm4
vmovq xmm6,xmm4
# Tests for op xmm, regl
vmovmskpd ecx,xmm4
vmovmskps ecx,xmm4
vpmovmskb ecx,xmm4
# Tests for op xmm, xmm, xmm
vmovhlps xmm2,xmm6,xmm4
vmovlhps xmm2,xmm6,xmm4
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
# Tests for op imm8, xmm, xmm
vpslld xmm6,xmm4,7
vpslldq xmm6,xmm4,7
vpsllq xmm6,xmm4,7
vpsllw xmm6,xmm4,7
vpsrad xmm6,xmm4,7
vpsraw xmm6,xmm4,7
vpsrld xmm6,xmm4,7
vpsrldq xmm6,xmm4,7
vpsrlq xmm6,xmm4,7
vpsrlw xmm6,xmm4,7
# Tests for op imm8, xmm, regl
vpextrw ecx,xmm4,7
# Tests for op ymm, regl
vmovmskpd ecx,ymm4
vmovmskps ecx,ymm4
# Default instructions without suffixes.
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm6,ymm4
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm6,ymm4
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm6,ymm4
#Tests with different memory and register operands.
vldmxcsr DWORD PTR ds:0x1234
vmovdqa xmm0,XMMWORD PTR ds:0x1234
vmovdqa XMMWORD PTR ds:0x1234,xmm0
vmovd DWORD PTR ds:0x1234,xmm0
vcvtsd2si eax,QWORD PTR ds:0x1234
vcvtdq2pd ymm0,XMMWORD PTR ds:0x1234
vcvtpd2ps xmm0,YMMWORD PTR ds:0x1234
vpavgb xmm7,xmm0,XMMWORD PTR ds:0x1234
vaeskeygenassist xmm0,XMMWORD PTR ds:0x1234,7
vpextrb ds:0x1234,xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR ds:0x1234
vpclmulqdq xmm7,xmm0,XMMWORD PTR ds:0x1234,7
vblendvps xmm6,xmm4,XMMWORD PTR ds:0x1234,xmm0
vpinsrb xmm7,xmm0,ds:0x1234,7
vmovdqa ymm0,YMMWORD PTR ds:0x1234
vmovdqa YMMWORD PTR ds:0x1234,ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR ds:0x1234
vroundpd ymm0,YMMWORD PTR ds:0x1234,7
vextractf128 XMMWORD PTR ds:0x1234,ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR ds:0x1234,7
vblendvpd ymm6,ymm4,YMMWORD PTR ds:0x1234,ymm0
vldmxcsr DWORD PTR [ebp]
vmovdqa xmm0,XMMWORD PTR [ebp]
vmovdqa XMMWORD PTR [ebp],xmm0
vmovd DWORD PTR [ebp],xmm0
vcvtsd2si eax,QWORD PTR [ebp]
vcvtdq2pd ymm0,XMMWORD PTR [ebp]
vcvtpd2ps xmm0,YMMWORD PTR [ebp]
vpavgb xmm7,xmm0,XMMWORD PTR [ebp]
vaeskeygenassist xmm0,XMMWORD PTR [ebp],7
vpextrb [ebp],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [ebp],7
vblendvps xmm6,xmm4,XMMWORD PTR [ebp],xmm0
vpinsrb xmm7,xmm0,[ebp],7
vmovdqa ymm0,YMMWORD PTR [ebp]
vmovdqa YMMWORD PTR [ebp],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [ebp]
vroundpd ymm0,YMMWORD PTR [ebp],7
vextractf128 XMMWORD PTR [ebp],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [ebp],7
vblendvpd ymm6,ymm4,YMMWORD PTR [ebp],ymm0
vldmxcsr DWORD PTR [ebp+0x99]
vmovdqa xmm0,XMMWORD PTR [ebp+0x99]
vmovdqa XMMWORD PTR [ebp+0x99],xmm0
vmovd DWORD PTR [ebp+0x99],xmm0
vcvtsd2si eax,QWORD PTR [ebp+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [ebp+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [ebp+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [ebp+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [ebp+0x99],7
vpextrb [ebp+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [ebp+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [ebp+0x99],xmm0
vpinsrb xmm7,xmm0,[ebp+0x99],7
vmovdqa ymm0,YMMWORD PTR [ebp+0x99]
vmovdqa YMMWORD PTR [ebp+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [ebp+0x99]
vroundpd ymm0,YMMWORD PTR [ebp+0x99],7
vextractf128 XMMWORD PTR [ebp+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [ebp+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [ebp+0x99],ymm0
vldmxcsr DWORD PTR [eiz*1+0x99]
vmovdqa xmm0,XMMWORD PTR [eiz*1+0x99]
vmovdqa XMMWORD PTR [eiz*1+0x99],xmm0
vmovd DWORD PTR [eiz*1+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eiz*1+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eiz*1+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eiz*1+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eiz*1+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eiz*1+0x99],7
vpextrb [eiz*1+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*1+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eiz*1+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eiz*1+0x99],xmm0
vpinsrb xmm7,xmm0,[eiz*1+0x99],7
vmovdqa ymm0,YMMWORD PTR [eiz*1+0x99]
vmovdqa YMMWORD PTR [eiz*1+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eiz*1+0x99]
vroundpd ymm0,YMMWORD PTR [eiz*1+0x99],7
vextractf128 XMMWORD PTR [eiz*1+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eiz*1+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eiz*1+0x99],ymm0
vldmxcsr DWORD PTR [eiz*2+0x99]
vmovdqa xmm0,XMMWORD PTR [eiz*2+0x99]
vmovdqa XMMWORD PTR [eiz*2+0x99],xmm0
vmovd DWORD PTR [eiz*2+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eiz*2+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eiz*2+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eiz*2+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eiz*2+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eiz*2+0x99],7
vpextrb [eiz*2+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*2+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eiz*2+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eiz*2+0x99],xmm0
vpinsrb xmm7,xmm0,[eiz*2+0x99],7
vmovdqa ymm0,YMMWORD PTR [eiz*2+0x99]
vmovdqa YMMWORD PTR [eiz*2+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eiz*2+0x99]
vroundpd ymm0,YMMWORD PTR [eiz*2+0x99],7
vextractf128 XMMWORD PTR [eiz*2+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eiz*2+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eiz*2+0x99],ymm0
vldmxcsr DWORD PTR [eax+eiz*1+0x99]
vmovdqa xmm0,XMMWORD PTR [eax+eiz*1+0x99]
vmovdqa XMMWORD PTR [eax+eiz*1+0x99],xmm0
vmovd DWORD PTR [eax+eiz*1+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eax+eiz*1+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eax+eiz*1+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eax+eiz*1+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eax+eiz*1+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eax+eiz*1+0x99],7
vpextrb [eax+eiz*1+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*1+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eax+eiz*1+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eax+eiz*1+0x99],xmm0
vpinsrb xmm7,xmm0,[eax+eiz*1+0x99],7
vmovdqa ymm0,YMMWORD PTR [eax+eiz*1+0x99]
vmovdqa YMMWORD PTR [eax+eiz*1+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eax+eiz*1+0x99]
vroundpd ymm0,YMMWORD PTR [eax+eiz*1+0x99],7
vextractf128 XMMWORD PTR [eax+eiz*1+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eax+eiz*1+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eax+eiz*1+0x99],ymm0
vldmxcsr DWORD PTR [eax+eiz*2+0x99]
vmovdqa xmm0,XMMWORD PTR [eax+eiz*2+0x99]
vmovdqa XMMWORD PTR [eax+eiz*2+0x99],xmm0
vmovd DWORD PTR [eax+eiz*2+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eax+eiz*2+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eax+eiz*2+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eax+eiz*2+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eax+eiz*2+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eax+eiz*2+0x99],7
vpextrb [eax+eiz*2+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*2+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eax+eiz*2+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eax+eiz*2+0x99],xmm0
vpinsrb xmm7,xmm0,[eax+eiz*2+0x99],7
vmovdqa ymm0,YMMWORD PTR [eax+eiz*2+0x99]
vmovdqa YMMWORD PTR [eax+eiz*2+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eax+eiz*2+0x99]
vroundpd ymm0,YMMWORD PTR [eax+eiz*2+0x99],7
vextractf128 XMMWORD PTR [eax+eiz*2+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eax+eiz*2+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eax+eiz*2+0x99],ymm0
vldmxcsr DWORD PTR [eax+ebx*4+0x99]
vmovdqa xmm0,XMMWORD PTR [eax+ebx*4+0x99]
vmovdqa XMMWORD PTR [eax+ebx*4+0x99],xmm0
vmovd DWORD PTR [eax+ebx*4+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eax+ebx*4+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eax+ebx*4+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eax+ebx*4+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eax+ebx*4+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eax+ebx*4+0x99],7
vpextrb [eax+ebx*4+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+ebx*4+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eax+ebx*4+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eax+ebx*4+0x99],xmm0
vpinsrb xmm7,xmm0,[eax+ebx*4+0x99],7
vmovdqa ymm0,YMMWORD PTR [eax+ebx*4+0x99]
vmovdqa YMMWORD PTR [eax+ebx*4+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eax+ebx*4+0x99]
vroundpd ymm0,YMMWORD PTR [eax+ebx*4+0x99],7
vextractf128 XMMWORD PTR [eax+ebx*4+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eax+ebx*4+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eax+ebx*4+0x99],ymm0
vldmxcsr DWORD PTR [esp+ecx*8+0x99]
vmovdqa xmm0,XMMWORD PTR [esp+ecx*8+0x99]
vmovdqa XMMWORD PTR [esp+ecx*8+0x99],xmm0
vmovd DWORD PTR [esp+ecx*8+0x99],xmm0
vcvtsd2si eax,QWORD PTR [esp+ecx*8+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [esp+ecx*8+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [esp+ecx*8+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [esp+ecx*8+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [esp+ecx*8+0x99],7
vpextrb [esp+ecx*8+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [esp+ecx*8+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [esp+ecx*8+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [esp+ecx*8+0x99],xmm0
vpinsrb xmm7,xmm0,[esp+ecx*8+0x99],7
vmovdqa ymm0,YMMWORD PTR [esp+ecx*8+0x99]
vmovdqa YMMWORD PTR [esp+ecx*8+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [esp+ecx*8+0x99]
vroundpd ymm0,YMMWORD PTR [esp+ecx*8+0x99],7
vextractf128 XMMWORD PTR [esp+ecx*8+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [esp+ecx*8+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [esp+ecx*8+0x99],ymm0
vldmxcsr DWORD PTR [ebp+edx*1+0x99]
vmovdqa xmm0,XMMWORD PTR [ebp+edx*1+0x99]
vmovdqa XMMWORD PTR [ebp+edx*1+0x99],xmm0
vmovd DWORD PTR [ebp+edx*1+0x99],xmm0
vcvtsd2si eax,QWORD PTR [ebp+edx*1+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [ebp+edx*1+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [ebp+edx*1+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [ebp+edx*1+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [ebp+edx*1+0x99],7
vpextrb [ebp+edx*1+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+edx*1+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [ebp+edx*1+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [ebp+edx*1+0x99],xmm0
vpinsrb xmm7,xmm0,[ebp+edx*1+0x99],7
vmovdqa ymm0,YMMWORD PTR [ebp+edx*1+0x99]
vmovdqa YMMWORD PTR [ebp+edx*1+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [ebp+edx*1+0x99]
vroundpd ymm0,YMMWORD PTR [ebp+edx*1+0x99],7
vextractf128 XMMWORD PTR [ebp+edx*1+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [ebp+edx*1+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [ebp+edx*1+0x99],ymm0
# Tests for all register operands.
vmovmskpd eax,xmm0
vpslld xmm7,xmm0,7
vmovmskps eax,ymm0
# ---- file boundary (extraction artifact removed) ----
# Following content is from: gas/testsuite/gas/i386/x86-64-relax-1.s
# (repo: tactcomplabs/xbgas-binutils-gdb)
.text
je .LBB0_46
.zero 6, 0x90
je .LBB0_46
.LBB0_8:
.zero 134, 0x90
je .LBB0_8
.zero 4, 0x90
je .LBB0_8
.zero 8, 0x90
je .LBB0_46
.zero 10, 0x90
je .LBB0_8
.zero 4, 0x90
je .LBB0_8
movq 304(%rsp), %r14
.zero 2, 0x90
je .LBB0_8
je .LBB0_8
movq 256(%rsp), %r14
.zero 3, 0x90
je .LBB0_46
.zero 10, 0x90
je .LBB0_8
.zero 13, 0x90
je .LBB0_8
leaq 432(%rsp), %rsi
je .LBB0_8
movq 176(%rsp), %r14
je .LBB0_46
je .LBB0_8
je .LBB0_8
leaq 424(%rsp), %rsi
je .LBB0_8
.zero 22, 0x90
je .LBB0_8
.zero 11, 0x90
je .LBB0_8
leaq 416(%rsp), %rsi
je .LBB0_8
.zero 21, 0x90
je .LBB0_46
.zero 8, 0x90
je .LBB0_8
.zero 11, 0x90
je .LBB0_8
.zero 7, 0x90
je .LBB0_8
.zero 22, 0x90
je .LBB0_46
.zero 131, 0x90
.LBB0_46:
.balign 16, 0x90
movq 168(%rsp), %rax
.zero 3, 0x90
je .LBB1_35
.balign 16, 0x90
.zero 2, 0x90
je .LBB1_35
.zero 37, 0x90
je .LBB1_35
.zero 59, 0x90
je .LBB1_35
.zero 68, 0x90
je .LBB1_17
.balign 16, 0x90
.LBB1_17:
.zero 85, 0x90
.LBB1_35:
nop
# ---- file boundary (extraction artifact removed) ----
# Following content is from: gas/testsuite/gas/i386/sse2avx.s
# (repo: tactcomplabs/xbgas-binutils-gdb)
# Check SSE to AVX instructions
.allow_index_reg
.text
_start:
# Tests for op mem64
ldmxcsr (%ecx)
stmxcsr (%ecx)
# These should not be converted
data16 ldmxcsr (%ecx)
data16 stmxcsr (%ecx)
# Tests for op xmm/mem128, xmm
cvtdq2ps %xmm4,%xmm6
cvtdq2ps (%ecx),%xmm4
cvtpd2dq %xmm4,%xmm6
cvtpd2dq (%ecx),%xmm4
cvtpd2ps %xmm4,%xmm6
cvtpd2ps (%ecx),%xmm4
cvtps2dq %xmm4,%xmm6
cvtps2dq (%ecx),%xmm4
cvttpd2dq %xmm4,%xmm6
cvttpd2dq (%ecx),%xmm4
cvttps2dq %xmm4,%xmm6
cvttps2dq (%ecx),%xmm4
movapd %xmm4,%xmm6
movapd (%ecx),%xmm4
movaps %xmm4,%xmm6
movaps (%ecx),%xmm4
movdqa %xmm4,%xmm6
movdqa (%ecx),%xmm4
movdqu %xmm4,%xmm6
movdqu (%ecx),%xmm4
movshdup %xmm4,%xmm6
movshdup (%ecx),%xmm4
movsldup %xmm4,%xmm6
movsldup (%ecx),%xmm4
movupd %xmm4,%xmm6
movupd (%ecx),%xmm4
movups %xmm4,%xmm6
movups (%ecx),%xmm4
pabsb %xmm4,%xmm6
pabsb (%ecx),%xmm4
pabsw %xmm4,%xmm6
pabsw (%ecx),%xmm4
pabsd %xmm4,%xmm6
pabsd (%ecx),%xmm4
phminposuw %xmm4,%xmm6
phminposuw (%ecx),%xmm4
ptest %xmm4,%xmm6
ptest (%ecx),%xmm4
rcpps %xmm4,%xmm6
rcpps (%ecx),%xmm4
rsqrtps %xmm4,%xmm6
rsqrtps (%ecx),%xmm4
sqrtpd %xmm4,%xmm6
sqrtpd (%ecx),%xmm4
sqrtps %xmm4,%xmm6
sqrtps (%ecx),%xmm4
aesimc %xmm4,%xmm6
aesimc (%ecx),%xmm4
# Tests for op xmm, xmm/mem128
movapd %xmm4,%xmm6
movapd %xmm4,(%ecx)
movaps %xmm4,%xmm6
movaps %xmm4,(%ecx)
movdqa %xmm4,%xmm6
movdqa %xmm4,(%ecx)
movdqu %xmm4,%xmm6
movdqu %xmm4,(%ecx)
movupd %xmm4,%xmm6
movupd %xmm4,(%ecx)
movups %xmm4,%xmm6
movups %xmm4,(%ecx)
# Tests for op mem128, xmm
lddqu (%ecx),%xmm4
movntdqa (%ecx),%xmm4
# Tests for op xmm, mem128
movntdq %xmm4,(%ecx)
movntpd %xmm4,(%ecx)
movntps %xmm4,(%ecx)
# Tests for op xmm/mem128, xmm[, xmm]
addpd %xmm4,%xmm6
addpd (%ecx),%xmm6
addps %xmm4,%xmm6
addps (%ecx),%xmm6
addsubpd %xmm4,%xmm6
addsubpd (%ecx),%xmm6
addsubps %xmm4,%xmm6
addsubps (%ecx),%xmm6
andnpd %xmm4,%xmm6
andnpd (%ecx),%xmm6
andnps %xmm4,%xmm6
andnps (%ecx),%xmm6
andpd %xmm4,%xmm6
andpd (%ecx),%xmm6
andps %xmm4,%xmm6
andps (%ecx),%xmm6
divpd %xmm4,%xmm6
divpd (%ecx),%xmm6
divps %xmm4,%xmm6
divps (%ecx),%xmm6
haddpd %xmm4,%xmm6
haddpd (%ecx),%xmm6
haddps %xmm4,%xmm6
haddps (%ecx),%xmm6
hsubpd %xmm4,%xmm6
hsubpd (%ecx),%xmm6
hsubps %xmm4,%xmm6
hsubps (%ecx),%xmm6
maxpd %xmm4,%xmm6
maxpd (%ecx),%xmm6
maxps %xmm4,%xmm6
maxps (%ecx),%xmm6
minpd %xmm4,%xmm6
minpd (%ecx),%xmm6
minps %xmm4,%xmm6
minps (%ecx),%xmm6
mulpd %xmm4,%xmm6
mulpd (%ecx),%xmm6
mulps %xmm4,%xmm6
mulps (%ecx),%xmm6
orpd %xmm4,%xmm6
orpd (%ecx),%xmm6
orps %xmm4,%xmm6
orps (%ecx),%xmm6
packsswb %xmm4,%xmm6
packsswb (%ecx),%xmm6
packssdw %xmm4,%xmm6
packssdw (%ecx),%xmm6
packuswb %xmm4,%xmm6
packuswb (%ecx),%xmm6
packusdw %xmm4,%xmm6
packusdw (%ecx),%xmm6
paddb %xmm4,%xmm6
paddb (%ecx),%xmm6
paddw %xmm4,%xmm6
paddw (%ecx),%xmm6
paddd %xmm4,%xmm6
paddd (%ecx),%xmm6
paddq %xmm4,%xmm6
paddq (%ecx),%xmm6
paddsb %xmm4,%xmm6
paddsb (%ecx),%xmm6
paddsw %xmm4,%xmm6
paddsw (%ecx),%xmm6
paddusb %xmm4,%xmm6
paddusb (%ecx),%xmm6
paddusw %xmm4,%xmm6
paddusw (%ecx),%xmm6
pand %xmm4,%xmm6
pand (%ecx),%xmm6
pandn %xmm4,%xmm6
pandn (%ecx),%xmm6
pavgb %xmm4,%xmm6
pavgb (%ecx),%xmm6
pavgw %xmm4,%xmm6
pavgw (%ecx),%xmm6
pclmullqlqdq %xmm4,%xmm6
pclmullqlqdq (%ecx),%xmm6
pclmulhqlqdq %xmm4,%xmm6
pclmulhqlqdq (%ecx),%xmm6
pclmullqhqdq %xmm4,%xmm6
pclmullqhqdq (%ecx),%xmm6
pclmulhqhqdq %xmm4,%xmm6
pclmulhqhqdq (%ecx),%xmm6
pcmpeqb %xmm4,%xmm6
pcmpeqb (%ecx),%xmm6
pcmpeqw %xmm4,%xmm6
pcmpeqw (%ecx),%xmm6
pcmpeqd %xmm4,%xmm6
pcmpeqd (%ecx),%xmm6
pcmpeqq %xmm4,%xmm6
pcmpeqq (%ecx),%xmm6
pcmpgtb %xmm4,%xmm6
pcmpgtb (%ecx),%xmm6
pcmpgtw %xmm4,%xmm6
pcmpgtw (%ecx),%xmm6
pcmpgtd %xmm4,%xmm6
pcmpgtd (%ecx),%xmm6
pcmpgtq %xmm4,%xmm6
pcmpgtq (%ecx),%xmm6
phaddw %xmm4,%xmm6
phaddw (%ecx),%xmm6
phaddd %xmm4,%xmm6
phaddd (%ecx),%xmm6
phaddsw %xmm4,%xmm6
phaddsw (%ecx),%xmm6
phsubw %xmm4,%xmm6
phsubw (%ecx),%xmm6
phsubd %xmm4,%xmm6
phsubd (%ecx),%xmm6
phsubsw %xmm4,%xmm6
phsubsw (%ecx),%xmm6
pmaddwd %xmm4,%xmm6
pmaddwd (%ecx),%xmm6
pmaddubsw %xmm4,%xmm6
pmaddubsw (%ecx),%xmm6
pmaxsb %xmm4,%xmm6
pmaxsb (%ecx),%xmm6
pmaxsw %xmm4,%xmm6
pmaxsw (%ecx),%xmm6
pmaxsd %xmm4,%xmm6
pmaxsd (%ecx),%xmm6
pmaxub %xmm4,%xmm6
pmaxub (%ecx),%xmm6
pmaxuw %xmm4,%xmm6
pmaxuw (%ecx),%xmm6
pmaxud %xmm4,%xmm6
pmaxud (%ecx),%xmm6
pminsb %xmm4,%xmm6
pminsb (%ecx),%xmm6
pminsw %xmm4,%xmm6
pminsw (%ecx),%xmm6
pminsd %xmm4,%xmm6
pminsd (%ecx),%xmm6
pminub %xmm4,%xmm6
pminub (%ecx),%xmm6
pminuw %xmm4,%xmm6
pminuw (%ecx),%xmm6
pminud %xmm4,%xmm6
pminud (%ecx),%xmm6
pmulhuw %xmm4,%xmm6
pmulhuw (%ecx),%xmm6
pmulhrsw %xmm4,%xmm6
pmulhrsw (%ecx),%xmm6
pmulhw %xmm4,%xmm6
pmulhw (%ecx),%xmm6
pmullw %xmm4,%xmm6
pmullw (%ecx),%xmm6
pmulld %xmm4,%xmm6
pmulld (%ecx),%xmm6
pmuludq %xmm4,%xmm6
pmuludq (%ecx),%xmm6
pmuldq %xmm4,%xmm6
pmuldq (%ecx),%xmm6
por %xmm4,%xmm6
por (%ecx),%xmm6
psadbw %xmm4,%xmm6
psadbw (%ecx),%xmm6
pshufb %xmm4,%xmm6
pshufb (%ecx),%xmm6
psignb %xmm4,%xmm6
psignb (%ecx),%xmm6
psignw %xmm4,%xmm6
psignw (%ecx),%xmm6
psignd %xmm4,%xmm6
psignd (%ecx),%xmm6
psllw %xmm4,%xmm6
psllw (%ecx),%xmm6
pslld %xmm4,%xmm6
pslld (%ecx),%xmm6
psllq %xmm4,%xmm6
psllq (%ecx),%xmm6
psraw %xmm4,%xmm6
psraw (%ecx),%xmm6
psrad %xmm4,%xmm6
psrad (%ecx),%xmm6
psrlw %xmm4,%xmm6
psrlw (%ecx),%xmm6
psrld %xmm4,%xmm6
psrld (%ecx),%xmm6
psrlq %xmm4,%xmm6
psrlq (%ecx),%xmm6
psubb %xmm4,%xmm6
psubb (%ecx),%xmm6
psubw %xmm4,%xmm6
psubw (%ecx),%xmm6
psubd %xmm4,%xmm6
psubd (%ecx),%xmm6
psubq %xmm4,%xmm6
psubq (%ecx),%xmm6
psubsb %xmm4,%xmm6
psubsb (%ecx),%xmm6
psubsw %xmm4,%xmm6
psubsw (%ecx),%xmm6
psubusb %xmm4,%xmm6
psubusb (%ecx),%xmm6
psubusw %xmm4,%xmm6
psubusw (%ecx),%xmm6
punpckhbw %xmm4,%xmm6
punpckhbw (%ecx),%xmm6
punpckhwd %xmm4,%xmm6
punpckhwd (%ecx),%xmm6
punpckhdq %xmm4,%xmm6
punpckhdq (%ecx),%xmm6
punpckhqdq %xmm4,%xmm6
punpckhqdq (%ecx),%xmm6
punpcklbw %xmm4,%xmm6
punpcklbw (%ecx),%xmm6
punpcklwd %xmm4,%xmm6
punpcklwd (%ecx),%xmm6
punpckldq %xmm4,%xmm6
punpckldq (%ecx),%xmm6
punpcklqdq %xmm4,%xmm6
punpcklqdq (%ecx),%xmm6
pxor %xmm4,%xmm6
pxor (%ecx),%xmm6
subpd %xmm4,%xmm6
subpd (%ecx),%xmm6
subps %xmm4,%xmm6
subps (%ecx),%xmm6
unpckhpd %xmm4,%xmm6
unpckhpd (%ecx),%xmm6
unpckhps %xmm4,%xmm6
unpckhps (%ecx),%xmm6
unpcklpd %xmm4,%xmm6
unpcklpd (%ecx),%xmm6
unpcklps %xmm4,%xmm6
unpcklps (%ecx),%xmm6
xorpd %xmm4,%xmm6
xorpd (%ecx),%xmm6
xorps %xmm4,%xmm6
xorps (%ecx),%xmm6
aesenc %xmm4,%xmm6
aesenc (%ecx),%xmm6
aesenclast %xmm4,%xmm6
aesenclast (%ecx),%xmm6
aesdec %xmm4,%xmm6
aesdec (%ecx),%xmm6
aesdeclast %xmm4,%xmm6
aesdeclast (%ecx),%xmm6
cmpeqpd %xmm4,%xmm6
cmpeqpd (%ecx),%xmm6
cmpeqps %xmm4,%xmm6
cmpeqps (%ecx),%xmm6
cmpltpd %xmm4,%xmm6
cmpltpd (%ecx),%xmm6
cmpltps %xmm4,%xmm6
cmpltps (%ecx),%xmm6
cmplepd %xmm4,%xmm6
cmplepd (%ecx),%xmm6
cmpleps %xmm4,%xmm6
cmpleps (%ecx),%xmm6
cmpunordpd %xmm4,%xmm6
cmpunordpd (%ecx),%xmm6
cmpunordps %xmm4,%xmm6
cmpunordps (%ecx),%xmm6
cmpneqpd %xmm4,%xmm6
cmpneqpd (%ecx),%xmm6
cmpneqps %xmm4,%xmm6
cmpneqps (%ecx),%xmm6
cmpnltpd %xmm4,%xmm6
cmpnltpd (%ecx),%xmm6
cmpnltps %xmm4,%xmm6
cmpnltps (%ecx),%xmm6
cmpnlepd %xmm4,%xmm6
cmpnlepd (%ecx),%xmm6
cmpnleps %xmm4,%xmm6
cmpnleps (%ecx),%xmm6
cmpordpd %xmm4,%xmm6
cmpordpd (%ecx),%xmm6
cmpordps %xmm4,%xmm6
cmpordps (%ecx),%xmm6
# Tests for op imm8, xmm/mem128, xmm
aeskeygenassist $100,%xmm4,%xmm6
aeskeygenassist $100,(%ecx),%xmm6
pcmpestri $100,%xmm4,%xmm6
pcmpestri $100,(%ecx),%xmm6
pcmpestrm $100,%xmm4,%xmm6
pcmpestrm $100,(%ecx),%xmm6
pcmpistri $100,%xmm4,%xmm6
pcmpistri $100,(%ecx),%xmm6
pcmpistrm $100,%xmm4,%xmm6
pcmpistrm $100,(%ecx),%xmm6
pshufd $100,%xmm4,%xmm6
pshufd $100,(%ecx),%xmm6
pshufhw $100,%xmm4,%xmm6
pshufhw $100,(%ecx),%xmm6
pshuflw $100,%xmm4,%xmm6
pshuflw $100,(%ecx),%xmm6
roundpd $100,%xmm4,%xmm6
roundpd $100,(%ecx),%xmm6
roundps $100,%xmm4,%xmm6
roundps $100,(%ecx),%xmm6
# Tests for op imm8, xmm/mem128, xmm[, xmm]
blendpd $100,%xmm4,%xmm6
blendpd $100,(%ecx),%xmm6
blendps $100,%xmm4,%xmm6
blendps $100,(%ecx),%xmm6
cmppd $100,%xmm4,%xmm6
cmppd $100,(%ecx),%xmm6
cmpps $100,%xmm4,%xmm6
cmpps $100,(%ecx),%xmm6
dppd $100,%xmm4,%xmm6
dppd $100,(%ecx),%xmm6
dpps $100,%xmm4,%xmm6
dpps $100,(%ecx),%xmm6
mpsadbw $100,%xmm4,%xmm6
mpsadbw $100,(%ecx),%xmm6
palignr $100,%xmm4,%xmm6
palignr $100,(%ecx),%xmm6
pblendw $100,%xmm4,%xmm6
pblendw $100,(%ecx),%xmm6
pclmulqdq $100,%xmm4,%xmm6
pclmulqdq $100,(%ecx),%xmm6
shufpd $100,%xmm4,%xmm6
shufpd $100,(%ecx),%xmm6
shufps $100,%xmm4,%xmm6
shufps $100,(%ecx),%xmm6
# Tests for op xmm0, xmm/mem128, xmm[, xmm]
blendvpd %xmm0,%xmm4,%xmm6
blendvpd %xmm0,(%ecx),%xmm6
blendvpd %xmm4,%xmm6
blendvpd (%ecx),%xmm6
blendvps %xmm0,%xmm4,%xmm6
blendvps %xmm0,(%ecx),%xmm6
blendvps %xmm4,%xmm6
blendvps (%ecx),%xmm6
pblendvb %xmm0,%xmm4,%xmm6
pblendvb %xmm0,(%ecx),%xmm6
pblendvb %xmm4,%xmm6
pblendvb (%ecx),%xmm6
# Tests for op xmm/mem64, xmm
comisd %xmm4,%xmm6
comisd (%ecx),%xmm4
cvtdq2pd %xmm4,%xmm6
cvtdq2pd (%ecx),%xmm4
cvtpi2pd (%ecx),%xmm4
cvtps2pd %xmm4,%xmm6
cvtps2pd (%ecx),%xmm4
movddup %xmm4,%xmm6
movddup (%ecx),%xmm4
pmovsxbw %xmm4,%xmm6
pmovsxbw (%ecx),%xmm4
pmovsxwd %xmm4,%xmm6
pmovsxwd (%ecx),%xmm4
pmovsxdq %xmm4,%xmm6
pmovsxdq (%ecx),%xmm4
pmovzxbw %xmm4,%xmm6
pmovzxbw (%ecx),%xmm4
pmovzxwd %xmm4,%xmm6
pmovzxwd (%ecx),%xmm4
pmovzxdq %xmm4,%xmm6
pmovzxdq (%ecx),%xmm4
ucomisd %xmm4,%xmm6
ucomisd (%ecx),%xmm4
# Tests for op mem64, xmm
movsd (%ecx),%xmm4
# Tests for op xmm, mem64
movlpd %xmm4,(%ecx)
movlps %xmm4,(%ecx)
movhpd %xmm4,(%ecx)
movhps %xmm4,(%ecx)
movsd %xmm4,(%ecx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
movq %xmm4,(%ecx)
movq (%ecx),%xmm4
# Tests for op xmm/mem64, regl
cvtsd2si %xmm4,%ecx
cvtsd2si (%ecx),%ecx
cvttsd2si %xmm4,%ecx
cvttsd2si (%ecx),%ecx
# Tests for op mem64, xmm[, xmm]
movlpd (%ecx),%xmm4
movlps (%ecx),%xmm4
movhpd (%ecx),%xmm4
movhps (%ecx),%xmm4
# Tests for op imm8, xmm/mem64, xmm[, xmm]
cmpsd $100,%xmm4,%xmm6
cmpsd $100,(%ecx),%xmm6
roundsd $100,%xmm4,%xmm6
roundsd $100,(%ecx),%xmm6
# Tests for op xmm/mem64, xmm[, xmm]
addsd %xmm4,%xmm6
addsd (%ecx),%xmm6
cvtsd2ss %xmm4,%xmm6
cvtsd2ss (%ecx),%xmm6
divsd %xmm4,%xmm6
divsd (%ecx),%xmm6
maxsd %xmm4,%xmm6
maxsd (%ecx),%xmm6
minsd %xmm4,%xmm6
minsd (%ecx),%xmm6
mulsd %xmm4,%xmm6
mulsd (%ecx),%xmm6
sqrtsd %xmm4,%xmm6
sqrtsd (%ecx),%xmm6
subsd %xmm4,%xmm6
subsd (%ecx),%xmm6
cmpeqsd %xmm4,%xmm6
cmpeqsd (%ecx),%xmm6
cmpltsd %xmm4,%xmm6
cmpltsd (%ecx),%xmm6
cmplesd %xmm4,%xmm6
cmplesd (%ecx),%xmm6
cmpunordsd %xmm4,%xmm6
cmpunordsd (%ecx),%xmm6
cmpneqsd %xmm4,%xmm6
cmpneqsd (%ecx),%xmm6
cmpnltsd %xmm4,%xmm6
cmpnltsd (%ecx),%xmm6
cmpnlesd %xmm4,%xmm6
cmpnlesd (%ecx),%xmm6
cmpordsd %xmm4,%xmm6
cmpordsd (%ecx),%xmm6
# Tests for op xmm/mem32, xmm[, xmm]
addss %xmm4,%xmm6
addss (%ecx),%xmm6
cvtss2sd %xmm4,%xmm6
cvtss2sd (%ecx),%xmm6
divss %xmm4,%xmm6
divss (%ecx),%xmm6
maxss %xmm4,%xmm6
maxss (%ecx),%xmm6
minss %xmm4,%xmm6
minss (%ecx),%xmm6
mulss %xmm4,%xmm6
mulss (%ecx),%xmm6
rcpss %xmm4,%xmm6
rcpss (%ecx),%xmm6
rsqrtss %xmm4,%xmm6
rsqrtss (%ecx),%xmm6
sqrtss %xmm4,%xmm6
sqrtss (%ecx),%xmm6
subss %xmm4,%xmm6
subss (%ecx),%xmm6
cmpeqss %xmm4,%xmm6
cmpeqss (%ecx),%xmm6
cmpltss %xmm4,%xmm6
cmpltss (%ecx),%xmm6
cmpless %xmm4,%xmm6
cmpless (%ecx),%xmm6
cmpunordss %xmm4,%xmm6
cmpunordss (%ecx),%xmm6
cmpneqss %xmm4,%xmm6
cmpneqss (%ecx),%xmm6
cmpnltss %xmm4,%xmm6
cmpnltss (%ecx),%xmm6
cmpnless %xmm4,%xmm6
cmpnless (%ecx),%xmm6
cmpordss %xmm4,%xmm6
cmpordss (%ecx),%xmm6
# Tests for op xmm/mem32, xmm
comiss %xmm4,%xmm6
comiss (%ecx),%xmm4
pmovsxbd %xmm4,%xmm6
pmovsxbd (%ecx),%xmm4
pmovsxwq %xmm4,%xmm6
pmovsxwq (%ecx),%xmm4
pmovzxbd %xmm4,%xmm6
pmovzxbd (%ecx),%xmm4
pmovzxwq %xmm4,%xmm6
pmovzxwq (%ecx),%xmm4
ucomiss %xmm4,%xmm6
ucomiss (%ecx),%xmm4
# Tests for op mem32, xmm
movss (%ecx),%xmm4
# Tests for op xmm, mem32
movss %xmm4,(%ecx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
movd %xmm4,%ecx
movd %xmm4,(%ecx)
movd %ecx,%xmm4
movd (%ecx),%xmm4
# Tests for op xmm/mem32, regl
cvtss2si %xmm4,%ecx
cvtss2si (%ecx),%ecx
cvttss2si %xmm4,%ecx
cvttss2si (%ecx),%ecx
# Tests for op imm8, xmm, regq/mem32
extractps $100,%xmm4,(%ecx)
# Tests for op imm8, xmm, regl/mem32
pextrd $100,%xmm4,%ecx
pextrd $100,%xmm4,(%ecx)
extractps $100,%xmm4,%ecx
extractps $100,%xmm4,(%ecx)
# Tests for op regl/mem32, xmm[, xmm]
cvtsi2sd %ecx,%xmm4
cvtsi2sd (%ecx),%xmm4
cvtsi2ss %ecx,%xmm4
cvtsi2ss (%ecx),%xmm4
# Tests for op imm8, xmm/mem32, xmm[, xmm]
cmpss $100,%xmm4,%xmm6
cmpss $100,(%ecx),%xmm6
insertps $100,%xmm4,%xmm6
insertps $100,(%ecx),%xmm6
roundss $100,%xmm4,%xmm6
roundss $100,(%ecx),%xmm6
# Tests for op xmm/m16, xmm
pmovsxbq %xmm4,%xmm6
pmovsxbq (%ecx),%xmm4
pmovzxbq %xmm4,%xmm6
pmovzxbq (%ecx),%xmm4
# Tests for op imm8, xmm, regl/mem16
pextrw $100,%xmm4,%ecx
pextrw $100,%xmm4,(%ecx)
# Tests for op imm8, xmm, regq/mem16
pextrw $100,%xmm4,(%ecx)
# Tests for op imm8, regl/mem16, xmm[, xmm]
pinsrw $100,%ecx,%xmm4
pinsrw $100,(%ecx),%xmm4
# Tests for op imm8, xmm, regl/mem8
pextrb $100,%xmm4,%ecx
pextrb $100,%xmm4,(%ecx)
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb $100,%ecx,%xmm4
pinsrb $100,(%ecx),%xmm4
# Tests for op imm8, xmm, regq/mem8
pextrb $100,%xmm4,(%ecx)
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb $100,%ecx,%xmm4
pinsrb $100,(%ecx),%xmm4
# Tests for op xmm, xmm
maskmovdqu %xmm4,%xmm6
movq %xmm4,%xmm6
# Tests for op xmm, regl
movmskpd %xmm4,%ecx
movmskps %xmm4,%ecx
pmovmskb %xmm4,%ecx
# Tests for op xmm, xmm[, xmm]
movhlps %xmm4,%xmm6
movlhps %xmm4,%xmm6
movsd %xmm4,%xmm6
movss %xmm4,%xmm6
# Tests for op imm8, xmm[, xmm]
pslld $100,%xmm4
pslldq $100,%xmm4
psllq $100,%xmm4
psllw $100,%xmm4
psrad $100,%xmm4
psraw $100,%xmm4
psrld $100,%xmm4
psrldq $100,%xmm4
psrlq $100,%xmm4
psrlw $100,%xmm4
# Tests for op imm8, xmm, regl
pextrw $100,%xmm4,%ecx
.intel_syntax noprefix
# Tests for op mem64
ldmxcsr DWORD PTR [ecx]
stmxcsr DWORD PTR [ecx]
# Tests for op xmm/mem128, xmm
cvtdq2ps xmm6,xmm4
cvtdq2ps xmm4,XMMWORD PTR [ecx]
cvtpd2dq xmm6,xmm4
cvtpd2dq xmm4,XMMWORD PTR [ecx]
cvtpd2ps xmm6,xmm4
cvtpd2ps xmm4,XMMWORD PTR [ecx]
cvtps2dq xmm6,xmm4
cvtps2dq xmm4,XMMWORD PTR [ecx]
cvttpd2dq xmm6,xmm4
cvttpd2dq xmm4,XMMWORD PTR [ecx]
cvttps2dq xmm6,xmm4
cvttps2dq xmm4,XMMWORD PTR [ecx]
movapd xmm6,xmm4
movapd xmm4,XMMWORD PTR [ecx]
movaps xmm6,xmm4
movaps xmm4,XMMWORD PTR [ecx]
movdqa xmm6,xmm4
movdqa xmm4,XMMWORD PTR [ecx]
movdqu xmm6,xmm4
movdqu xmm4,XMMWORD PTR [ecx]
movshdup xmm6,xmm4
movshdup xmm4,XMMWORD PTR [ecx]
movsldup xmm6,xmm4
movsldup xmm4,XMMWORD PTR [ecx]
movupd xmm6,xmm4
movupd xmm4,XMMWORD PTR [ecx]
movups xmm6,xmm4
movups xmm4,XMMWORD PTR [ecx]
pabsb xmm6,xmm4
pabsb xmm4,XMMWORD PTR [ecx]
pabsw xmm6,xmm4
pabsw xmm4,XMMWORD PTR [ecx]
pabsd xmm6,xmm4
pabsd xmm4,XMMWORD PTR [ecx]
phminposuw xmm6,xmm4
phminposuw xmm4,XMMWORD PTR [ecx]
ptest xmm6,xmm4
ptest xmm4,XMMWORD PTR [ecx]
rcpps xmm6,xmm4
rcpps xmm4,XMMWORD PTR [ecx]
rsqrtps xmm6,xmm4
rsqrtps xmm4,XMMWORD PTR [ecx]
sqrtpd xmm6,xmm4
sqrtpd xmm4,XMMWORD PTR [ecx]
sqrtps xmm6,xmm4
sqrtps xmm4,XMMWORD PTR [ecx]
aesimc xmm6,xmm4
aesimc xmm4,XMMWORD PTR [ecx]
# Tests for op xmm, xmm/mem128
movapd xmm6,xmm4
movapd XMMWORD PTR [ecx],xmm4
movaps xmm6,xmm4
movaps XMMWORD PTR [ecx],xmm4
movdqa xmm6,xmm4
movdqa XMMWORD PTR [ecx],xmm4
movdqu xmm6,xmm4
movdqu XMMWORD PTR [ecx],xmm4
movupd xmm6,xmm4
movupd XMMWORD PTR [ecx],xmm4
movups xmm6,xmm4
movups XMMWORD PTR [ecx],xmm4
# Tests for op mem128, xmm
lddqu xmm4,XMMWORD PTR [ecx]
movntdqa xmm4,XMMWORD PTR [ecx]
# Tests for op xmm, mem128
movntdq XMMWORD PTR [ecx],xmm4
movntpd XMMWORD PTR [ecx],xmm4
movntps XMMWORD PTR [ecx],xmm4
# Tests for op xmm/mem128, xmm[, xmm]
addpd xmm6,xmm4
addpd xmm6,XMMWORD PTR [ecx]
addps xmm6,xmm4
addps xmm6,XMMWORD PTR [ecx]
addsubpd xmm6,xmm4
addsubpd xmm6,XMMWORD PTR [ecx]
addsubps xmm6,xmm4
addsubps xmm6,XMMWORD PTR [ecx]
andnpd xmm6,xmm4
andnpd xmm6,XMMWORD PTR [ecx]
andnps xmm6,xmm4
andnps xmm6,XMMWORD PTR [ecx]
andpd xmm6,xmm4
andpd xmm6,XMMWORD PTR [ecx]
andps xmm6,xmm4
andps xmm6,XMMWORD PTR [ecx]
divpd xmm6,xmm4
divpd xmm6,XMMWORD PTR [ecx]
divps xmm6,xmm4
divps xmm6,XMMWORD PTR [ecx]
haddpd xmm6,xmm4
haddpd xmm6,XMMWORD PTR [ecx]
haddps xmm6,xmm4
haddps xmm6,XMMWORD PTR [ecx]
hsubpd xmm6,xmm4
hsubpd xmm6,XMMWORD PTR [ecx]
hsubps xmm6,xmm4
hsubps xmm6,XMMWORD PTR [ecx]
maxpd xmm6,xmm4
maxpd xmm6,XMMWORD PTR [ecx]
maxps xmm6,xmm4
maxps xmm6,XMMWORD PTR [ecx]
minpd xmm6,xmm4
minpd xmm6,XMMWORD PTR [ecx]
minps xmm6,xmm4
minps xmm6,XMMWORD PTR [ecx]
mulpd xmm6,xmm4
mulpd xmm6,XMMWORD PTR [ecx]
mulps xmm6,xmm4
mulps xmm6,XMMWORD PTR [ecx]
orpd xmm6,xmm4
orpd xmm6,XMMWORD PTR [ecx]
orps xmm6,xmm4
orps xmm6,XMMWORD PTR [ecx]
packsswb xmm6,xmm4
packsswb xmm6,XMMWORD PTR [ecx]
packssdw xmm6,xmm4
packssdw xmm6,XMMWORD PTR [ecx]
packuswb xmm6,xmm4
packuswb xmm6,XMMWORD PTR [ecx]
packusdw xmm6,xmm4
packusdw xmm6,XMMWORD PTR [ecx]
paddb xmm6,xmm4
paddb xmm6,XMMWORD PTR [ecx]
paddw xmm6,xmm4
paddw xmm6,XMMWORD PTR [ecx]
paddd xmm6,xmm4
paddd xmm6,XMMWORD PTR [ecx]
paddq xmm6,xmm4
paddq xmm6,XMMWORD PTR [ecx]
paddsb xmm6,xmm4
paddsb xmm6,XMMWORD PTR [ecx]
paddsw xmm6,xmm4
paddsw xmm6,XMMWORD PTR [ecx]
paddusb xmm6,xmm4
paddusb xmm6,XMMWORD PTR [ecx]
paddusw xmm6,xmm4
paddusw xmm6,XMMWORD PTR [ecx]
pand xmm6,xmm4
pand xmm6,XMMWORD PTR [ecx]
pandn xmm6,xmm4
pandn xmm6,XMMWORD PTR [ecx]
pavgb xmm6,xmm4
pavgb xmm6,XMMWORD PTR [ecx]
pavgw xmm6,xmm4
pavgw xmm6,XMMWORD PTR [ecx]
pclmullqlqdq xmm6,xmm4
pclmullqlqdq xmm6,XMMWORD PTR [ecx]
pclmulhqlqdq xmm6,xmm4
pclmulhqlqdq xmm6,XMMWORD PTR [ecx]
pclmullqhqdq xmm6,xmm4
pclmullqhqdq xmm6,XMMWORD PTR [ecx]
pclmulhqhqdq xmm6,xmm4
pclmulhqhqdq xmm6,XMMWORD PTR [ecx]
pcmpeqb xmm6,xmm4
pcmpeqb xmm6,XMMWORD PTR [ecx]
pcmpeqw xmm6,xmm4
pcmpeqw xmm6,XMMWORD PTR [ecx]
pcmpeqd xmm6,xmm4
pcmpeqd xmm6,XMMWORD PTR [ecx]
pcmpeqq xmm6,xmm4
pcmpeqq xmm6,XMMWORD PTR [ecx]
pcmpgtb xmm6,xmm4
pcmpgtb xmm6,XMMWORD PTR [ecx]
pcmpgtw xmm6,xmm4
pcmpgtw xmm6,XMMWORD PTR [ecx]
pcmpgtd xmm6,xmm4
pcmpgtd xmm6,XMMWORD PTR [ecx]
pcmpgtq xmm6,xmm4
pcmpgtq xmm6,XMMWORD PTR [ecx]
phaddw xmm6,xmm4
phaddw xmm6,XMMWORD PTR [ecx]
phaddd xmm6,xmm4
phaddd xmm6,XMMWORD PTR [ecx]
phaddsw xmm6,xmm4
phaddsw xmm6,XMMWORD PTR [ecx]
phsubw xmm6,xmm4
phsubw xmm6,XMMWORD PTR [ecx]
phsubd xmm6,xmm4
phsubd xmm6,XMMWORD PTR [ecx]
phsubsw xmm6,xmm4
phsubsw xmm6,XMMWORD PTR [ecx]
pmaddwd xmm6,xmm4
pmaddwd xmm6,XMMWORD PTR [ecx]
pmaddubsw xmm6,xmm4
pmaddubsw xmm6,XMMWORD PTR [ecx]
pmaxsb xmm6,xmm4
pmaxsb xmm6,XMMWORD PTR [ecx]
pmaxsw xmm6,xmm4
pmaxsw xmm6,XMMWORD PTR [ecx]
pmaxsd xmm6,xmm4
pmaxsd xmm6,XMMWORD PTR [ecx]
pmaxub xmm6,xmm4
pmaxub xmm6,XMMWORD PTR [ecx]
pmaxuw xmm6,xmm4
pmaxuw xmm6,XMMWORD PTR [ecx]
pmaxud xmm6,xmm4
pmaxud xmm6,XMMWORD PTR [ecx]
pminsb xmm6,xmm4
pminsb xmm6,XMMWORD PTR [ecx]
pminsw xmm6,xmm4
pminsw xmm6,XMMWORD PTR [ecx]
pminsd xmm6,xmm4
pminsd xmm6,XMMWORD PTR [ecx]
pminub xmm6,xmm4
pminub xmm6,XMMWORD PTR [ecx]
pminuw xmm6,xmm4
pminuw xmm6,XMMWORD PTR [ecx]
pminud xmm6,xmm4
pminud xmm6,XMMWORD PTR [ecx]
pmulhuw xmm6,xmm4
pmulhuw xmm6,XMMWORD PTR [ecx]
pmulhrsw xmm6,xmm4
pmulhrsw xmm6,XMMWORD PTR [ecx]
pmulhw xmm6,xmm4
pmulhw xmm6,XMMWORD PTR [ecx]
pmullw xmm6,xmm4
pmullw xmm6,XMMWORD PTR [ecx]
pmulld xmm6,xmm4
pmulld xmm6,XMMWORD PTR [ecx]
pmuludq xmm6,xmm4
pmuludq xmm6,XMMWORD PTR [ecx]
pmuldq xmm6,xmm4
pmuldq xmm6,XMMWORD PTR [ecx]
por xmm6,xmm4
por xmm6,XMMWORD PTR [ecx]
psadbw xmm6,xmm4
psadbw xmm6,XMMWORD PTR [ecx]
pshufb xmm6,xmm4
pshufb xmm6,XMMWORD PTR [ecx]
psignb xmm6,xmm4
psignb xmm6,XMMWORD PTR [ecx]
psignw xmm6,xmm4
psignw xmm6,XMMWORD PTR [ecx]
psignd xmm6,xmm4
psignd xmm6,XMMWORD PTR [ecx]
psllw xmm6,xmm4
psllw xmm6,XMMWORD PTR [ecx]
pslld xmm6,xmm4
pslld xmm6,XMMWORD PTR [ecx]
psllq xmm6,xmm4
psllq xmm6,XMMWORD PTR [ecx]
psraw xmm6,xmm4
psraw xmm6,XMMWORD PTR [ecx]
psrad xmm6,xmm4
psrad xmm6,XMMWORD PTR [ecx]
psrlw xmm6,xmm4
psrlw xmm6,XMMWORD PTR [ecx]
psrld xmm6,xmm4
psrld xmm6,XMMWORD PTR [ecx]
psrlq xmm6,xmm4
psrlq xmm6,XMMWORD PTR [ecx]
psubb xmm6,xmm4
psubb xmm6,XMMWORD PTR [ecx]
psubw xmm6,xmm4
psubw xmm6,XMMWORD PTR [ecx]
psubd xmm6,xmm4
psubd xmm6,XMMWORD PTR [ecx]
psubq xmm6,xmm4
psubq xmm6,XMMWORD PTR [ecx]
psubsb xmm6,xmm4
psubsb xmm6,XMMWORD PTR [ecx]
psubsw xmm6,xmm4
psubsw xmm6,XMMWORD PTR [ecx]
psubusb xmm6,xmm4
psubusb xmm6,XMMWORD PTR [ecx]
psubusw xmm6,xmm4
psubusw xmm6,XMMWORD PTR [ecx]
punpckhbw xmm6,xmm4
punpckhbw xmm6,XMMWORD PTR [ecx]
punpckhwd xmm6,xmm4
punpckhwd xmm6,XMMWORD PTR [ecx]
punpckhdq xmm6,xmm4
punpckhdq xmm6,XMMWORD PTR [ecx]
punpckhqdq xmm6,xmm4
punpckhqdq xmm6,XMMWORD PTR [ecx]
punpcklbw xmm6,xmm4
punpcklbw xmm6,XMMWORD PTR [ecx]
punpcklwd xmm6,xmm4
punpcklwd xmm6,XMMWORD PTR [ecx]
punpckldq xmm6,xmm4
punpckldq xmm6,XMMWORD PTR [ecx]
punpcklqdq xmm6,xmm4
punpcklqdq xmm6,XMMWORD PTR [ecx]
pxor xmm6,xmm4
pxor xmm6,XMMWORD PTR [ecx]
subpd xmm6,xmm4
subpd xmm6,XMMWORD PTR [ecx]
subps xmm6,xmm4
subps xmm6,XMMWORD PTR [ecx]
unpckhpd xmm6,xmm4
unpckhpd xmm6,XMMWORD PTR [ecx]
unpckhps xmm6,xmm4
unpckhps xmm6,XMMWORD PTR [ecx]
unpcklpd xmm6,xmm4
unpcklpd xmm6,XMMWORD PTR [ecx]
unpcklps xmm6,xmm4
unpcklps xmm6,XMMWORD PTR [ecx]
xorpd xmm6,xmm4
xorpd xmm6,XMMWORD PTR [ecx]
xorps xmm6,xmm4
xorps xmm6,XMMWORD PTR [ecx]
aesenc xmm6,xmm4
aesenc xmm6,XMMWORD PTR [ecx]
aesenclast xmm6,xmm4
aesenclast xmm6,XMMWORD PTR [ecx]
aesdec xmm6,xmm4
aesdec xmm6,XMMWORD PTR [ecx]
aesdeclast xmm6,xmm4
aesdeclast xmm6,XMMWORD PTR [ecx]
cmpeqpd xmm6,xmm4
cmpeqpd xmm6,XMMWORD PTR [ecx]
cmpeqps xmm6,xmm4
cmpeqps xmm6,XMMWORD PTR [ecx]
cmpltpd xmm6,xmm4
cmpltpd xmm6,XMMWORD PTR [ecx]
cmpltps xmm6,xmm4
cmpltps xmm6,XMMWORD PTR [ecx]
cmplepd xmm6,xmm4
cmplepd xmm6,XMMWORD PTR [ecx]
cmpleps xmm6,xmm4
cmpleps xmm6,XMMWORD PTR [ecx]
cmpunordpd xmm6,xmm4
cmpunordpd xmm6,XMMWORD PTR [ecx]
cmpunordps xmm6,xmm4
cmpunordps xmm6,XMMWORD PTR [ecx]
cmpneqpd xmm6,xmm4
cmpneqpd xmm6,XMMWORD PTR [ecx]
cmpneqps xmm6,xmm4
cmpneqps xmm6,XMMWORD PTR [ecx]
cmpnltpd xmm6,xmm4
cmpnltpd xmm6,XMMWORD PTR [ecx]
cmpnltps xmm6,xmm4
cmpnltps xmm6,XMMWORD PTR [ecx]
cmpnlepd xmm6,xmm4
cmpnlepd xmm6,XMMWORD PTR [ecx]
cmpnleps xmm6,xmm4
cmpnleps xmm6,XMMWORD PTR [ecx]
cmpordpd xmm6,xmm4
cmpordpd xmm6,XMMWORD PTR [ecx]
cmpordps xmm6,xmm4
cmpordps xmm6,XMMWORD PTR [ecx]
# Tests for op imm8, xmm/mem128, xmm
aeskeygenassist xmm6,xmm4,100
aeskeygenassist xmm6,XMMWORD PTR [ecx],100
pcmpestri xmm6,xmm4,100
pcmpestri xmm6,XMMWORD PTR [ecx],100
pcmpestrm xmm6,xmm4,100
pcmpestrm xmm6,XMMWORD PTR [ecx],100
pcmpistri xmm6,xmm4,100
pcmpistri xmm6,XMMWORD PTR [ecx],100
pcmpistrm xmm6,xmm4,100
pcmpistrm xmm6,XMMWORD PTR [ecx],100
pshufd xmm6,xmm4,100
pshufd xmm6,XMMWORD PTR [ecx],100
pshufhw xmm6,xmm4,100
pshufhw xmm6,XMMWORD PTR [ecx],100
pshuflw xmm6,xmm4,100
pshuflw xmm6,XMMWORD PTR [ecx],100
roundpd xmm6,xmm4,100
roundpd xmm6,XMMWORD PTR [ecx],100
roundps xmm6,xmm4,100
roundps xmm6,XMMWORD PTR [ecx],100
# Tests for op imm8, xmm/mem128, xmm[, xmm]
blendpd xmm6,xmm4,100
blendpd xmm6,XMMWORD PTR [ecx],100
blendps xmm6,xmm4,100
blendps xmm6,XMMWORD PTR [ecx],100
cmppd xmm6,xmm4,100
cmppd xmm6,XMMWORD PTR [ecx],100
cmpps xmm6,xmm4,100
cmpps xmm6,XMMWORD PTR [ecx],100
dppd xmm6,xmm4,100
dppd xmm6,XMMWORD PTR [ecx],100
dpps xmm6,xmm4,100
dpps xmm6,XMMWORD PTR [ecx],100
mpsadbw xmm6,xmm4,100
mpsadbw xmm6,XMMWORD PTR [ecx],100
palignr xmm6,xmm4,100
palignr xmm6,XMMWORD PTR [ecx],100
pblendw xmm6,xmm4,100
pblendw xmm6,XMMWORD PTR [ecx],100
pclmulqdq xmm6,xmm4,100
pclmulqdq xmm6,XMMWORD PTR [ecx],100
shufpd xmm6,xmm4,100
shufpd xmm6,XMMWORD PTR [ecx],100
shufps xmm6,xmm4,100
shufps xmm6,XMMWORD PTR [ecx],100
# Tests for op xmm0, xmm/mem128, xmm[, xmm]
blendvpd xmm6,xmm4,xmm0
blendvpd xmm6,XMMWORD PTR [ecx],xmm0
blendvpd xmm6,xmm4
blendvpd xmm6,XMMWORD PTR [ecx]
blendvps xmm6,xmm4,xmm0
blendvps xmm6,XMMWORD PTR [ecx],xmm0
blendvps xmm6,xmm4
blendvps xmm6,XMMWORD PTR [ecx]
pblendvb xmm6,xmm4,xmm0
pblendvb xmm6,XMMWORD PTR [ecx],xmm0
pblendvb xmm6,xmm4
pblendvb xmm6,XMMWORD PTR [ecx]
# Tests for op xmm/mem64, xmm
comisd xmm6,xmm4
comisd xmm4,QWORD PTR [ecx]
cvtdq2pd xmm6,xmm4
cvtdq2pd xmm4,QWORD PTR [ecx]
cvtpi2pd xmm4,QWORD PTR [ecx]
cvtps2pd xmm6,xmm4
cvtps2pd xmm4,QWORD PTR [ecx]
movddup xmm6,xmm4
movddup xmm4,QWORD PTR [ecx]
pmovsxbw xmm6,xmm4
pmovsxbw xmm4,QWORD PTR [ecx]
pmovsxwd xmm6,xmm4
pmovsxwd xmm4,QWORD PTR [ecx]
pmovsxdq xmm6,xmm4
pmovsxdq xmm4,QWORD PTR [ecx]
pmovzxbw xmm6,xmm4
pmovzxbw xmm4,QWORD PTR [ecx]
pmovzxwd xmm6,xmm4
pmovzxwd xmm4,QWORD PTR [ecx]
pmovzxdq xmm6,xmm4
pmovzxdq xmm4,QWORD PTR [ecx]
ucomisd xmm6,xmm4
ucomisd xmm4,QWORD PTR [ecx]
# Tests for op mem64, xmm
movsd xmm4,QWORD PTR [ecx]
# Tests for op xmm, mem64
movlpd QWORD PTR [ecx],xmm4
movlps QWORD PTR [ecx],xmm4
movhpd QWORD PTR [ecx],xmm4
movhps QWORD PTR [ecx],xmm4
movsd QWORD PTR [ecx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
movq QWORD PTR [ecx],xmm4
movq xmm4,QWORD PTR [ecx]
# Tests for op xmm/mem64, regl
cvtsd2si ecx,xmm4
cvtsd2si ecx,QWORD PTR [ecx]
cvttsd2si ecx,xmm4
cvttsd2si ecx,QWORD PTR [ecx]
# Tests for op mem64, xmm[, xmm]
movlpd xmm4,QWORD PTR [ecx]
movlps xmm4,QWORD PTR [ecx]
movhpd xmm4,QWORD PTR [ecx]
movhps xmm4,QWORD PTR [ecx]
# Tests for op imm8, xmm/mem64, xmm[, xmm]
cmpsd xmm6,xmm4,100
cmpsd xmm6,QWORD PTR [ecx],100
roundsd xmm6,xmm4,100
roundsd xmm6,QWORD PTR [ecx],100
# Tests for op xmm/mem64, xmm[, xmm]
addsd xmm6,xmm4
addsd xmm6,QWORD PTR [ecx]
cvtsd2ss xmm6,xmm4
cvtsd2ss xmm6,QWORD PTR [ecx]
divsd xmm6,xmm4
divsd xmm6,QWORD PTR [ecx]
maxsd xmm6,xmm4
maxsd xmm6,QWORD PTR [ecx]
minsd xmm6,xmm4
minsd xmm6,QWORD PTR [ecx]
mulsd xmm6,xmm4
mulsd xmm6,QWORD PTR [ecx]
sqrtsd xmm6,xmm4
sqrtsd xmm6,QWORD PTR [ecx]
subsd xmm6,xmm4
subsd xmm6,QWORD PTR [ecx]
cmpeqsd xmm6,xmm4
cmpeqsd xmm6,QWORD PTR [ecx]
cmpltsd xmm6,xmm4
cmpltsd xmm6,QWORD PTR [ecx]
cmplesd xmm6,xmm4
cmplesd xmm6,QWORD PTR [ecx]
cmpunordsd xmm6,xmm4
cmpunordsd xmm6,QWORD PTR [ecx]
cmpneqsd xmm6,xmm4
cmpneqsd xmm6,QWORD PTR [ecx]
cmpnltsd xmm6,xmm4
cmpnltsd xmm6,QWORD PTR [ecx]
cmpnlesd xmm6,xmm4
cmpnlesd xmm6,QWORD PTR [ecx]
cmpordsd xmm6,xmm4
cmpordsd xmm6,QWORD PTR [ecx]
# Tests for op xmm/mem32, xmm[, xmm]
addss xmm6,xmm4
addss xmm6,DWORD PTR [ecx]
cvtss2sd xmm6,xmm4
cvtss2sd xmm6,DWORD PTR [ecx]
divss xmm6,xmm4
divss xmm6,DWORD PTR [ecx]
maxss xmm6,xmm4
maxss xmm6,DWORD PTR [ecx]
minss xmm6,xmm4
minss xmm6,DWORD PTR [ecx]
mulss xmm6,xmm4
mulss xmm6,DWORD PTR [ecx]
rcpss xmm6,xmm4
rcpss xmm6,DWORD PTR [ecx]
rsqrtss xmm6,xmm4
rsqrtss xmm6,DWORD PTR [ecx]
sqrtss xmm6,xmm4
sqrtss xmm6,DWORD PTR [ecx]
subss xmm6,xmm4
subss xmm6,DWORD PTR [ecx]
cmpeqss xmm6,xmm4
cmpeqss xmm6,DWORD PTR [ecx]
cmpltss xmm6,xmm4
cmpltss xmm6,DWORD PTR [ecx]
cmpless xmm6,xmm4
cmpless xmm6,DWORD PTR [ecx]
cmpunordss xmm6,xmm4
cmpunordss xmm6,DWORD PTR [ecx]
cmpneqss xmm6,xmm4
cmpneqss xmm6,DWORD PTR [ecx]
cmpnltss xmm6,xmm4
cmpnltss xmm6,DWORD PTR [ecx]
cmpnless xmm6,xmm4
cmpnless xmm6,DWORD PTR [ecx]
cmpordss xmm6,xmm4
cmpordss xmm6,DWORD PTR [ecx]
# Tests for op xmm/mem32, xmm
comiss xmm6,xmm4
comiss xmm4,DWORD PTR [ecx]
pmovsxbd xmm6,xmm4
pmovsxbd xmm4,DWORD PTR [ecx]
pmovsxwq xmm6,xmm4
pmovsxwq xmm4,DWORD PTR [ecx]
pmovzxbd xmm6,xmm4
pmovzxbd xmm4,DWORD PTR [ecx]
pmovzxwq xmm6,xmm4
pmovzxwq xmm4,DWORD PTR [ecx]
ucomiss xmm6,xmm4
ucomiss xmm4,DWORD PTR [ecx]
# Tests for op mem32, xmm
movss xmm4,DWORD PTR [ecx]
# Tests for op xmm, mem32
movss DWORD PTR [ecx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
movd ecx,xmm4
movd DWORD PTR [ecx],xmm4
movd xmm4,ecx
movd xmm4,DWORD PTR [ecx]
# Tests for op xmm/mem32, regl
cvtss2si ecx,xmm4
cvtss2si ecx,DWORD PTR [ecx]
cvttss2si ecx,xmm4
cvttss2si ecx,DWORD PTR [ecx]
# Tests for op imm8, xmm, regq/mem32
extractps DWORD PTR [ecx],xmm4,100
# Tests for op imm8, xmm, regl/mem32
pextrd ecx,xmm4,100
pextrd DWORD PTR [ecx],xmm4,100
extractps ecx,xmm4,100
extractps DWORD PTR [ecx],xmm4,100
# Tests for op regl/mem32, xmm[, xmm]
cvtsi2sd xmm4,ecx
cvtsi2sd xmm4,DWORD PTR [ecx]
cvtsi2ss xmm4,ecx
cvtsi2ss xmm4,DWORD PTR [ecx]
# Tests for op imm8, xmm/mem32, xmm[, xmm]
cmpss xmm6,xmm4,100
cmpss xmm6,DWORD PTR [ecx],100
insertps xmm6,xmm4,100
insertps xmm6,DWORD PTR [ecx],100
roundss xmm6,xmm4,100
roundss xmm6,DWORD PTR [ecx],100
# Tests for op xmm/m16, xmm
pmovsxbq xmm6,xmm4
pmovsxbq xmm4,WORD PTR [ecx]
pmovzxbq xmm6,xmm4
pmovzxbq xmm4,WORD PTR [ecx]
# Tests for op imm8, xmm, regl/mem16
pextrw ecx,xmm4,100
pextrw WORD PTR [ecx],xmm4,100
# Tests for op imm8, xmm, regq/mem16
pextrw WORD PTR [ecx],xmm4,100
# Tests for op imm8, regl/mem16, xmm[, xmm]
pinsrw xmm4,ecx,100
pinsrw xmm4,WORD PTR [ecx],100
# Tests for op imm8, xmm, regl/mem8
pextrb ecx,xmm4,100
pextrb BYTE PTR [ecx],xmm4,100
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb xmm4,ecx,100
pinsrb xmm4,BYTE PTR [ecx],100
# Tests for op imm8, xmm, regq/mem8
pextrb BYTE PTR [ecx],xmm4,100
# Tests for op imm8, regl/mem8, xmm[, xmm]
pinsrb xmm4,ecx,100
pinsrb xmm4,BYTE PTR [ecx],100
# Tests for op xmm, xmm
maskmovdqu xmm6,xmm4
movq xmm6,xmm4
# Tests for op xmm, regl
movmskpd ecx,xmm4
movmskps ecx,xmm4
pmovmskb ecx,xmm4
# Tests for op xmm, xmm[, xmm]
movhlps xmm6,xmm4
movlhps xmm6,xmm4
movsd xmm6,xmm4
movss xmm6,xmm4
# Tests for op imm8, xmm[, xmm]
pslld xmm4,100
pslldq xmm4,100
psllq xmm4,100
psllw xmm4,100
psrad xmm4,100
psraw xmm4,100
psrld xmm4,100
psrldq xmm4,100
psrlq xmm4,100
psrlw xmm4,100
# Tests for op imm8, xmm, regl
pextrw ecx,xmm4,100
|
tactcomplabs/xbgas-binutils-gdb
| 2,282
|
gas/testsuite/gas/i386/noavx512-2.s
|
# Test .arch .noavx512vl — AVX512VL-dependent (xmm/ymm) forms must error after the directive
.text
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX512CD
vpconflictd %xmm5, %xmm6 # AVX512CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX512CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512vl # from here on, "+ AVX512VL" forms above are expected to be rejected
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX512CD
vpconflictd %xmm5, %xmm6 # AVX512CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX512CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
vpabsb %xmm5, %xmm6 # plain AVX form, still valid without AVX512VL
vpabsb %ymm5, %ymm6 # plain AVX2 form
vaddpd %xmm4, %xmm5, %xmm6 # plain AVX form
vaddpd %ymm4, %ymm5, %ymm6 # plain AVX form
pabsb %xmm5, %xmm6 # legacy SSSE3 form
addpd %xmm4, %xmm6 # legacy SSE2 form
.intel_syntax noprefix # AVX512DQ forms in Intel syntax (mask-register destination)
vfpclasspd k0, [eax], 0
vfpclassps k0, [eax+0x80], 0
.p2align 4
|
tactcomplabs/xbgas-binutils-gdb
| 3,501
|
gas/testsuite/gas/i386/sse2.s
|
# SSE2 encoding test, AT&T syntax, 32-bit addressing (ia32 target).
# Exercises register/register and register/memory forms of each SSE2
# instruction; comment lines are stripped by gas and do not affect the
# disassembly that the accompanying dump file matches against.
foo:
# Streaming store and memory-ordering fences.
movnti %eax, (%eax)
sfence
lfence
mfence
# Packed/scalar double arithmetic and logic, reg and mem operands.
addpd (%ecx),%xmm0
addpd %xmm2,%xmm1
addsd (%ebx),%xmm2
addsd %xmm4,%xmm3
andnpd 0x0(%ebp),%xmm4
andnpd %xmm6,%xmm5
andpd (%edi),%xmm6
andpd %xmm0,%xmm7
# Compares: explicit-immediate forms, then the pseudo-op mnemonics.
cmppd $0x2,%xmm1,%xmm0
cmppd $0x3,(%edx),%xmm1
cmpsd $0x4,%xmm2,%xmm2
cmpsd $0x5,(%esp,1),%xmm3
cmppd $0x6,%xmm5,%xmm4
cmppd $0x7,(%esi),%xmm5
cmpsd $0x0,%xmm7,%xmm6
cmpsd $0x1,(%eax),%xmm7
cmpeqpd %xmm1,%xmm0
cmpeqpd (%edx),%xmm1
cmpeqsd %xmm2,%xmm2
cmpeqsd (%esp,1),%xmm3
cmpltpd %xmm5,%xmm4
cmpltpd (%esi),%xmm5
cmpltsd %xmm7,%xmm6
cmpltsd (%eax),%xmm7
cmplepd (%ecx),%xmm0
cmplepd %xmm2,%xmm1
cmplesd (%ebx),%xmm2
cmplesd %xmm4,%xmm3
cmpunordpd 0x0(%ebp),%xmm4
cmpunordpd %xmm6,%xmm5
cmpunordsd (%edi),%xmm6
cmpunordsd %xmm0,%xmm7
cmpneqpd %xmm1,%xmm0
cmpneqpd (%edx),%xmm1
cmpneqsd %xmm2,%xmm2
cmpneqsd (%esp,1),%xmm3
cmpnltpd %xmm5,%xmm4
cmpnltpd (%esi),%xmm5
cmpnltsd %xmm7,%xmm6
cmpnltsd (%eax),%xmm7
cmpnlepd (%ecx),%xmm0
cmpnlepd %xmm2,%xmm1
cmpnlesd (%ebx),%xmm2
cmpnlesd %xmm4,%xmm3
cmpordpd 0x0(%ebp),%xmm4
cmpordpd %xmm6,%xmm5
cmpordsd (%edi),%xmm6
cmpordsd %xmm0,%xmm7
comisd %xmm1,%xmm0
comisd (%edx),%xmm1
# Conversions between int/MMX/GPR and double forms.
cvtpi2pd %mm3,%xmm2
cvtpi2pd (%esp,1),%xmm3
cvtsi2sd %ebp,%xmm4
cvtsi2sd (%esi),%xmm5
cvtpd2pi %xmm7,%mm6
cvtpd2pi (%eax),%mm7
cvtsd2si (%ecx),%eax
cvtsd2si %xmm2,%ecx
cvttpd2pi (%ebx),%mm2
cvttpd2pi %xmm4,%mm3
cvttsd2si 0x0(%ebp),%esp
cvttsd2si %xmm6,%ebp
divpd %xmm1,%xmm0
divpd (%edx),%xmm1
divsd %xmm3,%xmm2
divsd (%esp,1),%xmm3
# MXCSR access.
ldmxcsr 0x0(%ebp)
stmxcsr (%esi)
sfence
maxpd %xmm1,%xmm0
maxpd (%edx),%xmm1
maxsd %xmm3,%xmm2
maxsd (%esp,1),%xmm3
minpd %xmm5,%xmm4
minpd (%esi),%xmm5
minsd %xmm7,%xmm6
minsd (%eax),%xmm7
# Double-precision moves: aligned, high/low halves, unaligned, scalar.
movapd %xmm1,%xmm0
movapd %xmm2,(%ecx)
movapd (%edx),%xmm2
movhpd %xmm5,(%esp,1)
movhpd (%esi),%xmm5
movlpd %xmm0,(%edi)
movlpd (%eax),%xmm0
movmskpd %xmm2,%ecx
movupd %xmm3,%xmm2
movupd %xmm4,(%edx)
movupd 0x0(%ebp),%xmm4
movsd %xmm6,%xmm5
movsd %xmm7,(%esi)
movsd (%eax),%xmm7
mulpd %xmm1,%xmm0
mulpd (%edx),%xmm1
mulsd %xmm2,%xmm2
mulsd (%esp,1),%xmm3
orpd %xmm5,%xmm4
orpd (%esi),%xmm5
shufpd $0x2,(%edi),%xmm6
shufpd $0x3,%xmm0,%xmm7
sqrtpd %xmm1,%xmm0
sqrtpd (%edx),%xmm1
sqrtsd %xmm2,%xmm2
sqrtsd (%esp,1),%xmm3
subpd %xmm5,%xmm4
subpd (%esi),%xmm5
subsd %xmm7,%xmm6
subsd (%eax),%xmm7
ucomisd (%ecx),%xmm0
ucomisd %xmm2,%xmm1
unpckhpd (%ebx),%xmm2
unpckhpd %xmm4,%xmm3
unpcklpd 0x0(%ebp),%xmm4
unpcklpd %xmm6,%xmm5
xorpd (%edi),%xmm6
xorpd %xmm0,%xmm7
movntpd %xmm6,(%ebx)
xorpd %xmm0, %xmm1
# Packed int/float conversion family.
cvtdq2pd %xmm0, %xmm1
cvtpd2dq %xmm0, %xmm1
cvtdq2ps %xmm0, %xmm1
cvtpd2ps %xmm0, %xmm1
cvtps2pd %xmm0, %xmm1
cvtps2dq %xmm0, %xmm1
cvtsd2ss %xmm0, %xmm1
cvtss2sd %xmm0, %xmm1
cvttpd2dq %xmm0, %xmm1
cvttps2dq %xmm0, %xmm1
# SSE2 integer extensions of MMX ops on xmm registers.
maskmovdqu %xmm0, %xmm1
movdqa %xmm0, %xmm1
movdqa %xmm0, (%esi)
movdqu %xmm0, %xmm1
movdqu %xmm0, (%esi)
movdq2q %xmm0, %mm1
movq2dq %mm0, %xmm1
pmuludq %mm0, %mm1
pmuludq (%eax), %mm1
pmuludq %xmm0, %xmm1
pmuludq (%eax), %xmm1
pshufd $1, %xmm0, %xmm1
pshufhw $1, %xmm0, %xmm1
pshuflw $1, %xmm0, %xmm1
pslldq $1, %xmm0
psrldq $1, %xmm0
punpckhqdq %xmm0, %xmm1
paddq %mm1,%mm0
paddq (%eax),%mm0
paddq %xmm1,%xmm0
paddq (%eax),%xmm0
psubq %mm1,%mm0
psubq (%eax),%mm0
psubq %xmm1,%xmm0
psubq (%eax),%xmm0
|
tactcomplabs/xbgas-binutils-gdb
| 2,324
|
gas/testsuite/gas/i386/x86-64-fma4.s
|
# Check 64bit FMA4 instructions
# AMD FMA4 is 4-operand: dst, src1, src2/mem, src3/mem — the memory
# operand may appear in either source position (encoded via VEX.W).
.allow_index_reg
.text
_start:
# 256-bit packed multiply-add forms (ymm).
vfmaddpd %ymm4,%ymm6,%ymm2,%ymm7
vfmaddpd (%rcx),%ymm6,%ymm2,%ymm7
vfmaddps %ymm4,%ymm6,%ymm2,%ymm7
vfmaddps (%rcx),%ymm6,%ymm2,%ymm7
vfmaddps %xmm4,0x01(%rdx,%rbx,8),%xmm7,%xmm11
vfmaddps %xmm8,0x80(%rcx,%rax,4),%xmm6,%xmm4
vfmaddsubpd %ymm4,%ymm6,%ymm2,%ymm7
vfmaddsubpd (%rcx),%ymm6,%ymm2,%ymm7
vfmaddsubps %ymm4,%ymm6,%ymm2,%ymm7
vfmaddsubps (%rcx),%ymm6,%ymm2,%ymm7
# 128-bit packed forms (xmm); memory operand in each source slot.
vfmaddpd %xmm4,%xmm6,%xmm2,%xmm7
vfmaddpd (%rcx),%xmm6,%xmm2,%xmm7
vfmaddpd %xmm4,(%rcx),%xmm2,%xmm7
vfmaddps %xmm4,%xmm6,%xmm2,%xmm7
vfmaddps (%rcx),%xmm6,%xmm2,%xmm7
vfmaddps %xmm4,(%rcx),%xmm2,%xmm7
vfmaddsubpd %xmm4,%xmm6,%xmm2,%xmm7
vfmaddsubpd (%rcx),%xmm6,%xmm2,%xmm7
vfmaddsubpd %xmm4,(%rcx),%xmm2,%xmm7
vfmaddsubps %xmm4,%xmm6,%xmm2,%xmm7
vfmaddsubps (%rcx),%xmm6,%xmm2,%xmm7
vfmaddsubps %xmm4,(%rcx),%xmm2,%xmm7
# Scalar forms.
vfmaddsd %xmm4,%xmm6,%xmm2,%xmm7
vfmaddsd (%rcx),%xmm6,%xmm2,%xmm7
vfmaddsd %xmm4,(%rcx),%xmm2,%xmm7
vfmaddss %xmm4,%xmm6,%xmm2,%xmm7
vfmaddss (%rcx),%xmm6,%xmm2,%xmm7
vfmaddss %xmm4,(%rcx),%xmm2,%xmm7
# Negated multiply-add / multiply-subtract variants.
vfnmaddpd %ymm4,%ymm6,%ymm2,%ymm7
vfnmaddpd (%rcx),%ymm6,%ymm2,%ymm7
vfnmaddps %ymm4,%ymm6,%ymm2,%ymm7
vfnmaddps (%rcx),%ymm6,%ymm2,%ymm7
vfnmsubpd %ymm4,%ymm6,%ymm2,%ymm7
vfnmsubpd (%rcx),%ymm6,%ymm2,%ymm7
vfnmsubps %ymm4,%ymm6,%ymm2,%ymm7
vfnmsubps (%rcx),%ymm6,%ymm2,%ymm7
vfnmaddpd %xmm4,%xmm6,%xmm2,%xmm7
vfnmaddpd (%rcx),%xmm6,%xmm2,%xmm7
vfnmaddpd %xmm4,(%rcx),%xmm2,%xmm7
vfnmaddps %xmm4,%xmm6,%xmm2,%xmm7
vfnmaddps (%rcx),%xmm6,%xmm2,%xmm7
vfnmaddps %xmm4,(%rcx),%xmm2,%xmm7
vfnmsubpd %xmm4,%xmm6,%xmm2,%xmm7
vfnmsubpd (%rcx),%xmm6,%xmm2,%xmm7
vfnmsubpd %xmm4,(%rcx),%xmm2,%xmm7
vfnmsubps %xmm4,%xmm6,%xmm2,%xmm7
vfnmsubps (%rcx),%xmm6,%xmm2,%xmm7
vfnmsubps %xmm4,(%rcx),%xmm2,%xmm7
vfnmaddsd %xmm4,%xmm6,%xmm2,%xmm7
vfnmaddsd (%rcx),%xmm6,%xmm2,%xmm7
vfnmaddsd %xmm4,(%rcx),%xmm2,%xmm7
vfnmsubsd %xmm4,%xmm6,%xmm2,%xmm7
vfnmsubsd (%rcx),%xmm6,%xmm2,%xmm7
vfnmsubsd %xmm4,(%rcx),%xmm2,%xmm7
vfnmaddss %xmm4,%xmm6,%xmm2,%xmm7
vfnmaddss (%rcx),%xmm6,%xmm2,%xmm7
vfnmaddss %xmm4,(%rcx),%xmm2,%xmm7
vfnmsubss %xmm4,%xmm6,%xmm2,%xmm7
vfnmsubss (%rcx),%xmm6,%xmm2,%xmm7
# Base+index addressing with extended (r8-r15) base registers.
vfmaddpd (%r13,%rcx),%xmm11,%xmm3,%xmm4
vfmaddpd 0xbe(%r9,%rax,8),%xmm9,%xmm1,%xmm7
vfmsubpd (%r13,%rcx),%xmm11,%xmm3,%xmm4
|
tactcomplabs/xbgas-binutils-gdb
| 715,565
|
gas/testsuite/gas/i386/x86-64-avx512f_vl.s
|
# Check 64bit AVX512{F,VL} instructions
.allow_index_reg
.text
_start:
vaddpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vaddpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vaddpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vaddpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vaddpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vaddpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vaddpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vaddpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vaddps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vaddps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vaddps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vaddps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vaddps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vaddps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vaddps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vaddps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vaddps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vaddps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vaddps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vaddps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vaddps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vaddps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vaddps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vaddps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
valignd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
valignd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
valignd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
valignd $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
valignd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
valignd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
valignd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
valignd $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vblendmpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vblendmpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vblendmpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vblendmpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vblendmps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vblendmps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vblendmps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vblendmps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vbroadcastf32x4 (%rcx), %ymm30 # AVX512{F,VL}
vbroadcastf32x4 (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcastf32x4 (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastf32x4 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcastf32x4 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastf32x4 2048(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastf32x4 -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastf32x4 -2064(%rdx), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 (%rcx), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcasti32x4 (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcasti32x4 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcasti32x4 2048(%rdx), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcasti32x4 -2064(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastsd (%rcx), %ymm30 # AVX512{F,VL}
vbroadcastsd (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcastsd (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastsd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcastsd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastsd 1024(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastsd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastsd -1032(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastsd %xmm29, %ymm30 # AVX512{F,VL}
vbroadcastsd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vbroadcastsd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastss (%rcx), %xmm30 # AVX512{F,VL}
vbroadcastss (%rcx), %xmm30{%k7} # AVX512{F,VL}
vbroadcastss (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vbroadcastss 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vbroadcastss 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vbroadcastss 512(%rdx), %xmm30 # AVX512{F,VL}
vbroadcastss -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vbroadcastss -516(%rdx), %xmm30 # AVX512{F,VL}
vbroadcastss (%rcx), %ymm30 # AVX512{F,VL}
vbroadcastss (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcastss (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastss 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcastss 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastss 512(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastss -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastss -516(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastss %xmm29, %xmm30 # AVX512{F,VL}
vbroadcastss %xmm29, %xmm30{%k7} # AVX512{F,VL}
vbroadcastss %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vbroadcastss %xmm29, %ymm30 # AVX512{F,VL}
vbroadcastss %xmm29, %ymm30{%k7} # AVX512{F,VL}
vbroadcastss %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcmppd $0xab, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmppd $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, 1016(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, -1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -1032(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vcmppd $0xab, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmppd $0xab, %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, 1016(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, -1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -1032(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, 508(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, -512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -516(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, 508(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, -512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -516(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vcompresspd %xmm30, (%rcx) # AVX512{F,VL}
vcompresspd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vcompresspd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompresspd %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vcompresspd %xmm30, 1024(%rdx) # AVX512{F,VL}
vcompresspd %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vcompresspd %xmm30, -1032(%rdx) # AVX512{F,VL}
vcompresspd %ymm30, (%rcx) # AVX512{F,VL}
vcompresspd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vcompresspd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompresspd %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vcompresspd %ymm30, 1024(%rdx) # AVX512{F,VL}
vcompresspd %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vcompresspd %ymm30, -1032(%rdx) # AVX512{F,VL}
vcompresspd %xmm29, %xmm30 # AVX512{F,VL}
vcompresspd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcompresspd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcompresspd %ymm29, %ymm30 # AVX512{F,VL}
vcompresspd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcompresspd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcompressps %xmm30, (%rcx) # AVX512{F,VL}
vcompressps %xmm30, (%rcx){%k7} # AVX512{F,VL}
vcompressps %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompressps %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vcompressps %xmm30, 512(%rdx) # AVX512{F,VL}
vcompressps %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vcompressps %xmm30, -516(%rdx) # AVX512{F,VL}
vcompressps %ymm30, (%rcx) # AVX512{F,VL}
vcompressps %ymm30, (%rcx){%k7} # AVX512{F,VL}
# AVX512{F,VL} assembler test vectors (gas testsuite input).
# Each group below exercises one mnemonic across: reg-reg forms, {%k7}
# masking, {%k7}{z} zero-masking, plain/indexed memory operands,
# {1toN} embedded broadcast, and displacement values straddling the
# Disp8*N compression boundary ("Disp8" marks the in-range case; the
# adjacent out-of-range value forces a full 32-bit displacement).
# Do not alter operands or displacements: the expected-disassembly (.d)
# file matches these encodings exactly.

# ---- vcompressps: store form and reg-reg forms
vcompressps %ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
vcompressps %ymm30, 508(%rdx)	 # AVX512{F,VL} Disp8
vcompressps %ymm30, 512(%rdx)	 # AVX512{F,VL}
vcompressps %ymm30, -512(%rdx)	 # AVX512{F,VL} Disp8
vcompressps %ymm30, -516(%rdx)	 # AVX512{F,VL}
vcompressps %xmm29, %xmm30	 # AVX512{F,VL}
vcompressps %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcompressps %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcompressps %ymm29, %ymm30	 # AVX512{F,VL}
vcompressps %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vcompressps %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
# ---- vcvtdq2pd: 64-bit memory (xmm dest) and 128-bit memory (ymm dest)
vcvtdq2pd %xmm29, %xmm30	 # AVX512{F,VL}
vcvtdq2pd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtdq2pd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtdq2pd (%rcx), %xmm30	 # AVX512{F,VL}
vcvtdq2pd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtdq2pd (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtdq2pd 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2pd 1024(%rdx), %xmm30	 # AVX512{F,VL}
vcvtdq2pd -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2pd -1032(%rdx), %xmm30	 # AVX512{F,VL}
vcvtdq2pd 508(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2pd 512(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtdq2pd -512(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2pd -516(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtdq2pd %xmm29, %ymm30	 # AVX512{F,VL}
vcvtdq2pd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtdq2pd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtdq2pd (%rcx), %ymm30	 # AVX512{F,VL}
vcvtdq2pd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtdq2pd (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
vcvtdq2pd 2032(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2pd 2048(%rdx), %ymm30	 # AVX512{F,VL}
vcvtdq2pd -2048(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2pd -2064(%rdx), %ymm30	 # AVX512{F,VL}
vcvtdq2pd 508(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2pd 512(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
vcvtdq2pd -512(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2pd -516(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
# ---- vcvtdq2ps: xmm and ymm forms
vcvtdq2ps %xmm29, %xmm30	 # AVX512{F,VL}
vcvtdq2ps %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtdq2ps %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtdq2ps (%rcx), %xmm30	 # AVX512{F,VL}
vcvtdq2ps 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtdq2ps (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtdq2ps 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2ps 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtdq2ps -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2ps -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtdq2ps 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2ps 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtdq2ps -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtdq2ps -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtdq2ps %ymm29, %ymm30	 # AVX512{F,VL}
vcvtdq2ps %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtdq2ps %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtdq2ps (%rcx), %ymm30	 # AVX512{F,VL}
vcvtdq2ps 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtdq2ps (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtdq2ps 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2ps 4096(%rdx), %ymm30	 # AVX512{F,VL}
vcvtdq2ps -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2ps -4128(%rdx), %ymm30	 # AVX512{F,VL}
vcvtdq2ps 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2ps 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtdq2ps -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtdq2ps -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# ---- vcvtpd2dq: x/y suffixes disambiguate the memory-operand size
vcvtpd2dq %xmm29, %xmm30	 # AVX512{F,VL}
vcvtpd2dq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtpd2dq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtpd2dqx (%rcx), %xmm30	 # AVX512{F,VL}
vcvtpd2dqx 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtpd2dq (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2dqx 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqx 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2dqx -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqx -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2dqx 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqx 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2dqx -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqx -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2dq %ymm29, %xmm30	 # AVX512{F,VL}
vcvtpd2dq %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtpd2dq %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtpd2dqy (%rcx), %xmm30	 # AVX512{F,VL}
vcvtpd2dqy 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtpd2dq (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtpd2dqy 4064(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqy 4096(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2dqy -4096(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqy -4128(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2dqy 1016(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqy 1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtpd2dqy -1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2dqy -1032(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
# ---- vcvtpd2ps: x/y suffixed memory forms
vcvtpd2ps %xmm29, %xmm30	 # AVX512{F,VL}
vcvtpd2ps %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtpd2ps %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtpd2psx (%rcx), %xmm30	 # AVX512{F,VL}
vcvtpd2psx 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtpd2ps (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2psx 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psx 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2psx -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psx -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2psx 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psx 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2psx -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psx -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2ps %ymm29, %xmm30	 # AVX512{F,VL}
vcvtpd2ps %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtpd2ps %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtpd2psy (%rcx), %xmm30	 # AVX512{F,VL}
vcvtpd2psy 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtpd2ps (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtpd2psy 4064(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psy 4096(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2psy -4096(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psy -4128(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2psy 1016(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psy 1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtpd2psy -1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2psy -1032(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
# ---- vcvtpd2udq: x/y suffixed memory forms
vcvtpd2udq %xmm29, %xmm30	 # AVX512{F,VL}
vcvtpd2udq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtpd2udq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtpd2udqx (%rcx), %xmm30	 # AVX512{F,VL}
vcvtpd2udqx 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtpd2udq (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2udqx 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqx 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2udqx -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqx -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2udqx 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqx 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2udqx -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqx -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtpd2udq %ymm29, %xmm30	 # AVX512{F,VL}
vcvtpd2udq %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtpd2udq %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtpd2udqy (%rcx), %xmm30	 # AVX512{F,VL}
vcvtpd2udqy 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtpd2udq (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtpd2udqy 4064(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqy 4096(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2udqy -4096(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqy -4128(%rdx), %xmm30	 # AVX512{F,VL}
vcvtpd2udqy 1016(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqy 1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtpd2udqy -1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtpd2udqy -1032(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
# ---- vcvtph2ps: no broadcast forms for this instruction
vcvtph2ps %xmm29, %xmm30	 # AVX512{F,VL}
vcvtph2ps %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtph2ps %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtph2ps (%rcx), %xmm30	 # AVX512{F,VL}
vcvtph2ps 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtph2ps 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtph2ps 1024(%rdx), %xmm30	 # AVX512{F,VL}
vcvtph2ps -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtph2ps -1032(%rdx), %xmm30	 # AVX512{F,VL}
vcvtph2ps %xmm29, %ymm30	 # AVX512{F,VL}
vcvtph2ps %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtph2ps %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtph2ps (%rcx), %ymm30	 # AVX512{F,VL}
vcvtph2ps 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtph2ps 2032(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtph2ps 2048(%rdx), %ymm30	 # AVX512{F,VL}
vcvtph2ps -2048(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtph2ps -2064(%rdx), %ymm30	 # AVX512{F,VL}
# ---- vcvtps2dq
vcvtps2dq %xmm29, %xmm30	 # AVX512{F,VL}
vcvtps2dq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtps2dq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2dq (%rcx), %xmm30	 # AVX512{F,VL}
vcvtps2dq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtps2dq (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtps2dq 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtps2dq 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtps2dq -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtps2dq -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtps2dq 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtps2dq 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtps2dq -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtps2dq -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtps2dq %ymm29, %ymm30	 # AVX512{F,VL}
vcvtps2dq %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtps2dq %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2dq (%rcx), %ymm30	 # AVX512{F,VL}
vcvtps2dq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtps2dq (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtps2dq 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtps2dq 4096(%rdx), %ymm30	 # AVX512{F,VL}
vcvtps2dq -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtps2dq -4128(%rdx), %ymm30	 # AVX512{F,VL}
vcvtps2dq 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtps2dq 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtps2dq -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtps2dq -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# ---- vcvtps2pd
vcvtps2pd %xmm29, %xmm30	 # AVX512{F,VL}
vcvtps2pd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtps2pd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2pd (%rcx), %xmm30	 # AVX512{F,VL}
vcvtps2pd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtps2pd (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtps2pd 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtps2pd 1024(%rdx), %xmm30	 # AVX512{F,VL}
vcvtps2pd -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtps2pd -1032(%rdx), %xmm30	 # AVX512{F,VL}
vcvtps2pd 508(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtps2pd 512(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtps2pd -512(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtps2pd -516(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtps2pd %xmm29, %ymm30	 # AVX512{F,VL}
vcvtps2pd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtps2pd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2pd (%rcx), %ymm30	 # AVX512{F,VL}
vcvtps2pd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtps2pd (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
vcvtps2pd 2032(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtps2pd 2048(%rdx), %ymm30	 # AVX512{F,VL}
vcvtps2pd -2048(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtps2pd -2064(%rdx), %ymm30	 # AVX512{F,VL}
vcvtps2pd 508(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
vcvtps2pd 512(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
vcvtps2pd -512(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
vcvtps2pd -516(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
# ---- vcvtps2ph: immediate (rounding-control) forms only
vcvtps2ph $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
vcvtps2ph $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtps2ph $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2ph $123, %xmm29, %xmm30	 # AVX512{F,VL}
vcvtps2ph $0xab, %ymm29, %xmm30	 # AVX512{F,VL}
vcvtps2ph $0xab, %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtps2ph $0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2ph $123, %ymm29, %xmm30	 # AVX512{F,VL}
# ---- vcvtps2udq
vcvtps2udq %xmm29, %xmm30	 # AVX512{F,VL}
vcvtps2udq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtps2udq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2udq (%rcx), %xmm30	 # AVX512{F,VL}
vcvtps2udq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtps2udq (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtps2udq 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtps2udq 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtps2udq -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtps2udq -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtps2udq 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtps2udq 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtps2udq -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtps2udq -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtps2udq %ymm29, %ymm30	 # AVX512{F,VL}
vcvtps2udq %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtps2udq %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtps2udq (%rcx), %ymm30	 # AVX512{F,VL}
vcvtps2udq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtps2udq (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtps2udq 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtps2udq 4096(%rdx), %ymm30	 # AVX512{F,VL}
vcvtps2udq -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtps2udq -4128(%rdx), %ymm30	 # AVX512{F,VL}
vcvtps2udq 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtps2udq 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtps2udq -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtps2udq -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# ---- vcvttpd2dq: truncating variant, x/y suffixed memory forms
vcvttpd2dq %xmm29, %xmm30	 # AVX512{F,VL}
vcvttpd2dq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvttpd2dq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvttpd2dqx (%rcx), %xmm30	 # AVX512{F,VL}
vcvttpd2dqx 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvttpd2dq (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvttpd2dqx 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqx 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvttpd2dqx -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqx -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvttpd2dqx 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqx 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvttpd2dqx -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqx -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvttpd2dq %ymm29, %xmm30	 # AVX512{F,VL}
vcvttpd2dq %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvttpd2dq %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvttpd2dqy (%rcx), %xmm30	 # AVX512{F,VL}
vcvttpd2dqy 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvttpd2dq (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvttpd2dqy 4064(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqy 4096(%rdx), %xmm30	 # AVX512{F,VL}
vcvttpd2dqy -4096(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqy -4128(%rdx), %xmm30	 # AVX512{F,VL}
vcvttpd2dqy 1016(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqy 1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvttpd2dqy -1024(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvttpd2dqy -1032(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
# ---- vcvttps2dq: truncating variant
vcvttps2dq %xmm29, %xmm30	 # AVX512{F,VL}
vcvttps2dq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvttps2dq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvttps2dq (%rcx), %xmm30	 # AVX512{F,VL}
vcvttps2dq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvttps2dq (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvttps2dq 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvttps2dq 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvttps2dq -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvttps2dq -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvttps2dq 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvttps2dq 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvttps2dq -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvttps2dq -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvttps2dq %ymm29, %ymm30	 # AVX512{F,VL}
vcvttps2dq %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvttps2dq %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvttps2dq (%rcx), %ymm30	 # AVX512{F,VL}
vcvttps2dq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvttps2dq (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
vcvttps2dq 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvttps2dq 4096(%rdx), %ymm30	 # AVX512{F,VL}
vcvttps2dq -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvttps2dq -4128(%rdx), %ymm30	 # AVX512{F,VL}
vcvttps2dq 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvttps2dq 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
vcvttps2dq -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvttps2dq -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# ---- vcvtudq2pd: unsigned source
vcvtudq2pd %xmm29, %xmm30	 # AVX512{F,VL}
vcvtudq2pd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtudq2pd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtudq2pd (%rcx), %xmm30	 # AVX512{F,VL}
vcvtudq2pd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtudq2pd (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtudq2pd 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2pd 1024(%rdx), %xmm30	 # AVX512{F,VL}
vcvtudq2pd -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2pd -1032(%rdx), %xmm30	 # AVX512{F,VL}
vcvtudq2pd 508(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2pd 512(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtudq2pd -512(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2pd -516(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
vcvtudq2pd %xmm29, %ymm30	 # AVX512{F,VL}
vcvtudq2pd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtudq2pd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtudq2pd (%rcx), %ymm30	 # AVX512{F,VL}
vcvtudq2pd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtudq2pd (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
vcvtudq2pd 2032(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2pd 2048(%rdx), %ymm30	 # AVX512{F,VL}
vcvtudq2pd -2048(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2pd -2064(%rdx), %ymm30	 # AVX512{F,VL}
vcvtudq2pd 508(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2pd 512(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
vcvtudq2pd -512(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2pd -516(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
# ---- vcvtudq2ps: unsigned source
vcvtudq2ps %xmm29, %xmm30	 # AVX512{F,VL}
vcvtudq2ps %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vcvtudq2ps %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vcvtudq2ps (%rcx), %xmm30	 # AVX512{F,VL}
vcvtudq2ps 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vcvtudq2ps (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtudq2ps 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2ps 2048(%rdx), %xmm30	 # AVX512{F,VL}
vcvtudq2ps -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2ps -2064(%rdx), %xmm30	 # AVX512{F,VL}
vcvtudq2ps 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2ps 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtudq2ps -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
vcvtudq2ps -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
vcvtudq2ps %ymm29, %ymm30	 # AVX512{F,VL}
vcvtudq2ps %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vcvtudq2ps %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vcvtudq2ps (%rcx), %ymm30	 # AVX512{F,VL}
vcvtudq2ps 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vcvtudq2ps (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtudq2ps 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2ps 4096(%rdx), %ymm30	 # AVX512{F,VL}
vcvtudq2ps -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2ps -4128(%rdx), %ymm30	 # AVX512{F,VL}
vcvtudq2ps 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2ps 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
vcvtudq2ps -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
vcvtudq2ps -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# ---- vdivpd: three-operand forms
vdivpd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vdivpd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vdivpd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivpd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivpd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivpd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivpd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vdivpd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vdivpd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vdivpd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivpd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivpd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivpd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vdivpd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivpd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vdivps
vdivps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vdivps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vdivps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vdivps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vdivps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vdivps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vdivps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vdivps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vdivps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vdivps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vdivps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vdivps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vdivps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vdivps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vdivps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vdivps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vdivps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vdivps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vdivps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vdivps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vdivps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vdivps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vexpandpd: load forms and reg-reg forms (Disp8*N is 8 here)
vexpandpd (%rcx), %xmm30	 # AVX512{F,VL}
vexpandpd (%rcx), %xmm30{%k7}	 # AVX512{F,VL}
vexpandpd (%rcx), %xmm30{%k7}{z}	 # AVX512{F,VL}
vexpandpd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vexpandpd 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vexpandpd 1024(%rdx), %xmm30	 # AVX512{F,VL}
vexpandpd -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vexpandpd -1032(%rdx), %xmm30	 # AVX512{F,VL}
vexpandpd (%rcx), %ymm30	 # AVX512{F,VL}
vexpandpd (%rcx), %ymm30{%k7}	 # AVX512{F,VL}
vexpandpd (%rcx), %ymm30{%k7}{z}	 # AVX512{F,VL}
vexpandpd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vexpandpd 1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vexpandpd 1024(%rdx), %ymm30	 # AVX512{F,VL}
vexpandpd -1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vexpandpd -1032(%rdx), %ymm30	 # AVX512{F,VL}
vexpandpd %xmm29, %xmm30	 # AVX512{F,VL}
vexpandpd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vexpandpd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vexpandpd %ymm29, %ymm30	 # AVX512{F,VL}
vexpandpd %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vexpandpd %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
# ---- vexpandps: load forms and reg-reg forms (Disp8*N is 4 here)
vexpandps (%rcx), %xmm30	 # AVX512{F,VL}
vexpandps (%rcx), %xmm30{%k7}	 # AVX512{F,VL}
vexpandps (%rcx), %xmm30{%k7}{z}	 # AVX512{F,VL}
vexpandps 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
vexpandps 508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vexpandps 512(%rdx), %xmm30	 # AVX512{F,VL}
vexpandps -512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
vexpandps -516(%rdx), %xmm30	 # AVX512{F,VL}
vexpandps (%rcx), %ymm30	 # AVX512{F,VL}
vexpandps (%rcx), %ymm30{%k7}	 # AVX512{F,VL}
vexpandps (%rcx), %ymm30{%k7}{z}	 # AVX512{F,VL}
vexpandps 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
vexpandps 508(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vexpandps 512(%rdx), %ymm30	 # AVX512{F,VL}
vexpandps -512(%rdx), %ymm30	 # AVX512{F,VL} Disp8
vexpandps -516(%rdx), %ymm30	 # AVX512{F,VL}
vexpandps %xmm29, %xmm30	 # AVX512{F,VL}
vexpandps %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vexpandps %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vexpandps %ymm29, %ymm30	 # AVX512{F,VL}
vexpandps %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vexpandps %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
# ---- vextractf32x4 / vextracti32x4: immediate selector forms
vextractf32x4 $0xab, %ymm29, %xmm30	 # AVX512{F,VL}
vextractf32x4 $0xab, %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vextractf32x4 $0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vextractf32x4 $123, %ymm29, %xmm30	 # AVX512{F,VL}
vextracti32x4 $0xab, %ymm29, %xmm30	 # AVX512{F,VL}
vextracti32x4 $0xab, %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
vextracti32x4 $0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vextracti32x4 $123, %ymm29, %xmm30	 # AVX512{F,VL}
# ---- vfmadd132pd
vfmadd132pd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmadd132pd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmadd132pd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132pd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132pd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132pd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132pd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132pd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmadd132pd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmadd132pd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132pd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132pd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132pd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132pd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132pd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmadd132ps
vfmadd132ps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmadd132ps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmadd132ps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132ps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132ps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132ps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd132ps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd132ps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmadd132ps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmadd132ps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132ps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132ps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132ps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd132ps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd132ps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmadd213pd
vfmadd213pd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmadd213pd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmadd213pd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213pd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213pd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213pd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213pd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213pd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmadd213pd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmadd213pd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213pd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213pd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213pd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213pd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213pd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmadd213ps
vfmadd213ps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmadd213ps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmadd213ps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213ps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213ps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213ps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd213ps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd213ps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmadd213ps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmadd213ps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213ps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213ps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213ps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd213ps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd213ps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmadd231pd
vfmadd231pd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmadd231pd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmadd231pd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231pd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231pd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231pd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231pd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231pd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmadd231pd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmadd231pd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231pd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231pd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231pd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231pd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231pd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmadd231ps
vfmadd231ps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmadd231ps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmadd231ps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231ps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231ps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231ps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmadd231ps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmadd231ps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmadd231ps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmadd231ps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231ps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231ps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231ps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmadd231ps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmadd231ps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmaddsub132pd
vfmaddsub132pd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmaddsub132pd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmaddsub132pd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132pd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmaddsub132pd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmaddsub132pd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132pd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
vfmaddsub132pd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# ---- vfmaddsub132ps (group continues past this chunk)
vfmaddsub132ps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
vfmaddsub132ps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
vfmaddsub132ps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132ps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132ps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132ps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
vfmaddsub132ps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
vfmaddsub132ps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
vfmaddsub132ps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
vfmaddsub132ps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
vfmaddsub132ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub132ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub132ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub132ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub132ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub132ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub132ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub132ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub132ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub132ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub132ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmaddsub213pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmaddsub213pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmaddsub213pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmaddsub213pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmaddsub213ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmaddsub213ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub213ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub213ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmaddsub213ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmaddsub213ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub213ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub213ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmaddsub231pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmaddsub231pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmaddsub231pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmaddsub231pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmaddsub231ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmaddsub231ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmaddsub231ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmaddsub231ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmaddsub231ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmaddsub231ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmaddsub231ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmaddsub231ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsub132pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsub132pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsub132pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsub132pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsub132ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsub132ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub132ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub132ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsub132ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsub132ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub132ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub132ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsub213pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsub213pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsub213pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsub213pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsub213ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsub213ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub213ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub213ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsub213ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsub213ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub213ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub213ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsub231pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsub231pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsub231pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsub231pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsub231ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsub231ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsub231ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsub231ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsub231ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsub231ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsub231ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsub231ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd132pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd132pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd132ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd132ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd213pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd213pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd213ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd213ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd231pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd231pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd231ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd231ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd132pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd132pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
# vfnmadd132pd (ymm) / vfnmadd132ps (xmm, ymm), AVX512{F,VL}.
# Each group tests: register form (plain, {%k7} merge-masking, {%k7}{z}
# zero-masking), plain memory, full SIB addressing, embedded broadcast,
# and displacements on either side of the EVEX Disp8*N compression limit
# (last encodable Disp8 value vs. first value needing Disp32), for both
# signs and for the broadcast element size.
	vfnmadd132pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmadd132pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd132ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmadd132ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd132ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd132ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd132ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vfnmadd213pd/ps (xmm, ymm), AVX512{F,VL}: same operand-form matrix as the
# 132 variants — masked/zeroed register forms, memory, SIB, broadcast, and
# Disp8*N compression-boundary displacements (127*N vs. 128*N, both signs).
	vfnmadd213pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmadd213pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd213ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmadd213ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd213ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd213ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd213ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vfnmadd231pd/ps (xmm, ymm), AVX512{F,VL}: same operand-form matrix as the
# 132/213 variants — masked/zeroed register forms, memory, SIB, broadcast,
# and Disp8*N compression-boundary displacements (127*N vs. 128*N, both signs).
	vfnmadd231pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmadd231pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmadd231ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmadd231ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmadd231ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmadd231ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmadd231ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vfnmsub132pd/ps (xmm, ymm), AVX512{F,VL}: negated-multiply-subtract FMA,
# same operand-form matrix — masked/zeroed register forms, memory, SIB,
# broadcast, and Disp8*N compression-boundary displacements.
	vfnmsub132pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmsub132pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub132ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmsub132ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub132ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub132ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub132ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vfnmsub213pd/ps (xmm, ymm), AVX512{F,VL}: same operand-form matrix —
# masked/zeroed register forms, memory, SIB, broadcast, and Disp8*N
# compression-boundary displacements (127*N vs. 128*N, both signs).
	vfnmsub213pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmsub213pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub213ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmsub213ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub213ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub213ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub213ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vfnmsub231pd/ps (xmm, ymm), AVX512{F,VL}: same operand-form matrix —
# masked/zeroed register forms, memory, SIB, broadcast, and Disp8*N
# compression-boundary displacements (127*N vs. 128*N, both signs).
	vfnmsub231pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmsub231pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfnmsub231ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfnmsub231ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfnmsub231ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfnmsub231ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfnmsub231ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vgatherdpd/dps/qpd/qps, AVX512{F,VL}: VSIB gathers with a mandatory
# opmask ({%k1}).  Each mnemonic is tested with scale 8, implicit scale 1,
# and scale 4, plus an extended index register (%xmm31/%ymm31) to exercise
# the EVEX.V' bit.  Note vgatherqps with a ymm index writes only an xmm
# destination (qword indices, dword elements).
	vgatherdpd	123(%r14,%xmm31,8), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherdpd	256(%r9,%xmm31), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherdpd	1024(%rcx,%xmm31,4), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherdpd	123(%r14,%xmm31,8), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherdpd	256(%r9,%xmm31), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherdpd	1024(%rcx,%xmm31,4), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherdps	123(%r14,%xmm31,8), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherdps	256(%r9,%xmm31), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherdps	1024(%rcx,%xmm31,4), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherdps	123(%r14,%ymm31,8), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherdps	256(%r9,%ymm31), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherdps	1024(%rcx,%ymm31,4), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherqpd	123(%r14,%xmm31,8), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqpd	256(%r9,%xmm31), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqpd	1024(%rcx,%xmm31,4), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqpd	123(%r14,%ymm31,8), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherqpd	256(%r9,%ymm31), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherqpd	1024(%rcx,%ymm31,4), %ymm30{%k1}	 # AVX512{F,VL}
	vgatherqps	123(%r14,%xmm31,8), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqps	256(%r9,%xmm31), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqps	1024(%rcx,%xmm31,4), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqps	123(%r14,%ymm31,8), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqps	256(%r9,%ymm31), %xmm30{%k1}	 # AVX512{F,VL}
	vgatherqps	1024(%rcx,%ymm31,4), %xmm30{%k1}	 # AVX512{F,VL}
# vgetexppd/ps (xmm, ymm), AVX512{F,VL}: two-operand unary form.  Tests
# masked/zeroed register forms, memory, SIB, embedded broadcast, and the
# Disp8*N compression boundaries for both the full-vector and broadcast
# element sizes.
	vgetexppd	%xmm29, %xmm30	 # AVX512{F,VL}
	vgetexppd	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vgetexppd	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vgetexppd	(%rcx), %xmm30	 # AVX512{F,VL}
	vgetexppd	0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vgetexppd	(%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vgetexppd	2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetexppd	2048(%rdx), %xmm30	 # AVX512{F,VL}
	vgetexppd	-2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetexppd	-2064(%rdx), %xmm30	 # AVX512{F,VL}
	vgetexppd	1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vgetexppd	1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vgetexppd	-1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vgetexppd	-1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vgetexppd	%ymm29, %ymm30	 # AVX512{F,VL}
	vgetexppd	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vgetexppd	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vgetexppd	(%rcx), %ymm30	 # AVX512{F,VL}
	vgetexppd	0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vgetexppd	(%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vgetexppd	4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetexppd	4096(%rdx), %ymm30	 # AVX512{F,VL}
	vgetexppd	-4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetexppd	-4128(%rdx), %ymm30	 # AVX512{F,VL}
	vgetexppd	1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vgetexppd	1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vgetexppd	-1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vgetexppd	-1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vgetexpps	%xmm29, %xmm30	 # AVX512{F,VL}
	vgetexpps	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vgetexpps	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vgetexpps	(%rcx), %xmm30	 # AVX512{F,VL}
	vgetexpps	0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vgetexpps	(%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vgetexpps	2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetexpps	2048(%rdx), %xmm30	 # AVX512{F,VL}
	vgetexpps	-2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetexpps	-2064(%rdx), %xmm30	 # AVX512{F,VL}
	vgetexpps	508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vgetexpps	512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vgetexpps	-512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vgetexpps	-516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vgetexpps	%ymm29, %ymm30	 # AVX512{F,VL}
	vgetexpps	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vgetexpps	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vgetexpps	(%rcx), %ymm30	 # AVX512{F,VL}
	vgetexpps	0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vgetexpps	(%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vgetexpps	4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetexpps	4096(%rdx), %ymm30	 # AVX512{F,VL}
	vgetexpps	-4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetexpps	-4128(%rdx), %ymm30	 # AVX512{F,VL}
	vgetexpps	508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vgetexpps	512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vgetexpps	-512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vgetexpps	-516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# vgetmantpd/ps (xmm, ymm), AVX512{F,VL}: imm8-carrying unary form.  The
# immediate is tested with both a hex value ($0xab) and a decimal value
# ($123); memory forms repeat the usual matrix — SIB, broadcast, and
# Disp8*N compression-boundary displacements.
	vgetmantpd	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vgetmantpd	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vgetmantpd	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vgetmantpd	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vgetmantpd	$123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vgetmantpd	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vgetmantpd	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vgetmantpd	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vgetmantpd	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vgetmantpd	$123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vgetmantpd	$123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vgetmantps	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vgetmantps	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vgetmantps	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vgetmantps	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vgetmantps	$123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vgetmantps	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vgetmantps	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vgetmantps	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vgetmantps	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vgetmantps	$123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vgetmantps	$123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
# vinsertf32x4 / vinserti32x4 (xmm into ymm), AVX512{F,VL}: imm8 selects
# the 128-bit lane.  Memory source is a 16-byte tuple, so Disp8*N uses
# N=16 (2032 = 127*16 compressible, 2048 needs Disp32).  No broadcast
# forms exist for these insert instructions.
	vinsertf32x4	$0xab, %xmm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vinsertf32x4	$0xab, %xmm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vinsertf32x4	$0xab, %xmm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vinsertf32x4	$123, %xmm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vinsertf32x4	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vinsertf32x4	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vinsertf32x4	$123, 2032(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vinsertf32x4	$123, 2048(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vinsertf32x4	$123, -2048(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vinsertf32x4	$123, -2064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vinserti32x4	$0xab, %xmm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vinserti32x4	$0xab, %xmm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vinserti32x4	$0xab, %xmm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vinserti32x4	$123, %xmm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vinserti32x4	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vinserti32x4	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vinserti32x4	$123, 2032(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vinserti32x4	$123, 2048(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vinserti32x4	$123, -2048(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vinserti32x4	$123, -2064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
# vmaxpd/ps (xmm, ymm), AVX512{F,VL}: three-operand form with the standard
# test matrix — masked/zeroed register forms, memory, SIB, embedded
# broadcast, and Disp8*N compression-boundary displacements.
	vmaxpd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmaxpd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmaxpd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxpd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxpd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxpd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxpd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxpd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmaxpd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmaxpd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxpd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxpd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxpd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxpd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxpd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmaxps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmaxps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmaxps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmaxps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmaxps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmaxps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmaxps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmaxps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vminpd/ps (xmm, ymm), AVX512{F,VL}: mirrors the vmaxpd/ps matrix —
# masked/zeroed register forms, memory, SIB, embedded broadcast, and
# Disp8*N compression-boundary displacements.
	vminpd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vminpd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vminpd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminpd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminpd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminpd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminpd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vminpd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vminpd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vminpd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminpd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminpd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminpd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vminpd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminpd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vminps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vminps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vminps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vminps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vminps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vminps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vminps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vminps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vmovapd / vmovaps (xmm, ymm), AVX512{F,VL}: aligned full-vector moves.
# No broadcast forms exist; Disp8*N uses the full vector width
# (N=16 for xmm: 2032 = 127*16; N=32 for ymm: 4064 = 127*32).
	vmovapd	%xmm29, %xmm30	 # AVX512{F,VL}
	vmovapd	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovapd	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovapd	(%rcx), %xmm30	 # AVX512{F,VL}
	vmovapd	0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovapd	2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovapd	2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovapd	-2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovapd	-2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovapd	%ymm29, %ymm30	 # AVX512{F,VL}
	vmovapd	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovapd	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovapd	(%rcx), %ymm30	 # AVX512{F,VL}
	vmovapd	0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovapd	4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovapd	4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovapd	-4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovapd	-4128(%rdx), %ymm30	 # AVX512{F,VL}
	vmovaps	%xmm29, %xmm30	 # AVX512{F,VL}
	vmovaps	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovaps	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovaps	(%rcx), %xmm30	 # AVX512{F,VL}
	vmovaps	0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovaps	2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovaps	2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovaps	-2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovaps	-2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovaps	%ymm29, %ymm30	 # AVX512{F,VL}
	vmovaps	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovaps	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovaps	(%rcx), %ymm30	 # AVX512{F,VL}
	vmovaps	0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovaps	4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovaps	4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovaps	-4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovaps	-4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmovddup: duplicate low double — note xmm memory form uses 8-byte element
	# Disp8*N (1016/1024), ymm form uses full 32-byte granularity (4064/4096).
	vmovddup %xmm29, %xmm30	 # AVX512{F,VL}
	vmovddup %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovddup %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovddup (%rcx), %xmm30	 # AVX512{F,VL}
	vmovddup 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovddup 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovddup 1024(%rdx), %xmm30	 # AVX512{F,VL}
	vmovddup -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovddup -1032(%rdx), %xmm30	 # AVX512{F,VL}
	vmovddup %ymm29, %ymm30	 # AVX512{F,VL}
	vmovddup %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovddup %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovddup (%rcx), %ymm30	 # AVX512{F,VL}
	vmovddup 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovddup 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovddup 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovddup -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovddup -4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmovdqa32/vmovdqa64: aligned integer moves with dword/qword mask granularity —
	# register, masked, zero-masked, memory, SIB, Disp8*N boundaries.
	vmovdqa32 %xmm29, %xmm30	 # AVX512{F,VL}
	vmovdqa32 %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovdqa32 %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqa32 (%rcx), %xmm30	 # AVX512{F,VL}
	vmovdqa32 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovdqa32 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqa32 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqa32 -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqa32 -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqa32 %ymm29, %ymm30	 # AVX512{F,VL}
	vmovdqa32 %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovdqa32 %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqa32 (%rcx), %ymm30	 # AVX512{F,VL}
	vmovdqa32 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovdqa32 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqa32 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovdqa32 -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqa32 -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vmovdqa64 %xmm29, %xmm30	 # AVX512{F,VL}
	vmovdqa64 %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovdqa64 %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqa64 (%rcx), %xmm30	 # AVX512{F,VL}
	vmovdqa64 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovdqa64 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqa64 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqa64 -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqa64 -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqa64 %ymm29, %ymm30	 # AVX512{F,VL}
	vmovdqa64 %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovdqa64 %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqa64 (%rcx), %ymm30	 # AVX512{F,VL}
	vmovdqa64 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovdqa64 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqa64 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovdqa64 -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqa64 -4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmovdqu32/vmovdqu64: unaligned integer moves — same form coverage as the
	# aligned variants above (register, masking, memory, SIB, Disp8*N boundaries).
	vmovdqu32 %xmm29, %xmm30	 # AVX512{F,VL}
	vmovdqu32 %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovdqu32 %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqu32 (%rcx), %xmm30	 # AVX512{F,VL}
	vmovdqu32 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovdqu32 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqu32 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqu32 -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqu32 -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqu32 %ymm29, %ymm30	 # AVX512{F,VL}
	vmovdqu32 %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovdqu32 %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqu32 (%rcx), %ymm30	 # AVX512{F,VL}
	vmovdqu32 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovdqu32 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqu32 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovdqu32 -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqu32 -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vmovdqu64 %xmm29, %xmm30	 # AVX512{F,VL}
	vmovdqu64 %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovdqu64 %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqu64 (%rcx), %xmm30	 # AVX512{F,VL}
	vmovdqu64 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovdqu64 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqu64 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqu64 -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovdqu64 -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovdqu64 %ymm29, %ymm30	 # AVX512{F,VL}
	vmovdqu64 %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovdqu64 %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovdqu64 (%rcx), %ymm30	 # AVX512{F,VL}
	vmovdqu64 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovdqu64 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqu64 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovdqu64 -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovdqu64 -4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmovntdq (non-temporal store) / vmovntdqa (non-temporal load): memory-only
	# forms, no masking — SIB and Disp8*N boundary displacements.
	vmovntdq %xmm30, (%rcx)	 # AVX512{F,VL}
	vmovntdq %xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovntdq %xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovntdq %xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovntdq %xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovntdq %xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovntdq %ymm30, (%rcx)	 # AVX512{F,VL}
	vmovntdq %ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovntdq %ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovntdq %ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovntdq %ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovntdq %ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovntdqa (%rcx), %xmm30	 # AVX512{F,VL}
	vmovntdqa 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovntdqa 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovntdqa 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovntdqa -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovntdqa -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovntdqa (%rcx), %ymm30	 # AVX512{F,VL}
	vmovntdqa 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovntdqa 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovntdqa 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovntdqa -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovntdqa -4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmovntpd/vmovntps: non-temporal FP stores (store-only, no masking) —
	# SIB and Disp8*N boundary displacements for xmm and ymm.
	vmovntpd %xmm30, (%rcx)	 # AVX512{F,VL}
	vmovntpd %xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovntpd %xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovntpd %xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovntpd %xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovntpd %xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovntpd %ymm30, (%rcx)	 # AVX512{F,VL}
	vmovntpd %ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovntpd %ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovntpd %ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovntpd %ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovntpd %ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovntps %xmm30, (%rcx)	 # AVX512{F,VL}
	vmovntps %xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovntps %xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovntps %xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovntps %xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovntps %xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovntps %ymm30, (%rcx)	 # AVX512{F,VL}
	vmovntps %ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovntps %ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovntps %ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovntps %ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovntps %ymm30, -4128(%rdx)	 # AVX512{F,VL}
	# vmovshdup/vmovsldup: duplicate odd/even single-precision elements —
	# register, masked, zero-masked, memory, SIB, Disp8*N boundaries.
	vmovshdup %xmm29, %xmm30	 # AVX512{F,VL}
	vmovshdup %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovshdup %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovshdup (%rcx), %xmm30	 # AVX512{F,VL}
	vmovshdup 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovshdup 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovshdup 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovshdup -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovshdup -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovshdup %ymm29, %ymm30	 # AVX512{F,VL}
	vmovshdup %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovshdup %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovshdup (%rcx), %ymm30	 # AVX512{F,VL}
	vmovshdup 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovshdup 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovshdup 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovshdup -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovshdup -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vmovsldup %xmm29, %xmm30	 # AVX512{F,VL}
	vmovsldup %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovsldup %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovsldup (%rcx), %xmm30	 # AVX512{F,VL}
	vmovsldup 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovsldup 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovsldup 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovsldup -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovsldup -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovsldup %ymm29, %ymm30	 # AVX512{F,VL}
	vmovsldup %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovsldup %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovsldup (%rcx), %ymm30	 # AVX512{F,VL}
	vmovsldup 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovsldup 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovsldup 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovsldup -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovsldup -4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmovupd/vmovups: unaligned FP moves — register, masked, zero-masked,
	# memory, SIB, Disp8*N boundaries for xmm and ymm.
	vmovupd %xmm29, %xmm30	 # AVX512{F,VL}
	vmovupd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovupd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovupd (%rcx), %xmm30	 # AVX512{F,VL}
	vmovupd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovupd 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovupd 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovupd -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovupd -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovupd %ymm29, %ymm30	 # AVX512{F,VL}
	vmovupd %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovupd %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovupd (%rcx), %ymm30	 # AVX512{F,VL}
	vmovupd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovupd 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovupd 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovupd -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovupd -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vmovups %xmm29, %xmm30	 # AVX512{F,VL}
	vmovups %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmovups %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmovups (%rcx), %xmm30	 # AVX512{F,VL}
	vmovups 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vmovups 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovups 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vmovups -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vmovups -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vmovups %ymm29, %ymm30	 # AVX512{F,VL}
	vmovups %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmovups %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmovups (%rcx), %ymm30	 # AVX512{F,VL}
	vmovups 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vmovups 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovups 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vmovups -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vmovups -4128(%rdx), %ymm30	 # AVX512{F,VL}
	# vmulpd/vmulps: three-operand FP multiply — register, masked, zero-masked,
	# memory, SIB, {1toN} broadcast, and Disp8*N boundaries (8-byte vs 4-byte element).
	vmulpd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmulpd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmulpd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulpd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulpd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulpd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulpd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulpd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmulpd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmulpd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulpd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulpd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulpd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulpd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulpd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vmulps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vmulps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vmulps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vmulps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vmulps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vmulps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vmulps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vmulps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vpabsd/vpabsq: packed absolute value (dword/qword) — two-operand forms with
	# masking, broadcast, and Disp8*N boundary displacements.
	vpabsd %xmm29, %xmm30	 # AVX512{F,VL}
	vpabsd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpabsd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpabsd (%rcx), %xmm30	 # AVX512{F,VL}
	vpabsd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpabsd (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vpabsd 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpabsd 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpabsd -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpabsd -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpabsd 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpabsd 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpabsd -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpabsd -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpabsd %ymm29, %ymm30	 # AVX512{F,VL}
	vpabsd %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpabsd %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpabsd (%rcx), %ymm30	 # AVX512{F,VL}
	vpabsd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpabsd (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vpabsd 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpabsd 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpabsd -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpabsd -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpabsd 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpabsd 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpabsd -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpabsd -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpabsq %xmm29, %xmm30	 # AVX512{F,VL}
	vpabsq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpabsq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpabsq (%rcx), %xmm30	 # AVX512{F,VL}
	vpabsq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpabsq (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vpabsq 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpabsq 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpabsq -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpabsq -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpabsq 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpabsq 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpabsq -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpabsq -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpabsq %ymm29, %ymm30	 # AVX512{F,VL}
	vpabsq %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpabsq %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpabsq (%rcx), %ymm30	 # AVX512{F,VL}
	vpabsq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpabsq (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vpabsq 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpabsq 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpabsq -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpabsq -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpabsq 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpabsq 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vpabsq -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpabsq -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vpaddd/vpaddq: packed integer add (dword/qword) — full form coverage:
	# register, masked, zero-masked, memory, SIB, broadcast, Disp8*N boundaries.
	vpaddd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpaddd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpaddd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddd 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddd -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpaddd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpaddd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddd 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddd -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddd -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpaddq %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpaddq (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddq 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddq -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddq 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpaddq -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpaddq %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpaddq %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpaddq (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddq 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddq -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddq 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpaddq -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpaddq -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vpandd/vpandnd/vpandnq/vpandq: packed bitwise AND / AND-NOT with dword and
	# qword element width — register, masked, zero-masked, memory, SIB, broadcast,
	# and Disp8*N boundary displacements for xmm and ymm.
	vpandd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpandd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpandd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandd 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandd -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpandd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpandd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandd 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandd -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandd -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpandnd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpandnd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnd 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnd -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpandnd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpandnd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnd 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnd -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnd -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpandnq %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpandnq (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnq 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnq -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnq 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandnq -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandnq %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpandnq %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpandnq (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnq 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnq -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnq 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandnq -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandnq -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpandq %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpandq (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandq 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandq -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandq 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpandq -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpandq %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpandq %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpandq (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandq 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandq -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandq 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpandq -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpandq -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vpblendmd: mask-driven dword blend — register, masked, zero-masked, memory,
	# SIB, broadcast, Disp8*N boundaries for xmm and ymm.
	vpblendmd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpblendmd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpblendmd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpblendmd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpblendmd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpblendmd 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpblendmd -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpblendmd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpblendmd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpblendmd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpblendmd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpblendmd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpblendmd 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpblendmd -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpblendmd -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vpbroadcastd/vpbroadcastq: broadcast from memory (element-sized Disp8*N:
	# 4 or 8 bytes), from an xmm source, and from a GPR (incl. high regs needing
	# REX/EVEX extension: %ebp/%r13d, %r8).
	vpbroadcastd (%rcx), %xmm30	 # AVX512{F,VL}
	vpbroadcastd (%rcx), %xmm30{%k7}	 # AVX512{F,VL}
	vpbroadcastd (%rcx), %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpbroadcastd 508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpbroadcastd 512(%rdx), %xmm30	 # AVX512{F,VL}
	vpbroadcastd -512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpbroadcastd -516(%rdx), %xmm30	 # AVX512{F,VL}
	vpbroadcastd (%rcx), %ymm30	 # AVX512{F,VL}
	vpbroadcastd (%rcx), %ymm30{%k7}	 # AVX512{F,VL}
	vpbroadcastd (%rcx), %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpbroadcastd 508(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpbroadcastd 512(%rdx), %ymm30	 # AVX512{F,VL}
	vpbroadcastd -512(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpbroadcastd -516(%rdx), %ymm30	 # AVX512{F,VL}
	vpbroadcastd %xmm29, %xmm30	 # AVX512{F,VL}
	vpbroadcastd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpbroadcastd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %xmm29, %ymm30	 # AVX512{F,VL}
	vpbroadcastd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpbroadcastd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %eax, %xmm30	 # AVX512{F,VL}
	vpbroadcastd %eax, %xmm30{%k7}	 # AVX512{F,VL}
	vpbroadcastd %eax, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %ebp, %xmm30	 # AVX512{F,VL}
	vpbroadcastd %r13d, %xmm30	 # AVX512{F,VL}
	vpbroadcastd %eax, %ymm30	 # AVX512{F,VL}
	vpbroadcastd %eax, %ymm30{%k7}	 # AVX512{F,VL}
	vpbroadcastd %eax, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastd %ebp, %ymm30	 # AVX512{F,VL}
	vpbroadcastd %r13d, %ymm30	 # AVX512{F,VL}
	vpbroadcastq (%rcx), %xmm30	 # AVX512{F,VL}
	vpbroadcastq (%rcx), %xmm30{%k7}	 # AVX512{F,VL}
	vpbroadcastq (%rcx), %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpbroadcastq 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpbroadcastq 1024(%rdx), %xmm30	 # AVX512{F,VL}
	vpbroadcastq -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpbroadcastq -1032(%rdx), %xmm30	 # AVX512{F,VL}
	vpbroadcastq (%rcx), %ymm30	 # AVX512{F,VL}
	vpbroadcastq (%rcx), %ymm30{%k7}	 # AVX512{F,VL}
	vpbroadcastq (%rcx), %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpbroadcastq 1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpbroadcastq 1024(%rdx), %ymm30	 # AVX512{F,VL}
	vpbroadcastq -1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpbroadcastq -1032(%rdx), %ymm30	 # AVX512{F,VL}
	vpbroadcastq %xmm29, %xmm30	 # AVX512{F,VL}
	vpbroadcastq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpbroadcastq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq %xmm29, %ymm30	 # AVX512{F,VL}
	vpbroadcastq %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpbroadcastq %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq %rax, %xmm30	 # AVX512{F,VL}
	vpbroadcastq %rax, %xmm30{%k7}	 # AVX512{F,VL}
	vpbroadcastq %rax, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq %r8, %xmm30	 # AVX512{F,VL}
	vpbroadcastq %rax, %ymm30	 # AVX512{F,VL}
	vpbroadcastq %rax, %ymm30{%k7}	 # AVX512{F,VL}
	vpbroadcastq %rax, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpbroadcastq %r8, %ymm30	 # AVX512{F,VL}
	# vpcmpd: dword compare with immediate predicate into mask register —
	# both $0xab and $123 immediates, mask-register {%k7} write-mask, memory,
	# SIB, broadcast, Disp8*N boundaries.
	vpcmpd $0xab, %xmm29, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $0xab, %xmm29, %xmm30, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, %xmm29, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, (%rcx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, 0x123(%rax,%r14,8), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, (%rcx){1to4}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, 2032(%rdx), %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, 2048(%rdx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, -2048(%rdx), %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, -2064(%rdx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, 508(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, 512(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, -512(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, -516(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpd $0xab, %ymm29, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $0xab, %ymm29, %ymm30, %k5{%k7}	 # AVX512{F,VL}
	vpcmpd $123, %ymm29, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, (%rcx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, 0x123(%rax,%r14,8), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, (%rcx){1to8}, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, 4064(%rdx), %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, 4096(%rdx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, -4096(%rdx), %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, -4128(%rdx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, 508(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, 512(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpd $123, -512(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpd $123, -516(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL}
	# vpcmpeqd/vpcmpeqq: equality compare into mask register (no immediate) —
	# register, {%k7} write-mask, memory, SIB, broadcast, Disp8*N boundaries.
	# (vpcmpeqq ymm group continues past this point in the file.)
	vpcmpeqd %xmm29, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd %xmm29, %xmm30, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd (%rcx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd 0x123(%rax,%r14,8), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd (%rcx){1to4}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd 2032(%rdx), %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd 2048(%rdx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd -2048(%rdx), %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd -2064(%rdx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd 508(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd 512(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd -512(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd -516(%rdx){1to4}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqd %ymm29, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd %ymm29, %ymm30, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqd (%rcx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd 0x123(%rax,%r14,8), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd (%rcx){1to8}, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd 4064(%rdx), %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd 4096(%rdx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd -4096(%rdx), %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd -4128(%rdx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd 508(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd 512(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqd -512(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqd -516(%rdx){1to8}, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqq %xmm29, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq %xmm29, %xmm30, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq (%rcx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq 0x123(%rax,%r14,8), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq (%rcx){1to2}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq 2032(%rdx), %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqq 2048(%rdx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq -2048(%rdx), %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqq -2064(%rdx), %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq 1016(%rdx){1to2}, %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqq 1024(%rdx){1to2}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq -1024(%rdx){1to2}, %xmm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqq -1032(%rdx){1to2}, %xmm30, %k5	 # AVX512{F,VL}
	vpcmpeqq %ymm29, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqq %ymm29, %ymm30, %k5{%k7}	 # AVX512{F,VL}
	vpcmpeqq (%rcx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqq 0x123(%rax,%r14,8), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqq (%rcx){1to4}, %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqq 4064(%rdx), %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqq 4096(%rdx), %ymm30, %k5	 # AVX512{F,VL}
	vpcmpeqq -4096(%rdx), %ymm30, %k5	 # AVX512{F,VL} Disp8
	vpcmpeqq -4128(%rdx), %ymm30, %k5	 # AVX512{F,VL}
vpcmpeqq 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
# vpblendmq: opmask-controlled blend of qword elements, covering register,
# masked {%k7}, zero-masked {%k7}{z}, memory, broadcast, and the Disp8*N
# compressed-displacement boundary pairs (2032/2048, 1016/1024, 4064/4096).
vpblendmq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpblendmq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpblendmq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpblendmq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpblendmq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpcompressd: store active dword elements contiguously; register-to-memory
# forms (Disp8 boundary at 508/512 — element-size-scaled for compress) and
# register-to-register forms with merge/zero masking.
vpcompressd %xmm30, (%rcx) # AVX512{F,VL}
vpcompressd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpcompressd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpcompressd %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpcompressd %xmm30, 512(%rdx) # AVX512{F,VL}
vpcompressd %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpcompressd %xmm30, -516(%rdx) # AVX512{F,VL}
vpcompressd %ymm30, (%rcx) # AVX512{F,VL}
vpcompressd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpcompressd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpcompressd %ymm30, 508(%rdx) # AVX512{F,VL} Disp8
vpcompressd %ymm30, 512(%rdx) # AVX512{F,VL}
vpcompressd %ymm30, -512(%rdx) # AVX512{F,VL} Disp8
vpcompressd %ymm30, -516(%rdx) # AVX512{F,VL}
vpcompressd %xmm29, %xmm30 # AVX512{F,VL}
vpcompressd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpcompressd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpcompressd %ymm29, %ymm30 # AVX512{F,VL}
vpcompressd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpcompressd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
# VPERM family: element permutes.  vpermd/vpermps/vpermpd/vpermq have no
# 128-bit form here (ymm only); vpermilpd/vpermilps come in both
# immediate-control and vector-control variants.  Same encoding matrix as
# above: masked/zero-masked register forms, memory, broadcast, and Disp8*N
# compressed-displacement boundary displacements.
vpermd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpermilpd, immediate-control form (double elements, 8-byte broadcast).
vpermilpd $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilpd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilpd $123, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd $123, (%rcx), %xmm30 # AVX512{F,VL}
vpermilpd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpermilpd $123, (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vpermilpd $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpermilpd $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpermilpd $123, 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpermilpd $123, -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpermilpd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilpd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilpd $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermilpd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermilpd $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpermilpd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermilpd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermilpd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermilpd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
# vpermilpd, vector-control form (control comes from a register/memory operand).
vpermilpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpermilps, immediate-control form (single elements, 4-byte broadcast).
vpermilps $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilps $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilps $123, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps $123, (%rcx), %xmm30 # AVX512{F,VL}
vpermilps $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpermilps $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpermilps $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpermilps $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpermilps $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpermilps $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpermilps $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilps $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilps $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermilps $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermilps $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpermilps $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermilps $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermilps $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpermilps $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# vpermilps, vector-control form.
vpermilps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpermpd / vpermps / vpermq: cross-lane permutes (ymm-only forms here).
vpermpd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermpd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermpd $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermpd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermpd $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpermpd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermpd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermpd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermpd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermq $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermq $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermq $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermq $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermq $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermq $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpermq $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermq $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermq $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermq $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermq $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermq $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermq $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermq $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
# vpexpandd / vpexpandq: load/expand sparse elements under mask; memory
# forms use element-size-scaled Disp8 boundaries (508/512 for dword,
# 1016/1024 for qword), plus register forms with merge/zero masking.
vpexpandd (%rcx), %xmm30 # AVX512{F,VL}
vpexpandd (%rcx), %xmm30{%k7} # AVX512{F,VL}
vpexpandd (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpexpandd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandd 512(%rdx), %xmm30 # AVX512{F,VL}
vpexpandd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandd -516(%rdx), %xmm30 # AVX512{F,VL}
vpexpandd (%rcx), %ymm30 # AVX512{F,VL}
vpexpandd (%rcx), %ymm30{%k7} # AVX512{F,VL}
vpexpandd (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vpexpandd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpexpandd 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandd 512(%rdx), %ymm30 # AVX512{F,VL}
vpexpandd -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandd -516(%rdx), %ymm30 # AVX512{F,VL}
vpexpandd %xmm29, %xmm30 # AVX512{F,VL}
vpexpandd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpexpandd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandd %ymm29, %ymm30 # AVX512{F,VL}
vpexpandd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpexpandd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpexpandq (%rcx), %xmm30 # AVX512{F,VL}
vpexpandq (%rcx), %xmm30{%k7} # AVX512{F,VL}
vpexpandq (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpexpandq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpexpandq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpexpandq (%rcx), %ymm30 # AVX512{F,VL}
vpexpandq (%rcx), %ymm30{%k7} # AVX512{F,VL}
vpexpandq (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vpexpandq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpexpandq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpexpandq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandq -1032(%rdx), %ymm30 # AVX512{F,VL}
vpexpandq %xmm29, %xmm30 # AVX512{F,VL}
vpexpandq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpexpandq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandq %ymm29, %ymm30 # AVX512{F,VL}
vpexpandq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpexpandq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
# vpgather{d,q}{d,q}: VSIB gathers — index register is a vector (%xmm31/%ymm31),
# a write-mask ({%k1}) is mandatory, and the mixed dword/qword index-vs-element
# widths (e.g. xmm index with ymm destination for vpgatherdq) are exercised
# along with scale factors 1/4/8.
vpgatherdd 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherdd 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherdd 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherdd 123(%r14,%ymm31,8), %ymm30{%k1} # AVX512{F,VL}
vpgatherdd 256(%r9,%ymm31), %ymm30{%k1} # AVX512{F,VL}
vpgatherdd 1024(%rcx,%ymm31,4), %ymm30{%k1} # AVX512{F,VL}
vpgatherdq 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherdq 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherdq 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherdq 123(%r14,%xmm31,8), %ymm30{%k1} # AVX512{F,VL}
vpgatherdq 256(%r9,%xmm31), %ymm30{%k1} # AVX512{F,VL}
vpgatherdq 1024(%rcx,%xmm31,4), %ymm30{%k1} # AVX512{F,VL}
vpgatherqd 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 123(%r14,%ymm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 256(%r9,%ymm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 1024(%rcx,%ymm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 123(%r14,%ymm31,8), %ymm30{%k1} # AVX512{F,VL}
vpgatherqq 256(%r9,%ymm31), %ymm30{%k1} # AVX512{F,VL}
vpgatherqq 1024(%rcx,%ymm31,4), %ymm30{%k1} # AVX512{F,VL}
# vpmax/vpmin family: packed signed/unsigned min/max, dword and qword
# variants, each covering register (plus {%k7} merge and {%k7}{z} zeroing),
# plain/SIB memory, broadcast, and the Disp8*N boundary displacement pairs.
vpmaxsd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxsd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxsd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxsd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxsq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxsq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxsq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxsq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxud %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxud (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxud %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxud (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxuq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxuq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxuq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxuq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminsd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminsd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminsd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpminsq: EVEX signed qword minimum; same pattern set as vpminsd but with
# element-size-8 broadcasts ({1to2}/{1to4}) and +/-1024-byte Disp8*8 limits.
vpminsq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminsq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminsq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminsq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminsq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpminud: EVEX unsigned dword minimum (same coverage matrix as vpminsd).
vpminud %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminud %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminud %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminud (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminud 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminud (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminud 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminud -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminud 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminud -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminud %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminud %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminud %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminud (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminud 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminud (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminud 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminud -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminud 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminud -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpminuq: EVEX unsigned qword minimum (same coverage matrix as vpminsq).
vpminuq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminuq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminuq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminuq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminuq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpmovsxbd: sign-extend bytes to dwords.  No broadcast forms (pmov* does not
# take {1toN}); memory access size shrinks with the conversion, hence the
# 508/1016-byte Disp8*N boundaries for xmm/ymm destinations.
vpmovsxbd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxbd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxbd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbd 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbd -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxbd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxbd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbd 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbd -1032(%rdx), %ymm30 # AVX512{F,VL}
# vpmovsxbq: sign-extend bytes to qwords (2-/4-byte memory operands, so the
# Disp8 limits drop to 254/508).
vpmovsxbq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxbq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxbq 254(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbq 256(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbq -256(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbq -258(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxbq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxbq 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbq 512(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbq -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbq -516(%rdx), %ymm30 # AVX512{F,VL}
# vpmovsxdq: sign-extend dwords to qwords (8-/16-byte memory operands).
vpmovsxdq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxdq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxdq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxdq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxdq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxdq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxdq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxdq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxdq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxdq 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxdq 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxdq -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxdq -2064(%rdx), %ymm30 # AVX512{F,VL}
# vpmovsxwd: sign-extend words to dwords (8-/16-byte memory operands).
vpmovsxwd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxwd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxwd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwd 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwd -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxwd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxwd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwd 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwd -2064(%rdx), %ymm30 # AVX512{F,VL}
# vpmovsxwq: sign-extend words to qwords (4-/8-byte memory operands).
vpmovsxwq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxwq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxwq 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwq 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwq -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwq -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxwq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxwq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwq -1032(%rdx), %ymm30 # AVX512{F,VL}
# vpmovzxbd: zero-extend bytes to dwords (mirrors the vpmovsxbd cases).
vpmovzxbd %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxbd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxbd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbd 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbd -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxbd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxbd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbd 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbd -1032(%rdx), %ymm30 # AVX512{F,VL}
# vpmovzxbq: zero-extend bytes to qwords (mirrors the vpmovsxbq cases).
vpmovzxbq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxbq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxbq 254(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbq 256(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbq -256(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbq -258(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxbq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxbq 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbq 512(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbq -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbq -516(%rdx), %ymm30 # AVX512{F,VL}
# vpmovzxdq: zero-extend dwords to qwords (mirrors the vpmovsxdq cases).
vpmovzxdq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxdq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxdq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxdq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxdq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxdq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxdq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxdq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxdq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxdq 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxdq 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxdq -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxdq -2064(%rdx), %ymm30 # AVX512{F,VL}
# vpmovzxwd: zero-extend words to dwords (mirrors the vpmovsxwd cases).
vpmovzxwd %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxwd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxwd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwd 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwd -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxwd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxwd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwd 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwd -2064(%rdx), %ymm30 # AVX512{F,VL}
# vpmovzxwq: zero-extend words to qwords (mirrors the vpmovsxwq cases).
vpmovzxwq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxwq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxwq 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwq 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwq -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwq -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxwq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxwq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwq -1032(%rdx), %ymm30 # AVX512{F,VL}
# vpmuldq: signed dword multiply producing qwords; broadcasts use the qword
# element size ({1to2}/{1to4}, 1016/1024 Disp8 limits).
vpmuldq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmuldq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmuldq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmuldq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmuldq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpmulld: low-dword multiply; dword broadcasts ({1to4}/{1to8}, 508/512 limits).
vpmulld %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmulld %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmulld (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmulld %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmulld (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpmuludq: unsigned dword multiply producing qwords (same matrix as vpmuldq).
vpmuludq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmuludq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmuludq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmuludq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmuludq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpord: bitwise OR, dword element size (selects the d-suffixed EVEX form so
# masking/broadcast operate per dword).
vpord %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpord %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpord %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpord (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpord 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpord (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpord 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpord -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpord 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpord -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpord %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpord %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpord %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpord (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpord 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpord (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpord 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpord -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpord 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpord -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vporq: bitwise OR, qword element size ({1to2}/{1to4} broadcasts).
vporq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vporq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vporq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vporq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vporq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vporq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vporq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vporq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vporq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vporq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vporq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vporq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vporq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vporq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vporq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vporq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vporq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vporq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vporq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vporq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpscatter{dd,dq,qd,qq}: VSIB scatters.  All forms require an opmask ({%k1});
# the index register width follows the index element size, so vpscatterdq with
# %ymm data uses an %xmm (dword) index, and vpscatterqd with a %ymm (qword)
# index stores only an %xmm's worth of dwords.
vpscatterdd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterdd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
# vpshufd: dword shuffle with imm8 control; exercises both $0xab and $123
# immediates plus memory/broadcast/Disp8 cases for xmm and ymm destinations.
vpshufd $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpshufd $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpshufd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpshufd $123, %xmm29, %xmm30 # AVX512{F,VL}
vpshufd $123, (%rcx), %xmm30 # AVX512{F,VL}
vpshufd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpshufd $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpshufd $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpshufd $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpshufd $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpshufd $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpshufd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpshufd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpshufd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpshufd $123, %ymm29, %ymm30 # AVX512{F,VL}
vpshufd $123, (%rcx), %ymm30 # AVX512{F,VL}
vpshufd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpshufd $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpshufd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpshufd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpshufd $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpshufd $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# vpslld (shift-count-in-xmm form): the count operand is always a 128-bit
# xmm/m128, even for ymm destinations — hence no broadcast cases and identical
# 2032/2048 Disp8 limits in both sections.
vpslld %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpslld %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpslld %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpslld (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpslld 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpslld 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpslld 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpslld -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpslld -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpslld %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpslld %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpslld %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpslld (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpslld 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpslld 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpslld 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpslld -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpslld -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
# vpsllq (shift-count-in-xmm form): qword left shift, same shape as vpslld.
vpsllq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsllq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsllq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsllq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsllq %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsllq %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsllq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllq 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllq -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
# vpsllvd: per-element variable left shift (dword); unlike vpslld, the source
# is full-width, so broadcast and wide-Disp8 cases apply.
vpsllvd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsllvd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsllvd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsllvd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsllvd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpsllvq: per-element variable left shift (qword).
vpsllvq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsllvq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsllvq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsllvq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsllvq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpsrad (shift-count-in-xmm form): arithmetic right shift, dword.
vpsrad %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrad %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrad %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrad (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrad 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrad -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrad %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrad %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrad (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrad 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrad 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrad 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrad -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrad -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
# vpsraq (shift-count-in-xmm form): arithmetic right shift, qword — an
# AVX512-only instruction (no VEX predecessor).
vpsraq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsraq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsraq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsraq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsraq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsraq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsraq %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsraq %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsraq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsraq 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsraq -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
# vpsravd: per-element variable arithmetic right shift (dword).
vpsravd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsravd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsravd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsravd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsravd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsravd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsravq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsravq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsravq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsravq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrld %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrld %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrld (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrld 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrld -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrld %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrld (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrld 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrld 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrld 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrld -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrld -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlq %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlq 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlq -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlvd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlvd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlvd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlvd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlvq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlvq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlvq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlvq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpsrld $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrld $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrld $123, %xmm29, %xmm30 # AVX512{F,VL}
vpsrld $123, (%rcx), %xmm30 # AVX512{F,VL}
vpsrld $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpsrld $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpsrld $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpsrld $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpsrld $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpsrld $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpsrld $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrld $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrld $123, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld $123, (%rcx), %ymm30 # AVX512{F,VL}
vpsrld $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpsrld $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpsrld $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpsrld $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpsrld $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpsrld $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpsrlq $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlq $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq $123, (%rcx), %xmm30 # AVX512{F,VL}
vpsrlq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpsrlq $123, (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vpsrlq $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpsrlq $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpsrlq $123, 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpsrlq $123, -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpsrlq $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlq $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq $123, (%rcx), %ymm30 # AVX512{F,VL}
vpsrlq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpsrlq $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpsrlq $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpsrlq $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpsrlq $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpsrlq $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpsubd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsubd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsubd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsubd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsubd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsubd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsubq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsubq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsubq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsubq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vptestmd %xmm29, %xmm30, %k5 # AVX512{F,VL}
vptestmd %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vptestmd (%rcx), %xmm30, %k5 # AVX512{F,VL}
vptestmd 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vptestmd (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vptestmd 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmd -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmd 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vptestmd -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vptestmd %ymm29, %ymm30, %k5 # AVX512{F,VL}
vptestmd %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vptestmd (%rcx), %ymm30, %k5 # AVX512{F,VL}
vptestmd 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vptestmd (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vptestmd 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmd -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmd 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vptestmd -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vptestmq %xmm29, %xmm30, %k5 # AVX512{F,VL}
vptestmq %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vptestmq (%rcx), %xmm30, %k5 # AVX512{F,VL}
vptestmq 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vptestmq (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vptestmq 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmq -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmq 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vptestmq -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vptestmq %ymm29, %ymm30, %k5 # AVX512{F,VL}
vptestmq %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vptestmq (%rcx), %ymm30, %k5 # AVX512{F,VL}
vptestmq 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vptestmq (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vptestmq 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmq -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmq 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vptestmq -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpunpckhdq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpckhdq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpckhdq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpckhqdq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpckhqdq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpckldq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpckldq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpckldq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpckldq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpcklqdq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpcklqdq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpxord %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpxord %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpxord %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpxord (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpxord 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpxord (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpxord 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxord -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxord 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpxord -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpxord %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpxord %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpxord %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpxord (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpxord 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpxord (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpxord 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxord -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxord 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpxord -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpxorq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpxorq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpxorq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpxorq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vrcp14pd %xmm29, %xmm30 # AVX512{F,VL}
vrcp14pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrcp14pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrcp14pd (%rcx), %xmm30 # AVX512{F,VL}
vrcp14pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrcp14pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vrcp14pd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14pd 2048(%rdx), %xmm30 # AVX512{F,VL}
vrcp14pd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14pd -2064(%rdx), %xmm30 # AVX512{F,VL}
vrcp14pd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrcp14pd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrcp14pd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrcp14pd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrcp14pd %ymm29, %ymm30 # AVX512{F,VL}
vrcp14pd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrcp14pd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrcp14pd (%rcx), %ymm30 # AVX512{F,VL}
vrcp14pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrcp14pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vrcp14pd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14pd 4096(%rdx), %ymm30 # AVX512{F,VL}
vrcp14pd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14pd -4128(%rdx), %ymm30 # AVX512{F,VL}
vrcp14pd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrcp14pd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrcp14pd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrcp14pd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrcp14ps %xmm29, %xmm30 # AVX512{F,VL}
vrcp14ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrcp14ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrcp14ps (%rcx), %xmm30 # AVX512{F,VL}
vrcp14ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrcp14ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vrcp14ps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14ps 2048(%rdx), %xmm30 # AVX512{F,VL}
vrcp14ps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14ps -2064(%rdx), %xmm30 # AVX512{F,VL}
vrcp14ps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrcp14ps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrcp14ps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrcp14ps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrcp14ps %ymm29, %ymm30 # AVX512{F,VL}
vrcp14ps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrcp14ps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrcp14ps (%rcx), %ymm30 # AVX512{F,VL}
vrcp14ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrcp14ps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vrcp14ps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14ps 4096(%rdx), %ymm30 # AVX512{F,VL}
vrcp14ps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14ps -4128(%rdx), %ymm30 # AVX512{F,VL}
vrcp14ps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrcp14ps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vrcp14ps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrcp14ps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vrsqrt14pd %xmm29, %xmm30 # AVX512{F,VL}
vrsqrt14pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrsqrt14pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%rcx), %xmm30 # AVX512{F,VL}
vrsqrt14pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrsqrt14pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vrsqrt14pd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd 2048(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14pd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd -2064(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14pd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrsqrt14pd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrsqrt14pd %ymm29, %ymm30 # AVX512{F,VL}
vrsqrt14pd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrsqrt14pd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%rcx), %ymm30 # AVX512{F,VL}
vrsqrt14pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrsqrt14pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vrsqrt14pd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd 4096(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14pd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd -4128(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14pd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrsqrt14pd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrsqrt14ps %xmm29, %xmm30 # AVX512{F,VL}
vrsqrt14ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrsqrt14ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%rcx), %xmm30 # AVX512{F,VL}
vrsqrt14ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrsqrt14ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vrsqrt14ps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps 2048(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14ps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps -2064(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14ps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrsqrt14ps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrsqrt14ps %ymm29, %ymm30 # AVX512{F,VL}
vrsqrt14ps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrsqrt14ps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%rcx), %ymm30 # AVX512{F,VL}
vrsqrt14ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrsqrt14ps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vrsqrt14ps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps 4096(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14ps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps -4128(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14ps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vrsqrt14ps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vscatterdpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterdpd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vshufpd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vshufpd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vshufpd $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshufpd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshufpd $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vshufps $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vshufps $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshufps $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshufps $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vsqrtpd %xmm29, %xmm30 # AVX512{F,VL}
vsqrtpd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsqrtpd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsqrtpd (%rcx), %xmm30 # AVX512{F,VL}
vsqrtpd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vsqrtpd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vsqrtpd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtpd 2048(%rdx), %xmm30 # AVX512{F,VL}
vsqrtpd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtpd -2064(%rdx), %xmm30 # AVX512{F,VL}
vsqrtpd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vsqrtpd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vsqrtpd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vsqrtpd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vsqrtpd %ymm29, %ymm30 # AVX512{F,VL}
vsqrtpd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsqrtpd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsqrtpd (%rcx), %ymm30 # AVX512{F,VL}
vsqrtpd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vsqrtpd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vsqrtpd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtpd 4096(%rdx), %ymm30 # AVX512{F,VL}
vsqrtpd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtpd -4128(%rdx), %ymm30 # AVX512{F,VL}
vsqrtpd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vsqrtpd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vsqrtpd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vsqrtpd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vsqrtps %xmm29, %xmm30 # AVX512{F,VL}
vsqrtps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsqrtps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsqrtps (%rcx), %xmm30 # AVX512{F,VL}
vsqrtps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vsqrtps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vsqrtps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtps 2048(%rdx), %xmm30 # AVX512{F,VL}
vsqrtps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtps -2064(%rdx), %xmm30 # AVX512{F,VL}
vsqrtps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vsqrtps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vsqrtps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vsqrtps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vsqrtps %ymm29, %ymm30 # AVX512{F,VL}
vsqrtps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsqrtps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsqrtps (%rcx), %ymm30 # AVX512{F,VL}
vsqrtps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vsqrtps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vsqrtps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtps 4096(%rdx), %ymm30 # AVX512{F,VL}
vsqrtps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtps -4128(%rdx), %ymm30 # AVX512{F,VL}
vsqrtps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vsqrtps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vsqrtps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vsqrtps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vsubpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsubpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsubpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vsubpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsubpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsubpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vsubpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vsubpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vsubps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vsubps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsubps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsubps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vsubps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vsubps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vsubps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vsubps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vsubps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vsubps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsubps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsubps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vsubps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vsubps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vsubps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vsubps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpckhpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpckhpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpckhpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpckhpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpckhps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpckhps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpckhps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpckhps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpcklpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpcklpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpcklpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpcklpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpcklps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpcklps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpcklps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpcklps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpternlogd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpternlogd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpternlogq $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpternlogq $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmovqb %xmm29, %xmm30 # AVX512{F,VL}
vpmovqb %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovqb %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovqb %ymm29, %xmm30 # AVX512{F,VL}
vpmovqb %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovqb %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsqb %xmm29, %xmm30 # AVX512{F,VL}
vpmovsqb %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsqb %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsqb %ymm29, %xmm30 # AVX512{F,VL}
vpmovsqb %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsqb %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusqb %xmm29, %xmm30 # AVX512{F,VL}
vpmovusqb %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusqb %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusqb %ymm29, %xmm30 # AVX512{F,VL}
vpmovusqb %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusqb %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovqw %xmm29, %xmm30 # AVX512{F,VL}
vpmovqw %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovqw %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovqw %ymm29, %xmm30 # AVX512{F,VL}
vpmovqw %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovqw %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsqw %xmm29, %xmm30 # AVX512{F,VL}
vpmovsqw %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsqw %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsqw %ymm29, %xmm30 # AVX512{F,VL}
vpmovsqw %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsqw %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusqw %xmm29, %xmm30 # AVX512{F,VL}
vpmovusqw %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusqw %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusqw %ymm29, %xmm30 # AVX512{F,VL}
vpmovusqw %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusqw %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovqd %xmm29, %xmm30 # AVX512{F,VL}
vpmovqd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovqd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovqd %ymm29, %xmm30 # AVX512{F,VL}
vpmovqd %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovqd %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsqd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsqd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsqd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsqd %ymm29, %xmm30 # AVX512{F,VL}
vpmovsqd %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsqd %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusqd %xmm29, %xmm30 # AVX512{F,VL}
vpmovusqd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusqd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusqd %ymm29, %xmm30 # AVX512{F,VL}
vpmovusqd %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusqd %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovdb %xmm29, %xmm30 # AVX512{F,VL}
vpmovdb %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovdb %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovdb %ymm29, %xmm30 # AVX512{F,VL}
vpmovdb %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovdb %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsdb %xmm29, %xmm30 # AVX512{F,VL}
vpmovsdb %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsdb %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsdb %ymm29, %xmm30 # AVX512{F,VL}
vpmovsdb %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsdb %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusdb %xmm29, %xmm30 # AVX512{F,VL}
vpmovusdb %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusdb %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusdb %ymm29, %xmm30 # AVX512{F,VL}
vpmovusdb %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusdb %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovdw %xmm29, %xmm30 # AVX512{F,VL}
vpmovdw %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovdw %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovdw %ymm29, %xmm30 # AVX512{F,VL}
vpmovdw %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovdw %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsdw %xmm29, %xmm30 # AVX512{F,VL}
vpmovsdw %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsdw %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsdw %ymm29, %xmm30 # AVX512{F,VL}
vpmovsdw %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsdw %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusdw %xmm29, %xmm30 # AVX512{F,VL}
vpmovusdw %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusdw %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovusdw %ymm29, %xmm30 # AVX512{F,VL}
vpmovusdw %ymm29, %xmm30{%k7} # AVX512{F,VL}
vpmovusdw %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vshuff32x4 $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshuff32x4 $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshuff32x4 $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff32x4 $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff32x4 $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff32x4 $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshuff32x4 $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff32x4 $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshuff64x2 $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshuff64x2 $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff64x2 $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff64x2 $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff64x2 $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshuff64x2 $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshuff64x2 $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshufi32x4 $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshufi32x4 $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi32x4 $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi32x4 $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi32x4 $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufi32x4 $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi32x4 $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshufi64x2 $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshufi64x2 $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi64x2 $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi64x2 $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi64x2 $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufi64x2 $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufi64x2 $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermt2d %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermt2d (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2d 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2d -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2d 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2d -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2d %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermt2d %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermt2d (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2d 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2d -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2d 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2d -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2d -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermt2q %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermt2q (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2q 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2q -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2q 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermt2q -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermt2q %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermt2q %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermt2q (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2q 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2q -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2q 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermt2q -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermt2q -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
	# vpermt2ps (32-bit element variant): same masking/broadcast/SIB/Disp8
	# coverage as above; broadcast boundaries scale with the 4-byte element
	# (508 = 127*4 fits Disp8*N, 512 does not).
	vpermt2ps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpermt2ps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2ps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermt2ps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2ps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vpermt2pd (64-bit FP element variant): same coverage pattern; 8-byte
	# element broadcast boundaries (1016 = 127*8).
	vpermt2pd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpermt2pd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2pd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermt2pd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2pd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# valignq (qword alignment shift with imm8): adds immediate forms —
	# $0xab probes the full 8-bit immediate, $123 the common small value —
	# on top of the usual masking/broadcast/SIB/Disp8 coverage.
	valignq $0xab, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	valignq $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	valignq $123, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq $123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq $123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq $123, 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $123, -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq $123, -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq $0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	valignq $0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	valignq $123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq $123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq $123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq $123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq $123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq $123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vscalefpd: three-operand form; qword-element masking/broadcast/Disp8
	# coverage at XMM and YMM lengths.
	vscalefpd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vscalefpd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vscalefpd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vscalefpd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vscalefpd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vscalefps: dword-element counterpart of vscalefpd; 4-byte broadcast
	# Disp8 boundaries (508/512).
	vscalefps %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vscalefps %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vscalefps (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vscalefps %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vscalefps (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vfixupimmpd: imm8 + three operands; $0xab/$123 immediates plus the
	# standard qword masking/broadcast/Disp8 matrix.
	vfixupimmpd $0xab, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfixupimmpd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmpd $123, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $123, -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd $0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfixupimmpd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmpd $123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd $123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd $123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vfixupimmps: dword-element counterpart; 4-byte broadcast Disp8
	# boundaries (508/512).
	vfixupimmps $0xab, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfixupimmps $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmps $123, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $123, -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps $0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfixupimmps $0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmps $123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps $123, -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps $123, -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vpslld imm8 form (shift-by-immediate; memory operand is the shifted
	# source, not the count): dword masking/broadcast/Disp8 coverage.
	vpslld $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpslld $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpslld $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpslld $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpslld $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpslld $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpslld $123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vpslld $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpslld $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpslld $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpslld $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpslld $123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpslld $123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpslld $123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpslld $123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpslld $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpslld $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpslld $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpslld $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpslld $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpslld $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpslld $123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vpslld $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpslld $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpslld $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpslld $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpslld $123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpslld $123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpslld $123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpslld $123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vpsllq imm8 form: qword variant (1016/1024 broadcast Disp8 boundaries).
	vpsllq $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsllq $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpsllq $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpsllq $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsllq $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpsllq $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpsllq $123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsllq $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsllq $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpsllq $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsllq $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpsllq $123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsllq $123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsllq $123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsllq $123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsllq $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsllq $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpsllq $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpsllq $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsllq $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpsllq $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpsllq $123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsllq $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsllq $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpsllq $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsllq $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpsllq $123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsllq $123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsllq $123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsllq $123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vpsrad imm8 form (arithmetic right shift, dword elements).
	vpsrad $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsrad $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpsrad $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpsrad $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsrad $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpsrad $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpsrad $123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vpsrad $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsrad $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpsrad $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsrad $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpsrad $123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpsrad $123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpsrad $123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpsrad $123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpsrad $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsrad $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpsrad $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpsrad $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsrad $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpsrad $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpsrad $123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vpsrad $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsrad $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpsrad $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsrad $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpsrad $123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpsrad $123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpsrad $123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpsrad $123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vpsraq imm8 form (AVX-512-only qword arithmetic right shift).
	vpsraq $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsraq $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpsraq $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpsraq $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsraq $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpsraq $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpsraq $123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsraq $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsraq $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpsraq $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsraq $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpsraq $123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsraq $123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsraq $123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsraq $123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsraq $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsraq $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpsraq $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpsraq $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsraq $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpsraq $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpsraq $123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsraq $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsraq $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpsraq $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsraq $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpsraq $123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsraq $123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsraq $123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsraq $123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vprolvd (variable rotate left, dword elements): three-operand
	# masking/broadcast/SIB/Disp8 matrix at XMM and YMM lengths.
	vprolvd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprolvd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprolvd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprolvd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprolvd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprold (rotate left by imm8, dword elements).
	vprold $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprold $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprold $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprold $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprold $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprold $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprold $123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vprold $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprold $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprold $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprold $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprold $123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprold $123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprold $123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprold $123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprold $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprold $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprold $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprold $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprold $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprold $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprold $123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vprold $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprold $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprold $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprold $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprold $123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprold $123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vprold $123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprold $123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vprolvq (variable rotate left, qword elements).
	vprolvq %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprolvq %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprolvq (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprolvq %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprolvq (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprolq (rotate left by imm8, qword elements).
	vprolq $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolq $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprolq $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprolq $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolq $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprolq $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprolq $123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vprolq $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprolq $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprolq $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprolq $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprolq $123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprolq $123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprolq $123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprolq $123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprolq $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolq $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprolq $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprolq $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolq $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprolq $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprolq $123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vprolq $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprolq $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprolq $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprolq $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprolq $123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprolq $123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vprolq $123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprolq $123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vprorvd (variable rotate right, dword elements).
	vprorvd %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprorvd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprorvd (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprorvd %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprorvd (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprord (rotate right by imm8, dword elements).
	vprord $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprord $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprord $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprord $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprord $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprord $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprord $123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vprord $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprord $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprord $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprord $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprord $123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprord $123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprord $123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprord $123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprord $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprord $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprord $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprord $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprord $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprord $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprord $123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vprord $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprord $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprord $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprord $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprord $123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprord $123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vprord $123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprord $123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vprorvq (variable rotate right, qword elements).
	vprorvq %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprorvq %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprorvq (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprorvq %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprorvq (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprorq (rotate right by imm8, qword elements).
	vprorq $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorq $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprorq $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprorq $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorq $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprorq $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprorq $123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vprorq $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprorq $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprorq $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprorq $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprorq $123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprorq $123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprorq $123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprorq $123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprorq $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorq $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprorq $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprorq $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorq $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprorq $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprorq $123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vprorq $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprorq $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprorq $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprorq $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprorq $123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprorq $123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vprorq $123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprorq $123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vrndscalepd (round to scaled integral, imm8 control; qword elements).
	vrndscalepd $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscalepd $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vrndscalepd $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vrndscalepd $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vrndscalepd $123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vrndscalepd $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscalepd $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vrndscalepd $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vrndscalepd $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vrndscalepd $123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd $123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vrndscaleps (dword-element variant of vrndscalepd).
	vrndscaleps $0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscaleps $0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vrndscaleps $0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vrndscaleps $123, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, (%rcx), %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vrndscaleps $123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vrndscaleps $0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscaleps $0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vrndscaleps $0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vrndscaleps $123, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, (%rcx), %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vrndscaleps $123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps $123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
vpcompressq %xmm30, (%rcx) # AVX512{F,VL}
vpcompressq %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpcompressq %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpcompressq %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpcompressq %xmm30, 1024(%rdx) # AVX512{F,VL}
vpcompressq %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpcompressq %xmm30, -1032(%rdx) # AVX512{F,VL}
vpcompressq %ymm30, (%rcx) # AVX512{F,VL}
vpcompressq %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpcompressq %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpcompressq %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpcompressq %ymm30, 1024(%rdx) # AVX512{F,VL}
vpcompressq %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpcompressq %ymm30, -1032(%rdx) # AVX512{F,VL}
vpcompressq %xmm29, %xmm30 # AVX512{F,VL}
vpcompressq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpcompressq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpcompressq %ymm29, %ymm30 # AVX512{F,VL}
vpcompressq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpcompressq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm30, (%rcx) # AVX512{F,VL}
vcvtps2ph $0xab, %xmm30, (%rcx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %xmm30, (%rcx) # AVX512{F,VL}
vcvtps2ph $123, %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcvtps2ph $123, %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vcvtps2ph $123, %xmm30, 1024(%rdx) # AVX512{F,VL}
vcvtps2ph $123, %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vcvtps2ph $123, %xmm30, -1032(%rdx) # AVX512{F,VL}
vcvtps2ph $0xab, %ymm30, (%rcx) # AVX512{F,VL}
vcvtps2ph $0xab, %ymm30, (%rcx){%k7} # AVX512{F,VL}
vcvtps2ph $123, %ymm30, (%rcx) # AVX512{F,VL}
vcvtps2ph $123, %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcvtps2ph $123, %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vcvtps2ph $123, %ymm30, 2048(%rdx) # AVX512{F,VL}
vcvtps2ph $123, %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vcvtps2ph $123, %ymm30, -2064(%rdx) # AVX512{F,VL}
vextractf32x4 $0xab, %ymm29, (%rcx) # AVX512{F,VL}
vextractf32x4 $0xab, %ymm29, (%rcx){%k7} # AVX512{F,VL}
vextractf32x4 $123, %ymm29, (%rcx) # AVX512{F,VL}
vextractf32x4 $123, %ymm29, 0x123(%rax,%r14,8) # AVX512{F,VL}
vextractf32x4 $123, %ymm29, 2032(%rdx) # AVX512{F,VL} Disp8
vextractf32x4 $123, %ymm29, 2048(%rdx) # AVX512{F,VL}
vextractf32x4 $123, %ymm29, -2048(%rdx) # AVX512{F,VL} Disp8
vextractf32x4 $123, %ymm29, -2064(%rdx) # AVX512{F,VL}
vextracti32x4 $0xab, %ymm29, (%rcx) # AVX512{F,VL}
vextracti32x4 $0xab, %ymm29, (%rcx){%k7} # AVX512{F,VL}
vextracti32x4 $123, %ymm29, (%rcx) # AVX512{F,VL}
vextracti32x4 $123, %ymm29, 0x123(%rax,%r14,8) # AVX512{F,VL}
vextracti32x4 $123, %ymm29, 2032(%rdx) # AVX512{F,VL} Disp8
vextracti32x4 $123, %ymm29, 2048(%rdx) # AVX512{F,VL}
vextracti32x4 $123, %ymm29, -2048(%rdx) # AVX512{F,VL} Disp8
vextracti32x4 $123, %ymm29, -2064(%rdx) # AVX512{F,VL}
vmovapd %xmm30, (%rcx) # AVX512{F,VL}
vmovapd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovapd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovapd %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovapd %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovapd %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovapd %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovapd %ymm30, (%rcx) # AVX512{F,VL}
vmovapd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovapd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovapd %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovapd %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovapd %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovapd %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovaps %xmm30, (%rcx) # AVX512{F,VL}
vmovaps %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovaps %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovaps %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovaps %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovaps %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovaps %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovaps %ymm30, (%rcx) # AVX512{F,VL}
vmovaps %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovaps %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovaps %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovaps %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovaps %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovaps %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovdqa32 %xmm30, (%rcx) # AVX512{F,VL}
vmovdqa32 %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqa32 %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqa32 %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovdqa32 %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovdqa32 %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovdqa32 %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovdqa32 %ymm30, (%rcx) # AVX512{F,VL}
vmovdqa32 %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqa32 %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqa32 %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovdqa32 %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovdqa32 %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovdqa32 %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovdqa64 %xmm30, (%rcx) # AVX512{F,VL}
vmovdqa64 %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqa64 %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqa64 %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovdqa64 %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovdqa64 %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovdqa64 %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovdqa64 %ymm30, (%rcx) # AVX512{F,VL}
vmovdqa64 %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqa64 %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqa64 %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovdqa64 %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovdqa64 %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovdqa64 %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovdqu32 %xmm30, (%rcx) # AVX512{F,VL}
vmovdqu32 %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqu32 %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqu32 %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovdqu32 %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovdqu32 %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovdqu32 %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovdqu32 %ymm30, (%rcx) # AVX512{F,VL}
vmovdqu32 %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqu32 %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqu32 %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovdqu32 %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovdqu32 %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovdqu32 %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovdqu64 %xmm30, (%rcx) # AVX512{F,VL}
vmovdqu64 %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqu64 %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqu64 %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovdqu64 %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovdqu64 %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovdqu64 %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovdqu64 %ymm30, (%rcx) # AVX512{F,VL}
vmovdqu64 %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovdqu64 %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovdqu64 %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovdqu64 %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovdqu64 %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovdqu64 %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovupd %xmm30, (%rcx) # AVX512{F,VL}
vmovupd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovupd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovupd %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovupd %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovupd %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovupd %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovupd %ymm30, (%rcx) # AVX512{F,VL}
vmovupd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovupd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovupd %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovupd %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovupd %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovupd %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovups %xmm30, (%rcx) # AVX512{F,VL}
vmovups %xmm30, (%rcx){%k7} # AVX512{F,VL}
vmovups %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovups %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovups %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovups %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovups %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovups %ymm30, (%rcx) # AVX512{F,VL}
vmovups %ymm30, (%rcx){%k7} # AVX512{F,VL}
vmovups %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovups %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovups %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovups %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovups %ymm30, -4128(%rdx) # AVX512{F,VL}
vpmovqb %xmm30, (%rcx) # AVX512{F,VL}
vpmovqb %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovqb %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovqb %xmm30, 254(%rdx) # AVX512{F,VL} Disp8
vpmovqb %xmm30, 256(%rdx) # AVX512{F,VL}
vpmovqb %xmm30, -256(%rdx) # AVX512{F,VL} Disp8
vpmovqb %xmm30, -258(%rdx) # AVX512{F,VL}
vpmovqb %ymm30, (%rcx) # AVX512{F,VL}
vpmovqb %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovqb %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovqb %ymm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovqb %ymm30, 512(%rdx) # AVX512{F,VL}
vpmovqb %ymm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovqb %ymm30, -516(%rdx) # AVX512{F,VL}
vpmovsqb %xmm30, (%rcx) # AVX512{F,VL}
vpmovsqb %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsqb %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsqb %xmm30, 254(%rdx) # AVX512{F,VL} Disp8
vpmovsqb %xmm30, 256(%rdx) # AVX512{F,VL}
vpmovsqb %xmm30, -256(%rdx) # AVX512{F,VL} Disp8
vpmovsqb %xmm30, -258(%rdx) # AVX512{F,VL}
vpmovsqb %ymm30, (%rcx) # AVX512{F,VL}
vpmovsqb %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsqb %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsqb %ymm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovsqb %ymm30, 512(%rdx) # AVX512{F,VL}
vpmovsqb %ymm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovsqb %ymm30, -516(%rdx) # AVX512{F,VL}
vpmovusqb %xmm30, (%rcx) # AVX512{F,VL}
vpmovusqb %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusqb %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusqb %xmm30, 254(%rdx) # AVX512{F,VL} Disp8
vpmovusqb %xmm30, 256(%rdx) # AVX512{F,VL}
vpmovusqb %xmm30, -256(%rdx) # AVX512{F,VL} Disp8
vpmovusqb %xmm30, -258(%rdx) # AVX512{F,VL}
vpmovusqb %ymm30, (%rcx) # AVX512{F,VL}
vpmovusqb %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusqb %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusqb %ymm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovusqb %ymm30, 512(%rdx) # AVX512{F,VL}
vpmovusqb %ymm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovusqb %ymm30, -516(%rdx) # AVX512{F,VL}
vpmovqw %xmm30, (%rcx) # AVX512{F,VL}
vpmovqw %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovqw %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovqw %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovqw %xmm30, 512(%rdx) # AVX512{F,VL}
vpmovqw %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovqw %xmm30, -516(%rdx) # AVX512{F,VL}
vpmovqw %ymm30, (%rcx) # AVX512{F,VL}
vpmovqw %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovqw %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovqw %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovqw %ymm30, 1024(%rdx) # AVX512{F,VL}
vpmovqw %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovqw %ymm30, -1032(%rdx) # AVX512{F,VL}
vpmovsqw %xmm30, (%rcx) # AVX512{F,VL}
vpmovsqw %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsqw %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsqw %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovsqw %xmm30, 512(%rdx) # AVX512{F,VL}
vpmovsqw %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovsqw %xmm30, -516(%rdx) # AVX512{F,VL}
vpmovsqw %ymm30, (%rcx) # AVX512{F,VL}
vpmovsqw %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsqw %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsqw %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovsqw %ymm30, 1024(%rdx) # AVX512{F,VL}
vpmovsqw %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovsqw %ymm30, -1032(%rdx) # AVX512{F,VL}
vpmovusqw %xmm30, (%rcx) # AVX512{F,VL}
vpmovusqw %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusqw %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusqw %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovusqw %xmm30, 512(%rdx) # AVX512{F,VL}
vpmovusqw %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovusqw %xmm30, -516(%rdx) # AVX512{F,VL}
vpmovusqw %ymm30, (%rcx) # AVX512{F,VL}
vpmovusqw %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusqw %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusqw %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovusqw %ymm30, 1024(%rdx) # AVX512{F,VL}
vpmovusqw %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovusqw %ymm30, -1032(%rdx) # AVX512{F,VL}
vpmovqd %xmm30, (%rcx) # AVX512{F,VL}
vpmovqd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovqd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovqd %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovqd %xmm30, 1024(%rdx) # AVX512{F,VL}
vpmovqd %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovqd %xmm30, -1032(%rdx) # AVX512{F,VL}
vpmovqd %ymm30, (%rcx) # AVX512{F,VL}
vpmovqd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovqd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovqd %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vpmovqd %ymm30, 2048(%rdx) # AVX512{F,VL}
vpmovqd %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vpmovqd %ymm30, -2064(%rdx) # AVX512{F,VL}
vpmovsqd %xmm30, (%rcx) # AVX512{F,VL}
vpmovsqd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsqd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsqd %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovsqd %xmm30, 1024(%rdx) # AVX512{F,VL}
vpmovsqd %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovsqd %xmm30, -1032(%rdx) # AVX512{F,VL}
vpmovsqd %ymm30, (%rcx) # AVX512{F,VL}
vpmovsqd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsqd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsqd %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vpmovsqd %ymm30, 2048(%rdx) # AVX512{F,VL}
vpmovsqd %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vpmovsqd %ymm30, -2064(%rdx) # AVX512{F,VL}
vpmovusqd %xmm30, (%rcx) # AVX512{F,VL}
vpmovusqd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusqd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusqd %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovusqd %xmm30, 1024(%rdx) # AVX512{F,VL}
vpmovusqd %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovusqd %xmm30, -1032(%rdx) # AVX512{F,VL}
vpmovusqd %ymm30, (%rcx) # AVX512{F,VL}
vpmovusqd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusqd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusqd %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vpmovusqd %ymm30, 2048(%rdx) # AVX512{F,VL}
vpmovusqd %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vpmovusqd %ymm30, -2064(%rdx) # AVX512{F,VL}
vpmovdb %xmm30, (%rcx) # AVX512{F,VL}
vpmovdb %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovdb %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovdb %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovdb %xmm30, 512(%rdx) # AVX512{F,VL}
vpmovdb %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovdb %xmm30, -516(%rdx) # AVX512{F,VL}
vpmovdb %ymm30, (%rcx) # AVX512{F,VL}
vpmovdb %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovdb %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovdb %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovdb %ymm30, 1024(%rdx) # AVX512{F,VL}
vpmovdb %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovdb %ymm30, -1032(%rdx) # AVX512{F,VL}
vpmovsdb %xmm30, (%rcx) # AVX512{F,VL}
vpmovsdb %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsdb %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsdb %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovsdb %xmm30, 512(%rdx) # AVX512{F,VL}
vpmovsdb %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovsdb %xmm30, -516(%rdx) # AVX512{F,VL}
vpmovsdb %ymm30, (%rcx) # AVX512{F,VL}
vpmovsdb %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsdb %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsdb %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovsdb %ymm30, 1024(%rdx) # AVX512{F,VL}
vpmovsdb %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovsdb %ymm30, -1032(%rdx) # AVX512{F,VL}
vpmovusdb %xmm30, (%rcx) # AVX512{F,VL}
vpmovusdb %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusdb %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusdb %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpmovusdb %xmm30, 512(%rdx) # AVX512{F,VL}
vpmovusdb %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpmovusdb %xmm30, -516(%rdx) # AVX512{F,VL}
vpmovusdb %ymm30, (%rcx) # AVX512{F,VL}
vpmovusdb %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusdb %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusdb %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovusdb %ymm30, 1024(%rdx) # AVX512{F,VL}
vpmovusdb %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovusdb %ymm30, -1032(%rdx) # AVX512{F,VL}
vpmovdw %xmm30, (%rcx) # AVX512{F,VL}
vpmovdw %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovdw %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovdw %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovdw %xmm30, 1024(%rdx) # AVX512{F,VL}
vpmovdw %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovdw %xmm30, -1032(%rdx) # AVX512{F,VL}
vpmovdw %ymm30, (%rcx) # AVX512{F,VL}
vpmovdw %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovdw %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovdw %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vpmovdw %ymm30, 2048(%rdx) # AVX512{F,VL}
vpmovdw %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vpmovdw %ymm30, -2064(%rdx) # AVX512{F,VL}
vpmovsdw %xmm30, (%rcx) # AVX512{F,VL}
vpmovsdw %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsdw %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsdw %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovsdw %xmm30, 1024(%rdx) # AVX512{F,VL}
vpmovsdw %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovsdw %xmm30, -1032(%rdx) # AVX512{F,VL}
vpmovsdw %ymm30, (%rcx) # AVX512{F,VL}
vpmovsdw %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovsdw %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovsdw %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vpmovsdw %ymm30, 2048(%rdx) # AVX512{F,VL}
vpmovsdw %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vpmovsdw %ymm30, -2064(%rdx) # AVX512{F,VL}
vpmovusdw %xmm30, (%rcx) # AVX512{F,VL}
vpmovusdw %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusdw %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusdw %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vpmovusdw %xmm30, 1024(%rdx) # AVX512{F,VL}
vpmovusdw %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vpmovusdw %xmm30, -1032(%rdx) # AVX512{F,VL}
vpmovusdw %ymm30, (%rcx) # AVX512{F,VL}
vpmovusdw %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpmovusdw %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpmovusdw %ymm30, 2032(%rdx) # AVX512{F,VL} Disp8
vpmovusdw %ymm30, 2048(%rdx) # AVX512{F,VL}
vpmovusdw %ymm30, -2048(%rdx) # AVX512{F,VL} Disp8
vpmovusdw %ymm30, -2064(%rdx) # AVX512{F,VL}
vcvttpd2udq %xmm29, %xmm30 # AVX512{F,VL}
vcvttpd2udq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvttpd2udq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttpd2udqx (%rcx), %xmm30 # AVX512{F,VL}
vcvttpd2udqx 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttpd2udqx (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2udqx 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqx -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqx 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2udqx -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2udq %ymm29, %xmm30 # AVX512{F,VL}
vcvttpd2udq %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvttpd2udq %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttpd2udqy (%rcx), %xmm30 # AVX512{F,VL}
vcvttpd2udqy 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttpd2udqy (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvttpd2udqy 4064(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy 4096(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqy -4096(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy -4128(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqy 1016(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy 1024(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttpd2udqy -1024(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy -1032(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq %xmm29, %xmm30 # AVX512{F,VL}
vcvttps2udq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvttps2udq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%rcx), %xmm30 # AVX512{F,VL}
vcvttps2udq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttps2udq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvttps2udq -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvttps2udq 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq %ymm29, %ymm30 # AVX512{F,VL}
vcvttps2udq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvttps2udq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%rcx), %ymm30 # AVX512{F,VL}
vcvttps2udq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvttps2udq (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvttps2udq 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvttps2udq -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvttps2udq 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvttps2udq -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpermi2d %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2d %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2d (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2d %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2d (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2q %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2q (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2q %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2q (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vptestnmd %xmm28, %xmm29, %k5 # AVX512{F,VL}
vptestnmd %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vptestnmd (%rcx), %xmm29, %k5 # AVX512{F,VL}
vptestnmd 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vptestnmd (%rcx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vptestnmd 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmd -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmd 508(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vptestnmd -512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -516(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vptestnmd %ymm28, %ymm29, %k5 # AVX512{F,VL}
vptestnmd %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vptestnmd (%rcx), %ymm29, %k5 # AVX512{F,VL}
vptestnmd 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vptestnmd (%rcx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vptestnmd 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmd -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmd 508(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vptestnmd -512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -516(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vptestnmq %xmm28, %xmm29, %k5 # AVX512{F,VL}
vptestnmq %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vptestnmq (%rcx), %xmm29, %k5 # AVX512{F,VL}
vptestnmq 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vptestnmq (%rcx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vptestnmq 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmq -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmq 1016(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vptestnmq -1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -1032(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vptestnmq %ymm28, %ymm29, %k5 # AVX512{F,VL}
vptestnmq %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vptestnmq (%rcx), %ymm29, %k5 # AVX512{F,VL}
vptestnmq 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vptestnmq (%rcx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vptestnmq 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmq -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmq 1016(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vptestnmq -1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -1032(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
.intel_syntax noprefix
# ---------------------------------------------------------------------------
# Intel-syntax AVX512{F,VL} encoding tests.  Each mnemonic group below
# exercises: plain register form, {k7} write-masking, {k7}{z} zero-masking,
# memory operands, {1toN} embedded broadcasts, and +/- displacement pairs
# straddling the EVEX compressed-displacement limit — lines tagged "Disp8"
# must assemble with a scaled 8-bit displacement; their neighbours (one
# element size beyond the limit) must fall back to a 32-bit displacement.
# NOTE(review): operand values and line order are matched against the
# companion dump (.d) file — do not reorder or renumber these patterns.
# ---------------------------------------------------------------------------
# vaddpd: packed double add; 8-byte broadcast element, so {1toN} Disp8
# boundary is +/-1016/1024; full-vector Disp8 boundary is 2032 (xmm) / 4064 (ymm).
vaddpd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vaddpd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vaddpd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vaddpd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
vaddpd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vaddpd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vaddpd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vaddpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vaddps: packed single add; 4-byte broadcast element, {1toN} Disp8
# boundary is +/-508/512.
vaddps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vaddps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vaddps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vaddps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vaddps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
vaddps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vaddps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vaddps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vaddps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vaddps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# valignd: dword-granular concatenate-and-shift with an 8-bit immediate;
# tested with both 0xab and 123 immediates, plus {1toN} dword broadcasts.
valignd xmm30, xmm29, xmm28, 0xab	 # AVX512{F,VL}
valignd xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
valignd xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
valignd xmm30, xmm29, xmm28, 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
valignd xmm30, xmm29, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
valignd ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
valignd ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
valignd ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
# vblendmpd: opmask-controlled double-precision blend (8-byte elements).
vblendmpd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vblendmpd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vblendmpd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vblendmpd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vblendmpd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vblendmps: opmask-controlled single-precision blend (4-byte elements).
vblendmps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vblendmps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vblendmps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vblendmps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
vblendmps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vblendmps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vblendmps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vblendmps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vbroadcastf32x4 / vbroadcasti32x4: 128-bit tuple broadcasts (memory-only
# source); Disp8 scales by the 16-byte tuple size (+/-2032/2048 boundary).
vbroadcastf32x4 ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
# vbroadcastsd: scalar double broadcast; 8-byte source, Disp8 +/-1016/1024;
# both memory and register (xmm) source forms.
vbroadcastsd ymm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}, QWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}{z}, QWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vbroadcastsd ymm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vbroadcastsd ymm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vbroadcastsd ymm30, xmm29	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}, xmm29	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
# vbroadcastss: scalar single broadcast; 4-byte source, Disp8 +/-508/512;
# xmm and ymm destinations, memory and register sources.
vbroadcastss xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss xmm30{k7}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss xmm30{k7}{z}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
vbroadcastss xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
vbroadcastss xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss ymm30{k7}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss ymm30{k7}{z}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
vbroadcastss ymm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
vbroadcastss ymm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
vbroadcastss xmm30, xmm29	 # AVX512{F,VL}
vbroadcastss xmm30{k7}, xmm29	 # AVX512{F,VL}
vbroadcastss xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vbroadcastss ymm30, xmm29	 # AVX512{F,VL}
vbroadcastss ymm30{k7}, xmm29	 # AVX512{F,VL}
vbroadcastss ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
# vcmppd: packed double compare into opmask k5 with immediate predicate;
# destination is a mask register, so only {k7} masking (no {z}) is tested.
vcmppd k5, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmppd k5{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmppd k5, xmm29, xmm28, 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, [rcx]{1to2}, 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
vcmppd k5, xmm29, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmppd k5{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmppd k5, ymm29, ymm28, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
# vcmpps: packed single compare into opmask k5 (4-byte elements).
vcmpps k5, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmpps k5{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmpps k5, xmm29, xmm28, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmpps k5{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmpps k5, ymm29, ymm28, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
# vcompresspd: compress doubles to memory/register; memory Disp8 scales by
# the 8-byte element size (tuple T1S), hence +/-1016/1024 boundary even for
# full-width stores.  Store forms take {k7} but not {z}.
vcompresspd XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vcompresspd XMMWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vcompresspd XMMWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vcompresspd YMMWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vcompresspd YMMWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vcompresspd xmm30, xmm29	 # AVX512{F,VL}
vcompresspd xmm30{k7}, xmm29	 # AVX512{F,VL}
vcompresspd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcompresspd ymm30, ymm29	 # AVX512{F,VL}
vcompresspd ymm30{k7}, ymm29	 # AVX512{F,VL}
vcompresspd ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
# vcompressps: compress singles; 4-byte element size, Disp8 +/-508/512.
vcompressps XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vcompressps XMMWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vcompressps XMMWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rdx+508], ymm30	 # AVX512{F,VL} Disp8
vcompressps YMMWORD PTR [rdx+512], ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rdx-512], ymm30	 # AVX512{F,VL} Disp8
vcompressps YMMWORD PTR [rdx-516], ymm30	 # AVX512{F,VL}
vcompressps xmm30, xmm29	 # AVX512{F,VL}
vcompressps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcompressps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcompressps ymm30, ymm29	 # AVX512{F,VL}
vcompressps ymm30{k7}, ymm29	 # AVX512{F,VL}
vcompressps ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
# vcvtdq2pd: int32 -> double, widening; xmm form reads only a QWORD (two
# dwords), so the full-vector Disp8 boundary is +/-1016/1024 while the
# {1toN} dword broadcast boundary is +/-508/512.
vcvtdq2pd xmm30, xmm29	 # AVX512{F,VL}
vcvtdq2pd xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtdq2pd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2pd xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vcvtdq2pd xmm30, [rdx+508]{1to2}	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, [rdx+512]{1to2}	 # AVX512{F,VL}
vcvtdq2pd xmm30, [rdx-512]{1to2}	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, [rdx-516]{1to2}	 # AVX512{F,VL}
vcvtdq2pd ymm30, xmm29	 # AVX512{F,VL}
vcvtdq2pd ymm30{k7}, xmm29	 # AVX512{F,VL}
vcvtdq2pd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2pd ymm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtdq2pd ymm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtdq2pd ymm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, [rdx-516]{1to4}	 # AVX512{F,VL}
# vcvtdq2ps: int32 -> single, same-width conversion (4-byte elements).
vcvtdq2ps xmm30, xmm29	 # AVX512{F,VL}
vcvtdq2ps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtdq2ps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2ps xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtdq2ps xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtdq2ps xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvtdq2ps ymm30, ymm29	 # AVX512{F,VL}
vcvtdq2ps ymm30{k7}, ymm29	 # AVX512{F,VL}
vcvtdq2ps ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2ps ymm30, [rcx]{1to8}	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtdq2ps ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
vcvtdq2ps ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
# vcvtpd2dq: double -> int32, narrowing; xmm destination for both source
# widths, so broadcasts use explicit "QWORD BCST" to disambiguate the
# source element size when only the broadcast operand distinguishes forms.
vcvtpd2dq xmm30, xmm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2dq xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{F,VL}
vcvtpd2dq xmm30, ymm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}, ymm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2dq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{F,VL}
# vcvtpd2ps: double -> single, narrowing; like vcvtpd2dq the xmm
# destination serves both source widths, so broadcasts carry "QWORD BCST".
vcvtpd2ps xmm30, xmm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2ps xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{F,VL}
vcvtpd2ps xmm30, ymm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}, ymm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2ps xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{F,VL}
# vcvtpd2udq: double -> unsigned int32 (AVX512-only mnemonic), narrowing.
vcvtpd2udq xmm30, xmm29	 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2udq xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{F,VL}
vcvtpd2udq xmm30, ymm29	 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}, ymm29	 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2udq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{F,VL}
# vcvtph2ps: half -> single, widening; no broadcast forms in this group.
vcvtph2ps xmm30, xmm29	 # AVX512{F,VL}
vcvtph2ps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtph2ps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vcvtph2ps xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vcvtph2ps xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vcvtph2ps ymm30, xmm29	 # AVX512{F,VL}
vcvtph2ps ymm30{k7}, xmm29	 # AVX512{F,VL}
vcvtph2ps ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtph2ps ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtph2ps ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
# vcvtps2dq: single -> int32, round-to-current mode (4-byte elements).
vcvtps2dq xmm30, xmm29	 # AVX512{F,VL}
vcvtps2dq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtps2dq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtps2dq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtps2dq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtps2dq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtps2dq xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtps2dq xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtps2dq xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtps2dq xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvtps2dq ymm30, ymm29	 # AVX512{F,VL}
vcvtps2dq ymm30{k7}, ymm29	 # AVX512{F,VL}
vcvtps2dq ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtps2dq ymm30, [rcx]{1to8}	 # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtps2dq ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtps2dq ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtps2dq ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vcvtps2dq ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
vcvtps2dq ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vcvtps2dq ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
# vcvtps2pd: single -> double, widening; xmm form reads a QWORD (two
# singles), so full-vector Disp8 boundary is +/-1016/1024 and the {1toN}
# single-precision broadcast boundary is +/-508/512.
vcvtps2pd xmm30, xmm29	 # AVX512{F,VL}
vcvtps2pd xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtps2pd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtps2pd xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vcvtps2pd xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vcvtps2pd xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vcvtps2pd xmm30, [rdx+508]{1to2}	 # AVX512{F,VL} Disp8
vcvtps2pd xmm30, [rdx+512]{1to2}	 # AVX512{F,VL}
vcvtps2pd xmm30, [rdx-512]{1to2}	 # AVX512{F,VL} Disp8
vcvtps2pd xmm30, [rdx-516]{1to2}	 # AVX512{F,VL}
vcvtps2pd ymm30, xmm29	 # AVX512{F,VL}
vcvtps2pd ymm30{k7}, xmm29	 # AVX512{F,VL}
vcvtps2pd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtps2pd ymm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtps2pd ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtps2pd ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtps2pd ymm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtps2pd ymm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtps2pd ymm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtps2pd ymm30, [rdx-516]{1to4}	 # AVX512{F,VL}
# vcvtps2ph: single -> half with rounding-control immediate; register
# destination forms only in this group (both 0xab and 123 immediates).
vcvtps2ph xmm30, xmm29, 0xab	 # AVX512{F,VL}
vcvtps2ph xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
vcvtps2ph xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
vcvtps2ph xmm30, xmm29, 123	 # AVX512{F,VL}
vcvtps2ph xmm30, ymm29, 0xab	 # AVX512{F,VL}
vcvtps2ph xmm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
vcvtps2ph xmm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
vcvtps2ph xmm30, ymm29, 123	 # AVX512{F,VL}
# vcvtps2udq: single -> unsigned int32 (AVX512-only mnemonic).
vcvtps2udq xmm30, xmm29	 # AVX512{F,VL}
vcvtps2udq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtps2udq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtps2udq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtps2udq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtps2udq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtps2udq xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtps2udq xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtps2udq xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtps2udq xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvtps2udq ymm30, ymm29	 # AVX512{F,VL}
vcvtps2udq ymm30{k7}, ymm29	 # AVX512{F,VL}
vcvtps2udq ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtps2udq ymm30, [rcx]{1to8}	 # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtps2udq ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtps2udq ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtps2udq ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vcvtps2udq ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
vcvtps2udq ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vcvtps2udq ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
# vcvttpd2dq: double -> int32 with truncation; xmm destination for both
# source widths, broadcasts disambiguated via "QWORD BCST".
vcvttpd2dq xmm30, xmm29	 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvttpd2dq xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{F,VL}
vcvttpd2dq xmm30, ymm29	 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}, ymm29	 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvttpd2dq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{F,VL}
# vcvttps2dq: single -> int32 with truncation (4-byte elements).
vcvttps2dq xmm30, xmm29	 # AVX512{F,VL}
vcvttps2dq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvttps2dq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvttps2dq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvttps2dq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvttps2dq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvttps2dq xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvttps2dq xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvttps2dq xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvttps2dq xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvttps2dq ymm30, ymm29	 # AVX512{F,VL}
vcvttps2dq ymm30{k7}, ymm29	 # AVX512{F,VL}
vcvttps2dq ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvttps2dq ymm30, [rcx]{1to8}	 # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvttps2dq ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvttps2dq ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvttps2dq ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vcvttps2dq ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
vcvttps2dq ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vcvttps2dq ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
# vcvtudq2pd: unsigned int32 -> double, widening; xmm form reads a QWORD,
# so full-vector Disp8 is +/-1016/1024 and dword broadcast is +/-508/512.
vcvtudq2pd xmm30, xmm29	 # AVX512{F,VL}
vcvtudq2pd xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtudq2pd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtudq2pd xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vcvtudq2pd xmm30, [rdx+508]{1to2}	 # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, [rdx+512]{1to2}	 # AVX512{F,VL}
vcvtudq2pd xmm30, [rdx-512]{1to2}	 # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, [rdx-516]{1to2}	 # AVX512{F,VL}
vcvtudq2pd ymm30, xmm29	 # AVX512{F,VL}
vcvtudq2pd ymm30{k7}, xmm29	 # AVX512{F,VL}
vcvtudq2pd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtudq2pd ymm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtudq2pd ymm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtudq2pd ymm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, [rdx-516]{1to4}	 # AVX512{F,VL}
# vcvtudq2ps: unsigned int32 -> single, same-width (4-byte elements).
vcvtudq2ps xmm30, xmm29	 # AVX512{F,VL}
vcvtudq2ps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtudq2ps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtudq2ps xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtudq2ps xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtudq2ps xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvtudq2ps ymm30, ymm29	 # AVX512{F,VL}
vcvtudq2ps ymm30{k7}, ymm29	 # AVX512{F,VL}
vcvtudq2ps ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtudq2ps ymm30, [rcx]{1to8}	 # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtudq2ps ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
vcvtudq2ps ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
vdivpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vdivpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vdivpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vdivpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vdivpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vdivpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vdivpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vdivpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vdivpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vdivpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vdivps xmm30, xmm29, xmm28 # AVX512{F,VL}
vdivps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vdivps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vdivps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vdivps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vdivps ymm30, ymm29, ymm28 # AVX512{F,VL}
vdivps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vdivps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vdivps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vdivps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vexpandpd xmm30, XMMWORD PTR [rdx+1024] # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vexpandpd xmm30, XMMWORD PTR [rdx-1032] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vexpandpd ymm30, YMMWORD PTR [rdx+1024] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vexpandpd ymm30, YMMWORD PTR [rdx-1032] # AVX512{F,VL}
vexpandpd xmm30, xmm29 # AVX512{F,VL}
vexpandpd xmm30{k7}, xmm29 # AVX512{F,VL}
vexpandpd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vexpandpd ymm30, ymm29 # AVX512{F,VL}
vexpandpd ymm30{k7}, ymm29 # AVX512{F,VL}
vexpandpd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vexpandps xmm30, XMMWORD PTR [rdx+512] # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vexpandps xmm30, XMMWORD PTR [rdx-516] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vexpandps ymm30, YMMWORD PTR [rdx+512] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vexpandps ymm30, YMMWORD PTR [rdx-516] # AVX512{F,VL}
vexpandps xmm30, xmm29 # AVX512{F,VL}
vexpandps xmm30{k7}, xmm29 # AVX512{F,VL}
vexpandps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vexpandps ymm30, ymm29 # AVX512{F,VL}
vexpandps ymm30{k7}, ymm29 # AVX512{F,VL}
vexpandps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vextractf32x4 xmm30, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 xmm30{k7}, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 xmm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 xmm30, ymm29, 123 # AVX512{F,VL}
vextracti32x4 xmm30, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 xmm30{k7}, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 xmm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 xmm30, ymm29, 123 # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsub132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsub132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsub132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsub132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
# ---------------------------------------------------------------------------
# AVX512{F,VL} FMA-family encoding tests (Intel syntax section of this file).
# Every mnemonic/vector-width group below exercises the same operand matrix:
#   - register-register form, plus {k7} merge-masking and {k7}{z} zero-masking
#   - plain memory operand and full SIB addressing (base + index*scale + disp)
#   - embedded broadcast: {1to2}/{1to4}/{1to8}, matching element size x width
#   - displacement pairs straddling the EVEX Disp8*N compression limit: the
#     value tagged "Disp8" (e.g. +2032 = 127*16 for an XMMWORD access, +508 =
#     127*4 for a 4-byte broadcast) must assemble to the compressed 8-bit
#     form; the next value out must fall back to a 32-bit displacement.
# NOTE(review): this is an assembler testsuite fixture; the instruction lines
# must stay byte-identical so the output matches the paired expected-
# disassembly (.d) file.
# ---------------------------------------------------------------------------
vfmsub132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsub132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsub132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsub132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfmsub213pd: 8-byte elements, so broadcast is {1to2}/{1to4}, Disp8*8 ---
vfmsub213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsub213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsub213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsub213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsub213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfmsub213ps: 4-byte elements, so broadcast is {1to4}/{1to8}, Disp8*4 ---
vfmsub213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsub213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsub213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsub213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsub213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfmsub231pd ---
vfmsub231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsub231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsub231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsub231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsub231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfmsub231ps ---
vfmsub231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsub231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsub231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsub231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsub231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfmsubadd132pd ---
vfmsubadd132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfmsubadd132ps ---
vfmsubadd132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfmsubadd213pd ---
vfmsubadd213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfmsubadd213ps ---
vfmsubadd213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfmsubadd231pd ---
vfmsubadd231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfmsubadd231ps ---
vfmsubadd231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfnmadd132pd ---
vfnmadd132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfnmadd132ps ---
vfnmadd132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfnmadd213pd ---
vfnmadd213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfnmadd213ps ---
vfnmadd213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfnmadd231pd ---
vfnmadd231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfnmadd231ps ---
vfnmadd231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfnmsub132pd ---
vfnmsub132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfnmsub132ps ---
vfnmsub132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# --- vfnmsub213pd ---
vfnmsub213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# --- vfnmsub213ps ---
vfnmsub213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vgatherdpd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherdpd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherdpd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherdpd ymm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherdpd ymm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherdpd ymm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherdps xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherdps xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherdps xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherdps ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vgatherdps ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vgatherdps ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vgatherqpd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherqpd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherqpd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherqpd ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vgatherqpd ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vgatherqpd ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherqps xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vgatherqps xmm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vgetexppd xmm30, xmm29 # AVX512{F,VL}
vgetexppd xmm30{k7}, xmm29 # AVX512{F,VL}
vgetexppd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexppd xmm30, [rcx]{1to2} # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vgetexppd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vgetexppd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vgetexppd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vgetexppd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vgetexppd ymm30, ymm29 # AVX512{F,VL}
vgetexppd ymm30{k7}, ymm29 # AVX512{F,VL}
vgetexppd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexppd ymm30, [rcx]{1to4} # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vgetexppd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vgetexppd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vgetexppd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vgetexppd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vgetexpps xmm30, xmm29 # AVX512{F,VL}
vgetexpps xmm30{k7}, xmm29 # AVX512{F,VL}
vgetexpps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexpps xmm30, [rcx]{1to4} # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vgetexpps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vgetexpps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vgetexpps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vgetexpps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vgetexpps ymm30, ymm29 # AVX512{F,VL}
vgetexpps ymm30{k7}, ymm29 # AVX512{F,VL}
vgetexpps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexpps ymm30, [rcx]{1to8} # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vgetexpps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vgetexpps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vgetexpps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vgetexpps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vgetmantpd xmm30, xmm29, 0xab # AVX512{F,VL}
vgetmantpd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vgetmantpd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vgetmantpd xmm30, xmm29, 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantpd xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vgetmantpd xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vgetmantpd ymm30, ymm29, 0xab # AVX512{F,VL}
vgetmantpd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vgetmantpd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vgetmantpd ymm30, ymm29, 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantpd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vgetmantpd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm30, xmm29, 0xab # AVX512{F,VL}
vgetmantps xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vgetmantps xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vgetmantps xmm30, xmm29, 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantps xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vgetmantps xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vgetmantps ymm30, ymm29, 0xab # AVX512{F,VL}
vgetmantps ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vgetmantps ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vgetmantps ymm30, ymm29, 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantps ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vgetmantps ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, xmm28, 0xab # AVX512{F,VL}
vinsertf32x4 ymm30{k7}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinsertf32x4 ymm30{k7}{z}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, xmm28, 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, xmm28, 0xab # AVX512{F,VL}
vinserti32x4 ymm30{k7}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinserti32x4 ymm30{k7}{z}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, xmm28, 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vmaxpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vmaxpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vmaxpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmaxpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmaxpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vmaxpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vmaxpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vmaxpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vmaxpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmaxpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmaxpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vmaxpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vmaxpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vmaxpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vmaxps xmm30, xmm29, xmm28 # AVX512{F,VL}
vmaxps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vmaxps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vmaxps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vmaxps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmaxps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vmaxps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmaxps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmaxps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmaxps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmaxps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vmaxps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vmaxps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vmaxps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vmaxps ymm30, ymm29, ymm28 # AVX512{F,VL}
vmaxps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vmaxps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vmaxps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vmaxps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmaxps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vmaxps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmaxps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmaxps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmaxps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmaxps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vmaxps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vmaxps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vmaxps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vminpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vminpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vminpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vminpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vminpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vminpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vminpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vminpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vminpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vminpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vminpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vminpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vminpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vminpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vminpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vminpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vminpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vminpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vminpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vminpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vminpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vminpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vminpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vminpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vminpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vminpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vminpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vminpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vminps xmm30, xmm29, xmm28 # AVX512{F,VL}
vminps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vminps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vminps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vminps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vminps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vminps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vminps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vminps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vminps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vminps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vminps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vminps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vminps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vminps ymm30, ymm29, ymm28 # AVX512{F,VL}
vminps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vminps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vminps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vminps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vminps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vminps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vminps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vminps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vminps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vminps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vminps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vminps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vminps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vmovapd xmm30, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovapd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovapd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovapd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovapd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovapd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovapd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovapd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovapd ymm30, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovapd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovapd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovapd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovapd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovapd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovapd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovapd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovaps xmm30, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}, xmm29 # AVX512{F,VL}
vmovaps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovaps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovaps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovaps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovaps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovaps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovaps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovaps ymm30, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}, ymm29 # AVX512{F,VL}
vmovaps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovaps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovaps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovaps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovaps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovaps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovaps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovddup xmm30, xmm29 # AVX512{F,VL}
vmovddup xmm30{k7}, xmm29 # AVX512{F,VL}
vmovddup xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovddup xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vmovddup xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovddup xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vmovddup xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vmovddup xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vmovddup xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vmovddup ymm30, ymm29 # AVX512{F,VL}
vmovddup ymm30{k7}, ymm29 # AVX512{F,VL}
vmovddup ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovddup ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovddup ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovddup ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovddup ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovddup ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovddup ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovdqa32 xmm30, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa32 xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqa32 xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqa32 xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovdqa32 xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovdqa32 xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovdqa32 xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovdqa32 ymm30, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa32 ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqa32 ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqa32 ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovdqa32 ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovdqa32 ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovdqa32 ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovdqa64 xmm30, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqa64 xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqa64 xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqa64 xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovdqa64 xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovdqa64 xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovdqa64 xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovdqa64 ymm30, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqa64 ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqa64 ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqa64 ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovdqa64 ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovdqa64 ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovdqa64 ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovdqu32 xmm30, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu32 xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqu32 xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqu32 xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovdqu32 xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovdqu32 xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovdqu32 xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovdqu32 ymm30, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu32 ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqu32 ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqu32 ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovdqu32 ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovdqu32 ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovdqu32 ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovdqu64 xmm30, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovdqu64 xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqu64 xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqu64 xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovdqu64 xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovdqu64 xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovdqu64 xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovdqu64 ymm30, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovdqu64 ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovdqu64 ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovdqu64 ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovdqu64 ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovdqu64 ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovdqu64 ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovntdq XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovntdq XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovntdq XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovntdq XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovntdq XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovntdq XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovntdq YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovntdq YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovntdq YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovntdq YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovntdq YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovntdq YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovntdqa xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovntdqa xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovntdqa xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovntdqa xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovntdqa xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovntdqa xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovntdqa ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovntdqa ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovntdqa ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovntdqa ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovntdqa ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovntdqa ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovntpd XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovntpd XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovntpd XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovntpd XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovntpd XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovntpd XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovntpd YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovntpd YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovntpd YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovntpd YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovntpd YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovntpd YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovntps XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovntps XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovntps XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovntps XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovntps XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovntps XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovntps YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovntps YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovntps YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovntps YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovntps YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovntps YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovshdup xmm30, xmm29 # AVX512{F,VL}
vmovshdup xmm30{k7}, xmm29 # AVX512{F,VL}
vmovshdup xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovshdup xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovshdup xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovshdup xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovshdup xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovshdup xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovshdup xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovshdup ymm30, ymm29 # AVX512{F,VL}
vmovshdup ymm30{k7}, ymm29 # AVX512{F,VL}
vmovshdup ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovshdup ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovshdup ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovshdup ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovshdup ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovshdup ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovshdup ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovsldup xmm30, xmm29 # AVX512{F,VL}
vmovsldup xmm30{k7}, xmm29 # AVX512{F,VL}
vmovsldup xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovsldup xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovsldup xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovsldup xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovsldup xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovsldup xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovsldup xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovsldup ymm30, ymm29 # AVX512{F,VL}
vmovsldup ymm30{k7}, ymm29 # AVX512{F,VL}
vmovsldup ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovsldup ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovsldup ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovsldup ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovsldup ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovsldup ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovsldup ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovupd xmm30, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}, xmm29 # AVX512{F,VL}
vmovupd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovupd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovupd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovupd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovupd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovupd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovupd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovupd ymm30, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}, ymm29 # AVX512{F,VL}
vmovupd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovupd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovupd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovupd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovupd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovupd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovupd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmovups xmm30, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}, xmm29 # AVX512{F,VL}
vmovups xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vmovups xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vmovups xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovups xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmovups xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmovups xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmovups xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmovups ymm30, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}, ymm29 # AVX512{F,VL}
vmovups ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vmovups ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vmovups ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmovups ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmovups ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmovups ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmovups ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmulpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vmulpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vmulpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vmulpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vmulpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmulpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vmulpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmulpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmulpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmulpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmulpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vmulpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vmulpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vmulpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vmulpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vmulpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vmulpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vmulpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vmulpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmulpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vmulpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmulpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmulpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmulpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmulpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vmulpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vmulpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vmulpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vmulps xmm30, xmm29, xmm28 # AVX512{F,VL}
vmulps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vmulps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vmulps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vmulps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmulps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vmulps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmulps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmulps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmulps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmulps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vmulps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vmulps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vmulps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vmulps ymm30, ymm29, ymm28 # AVX512{F,VL}
vmulps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vmulps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vmulps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vmulps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmulps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vmulps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmulps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmulps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmulps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vmulps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vmulps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vmulps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vmulps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpabsd xmm30, xmm29 # AVX512{F,VL}
vpabsd xmm30{k7}, xmm29 # AVX512{F,VL}
vpabsd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpabsd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpabsd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpabsd xmm30, [rcx]{1to4} # AVX512{F,VL}
vpabsd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpabsd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpabsd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpabsd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpabsd xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpabsd xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vpabsd xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpabsd xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vpabsd ymm30, ymm29 # AVX512{F,VL}
vpabsd ymm30{k7}, ymm29 # AVX512{F,VL}
vpabsd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpabsd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpabsd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpabsd ymm30, [rcx]{1to8} # AVX512{F,VL}
vpabsd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpabsd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpabsd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpabsd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpabsd ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpabsd ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vpabsd ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpabsd ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpabsq xmm30, xmm29 # AVX512{F,VL}
vpabsq xmm30{k7}, xmm29 # AVX512{F,VL}
vpabsq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpabsq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpabsq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpabsq xmm30, [rcx]{1to2} # AVX512{F,VL}
vpabsq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpabsq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpabsq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpabsq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpabsq xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpabsq xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vpabsq xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpabsq xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vpabsq ymm30, ymm29 # AVX512{F,VL}
vpabsq ymm30{k7}, ymm29 # AVX512{F,VL}
vpabsq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpabsq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpabsq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpabsq ymm30, [rcx]{1to4} # AVX512{F,VL}
vpabsq ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpabsq ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpabsq ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpabsq ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpabsq ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpabsq ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vpabsq ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpabsq ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpaddd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpaddd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpaddd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpaddd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpaddd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpaddd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpaddd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpaddd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpaddd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpaddd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpaddd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpaddd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpaddd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpaddd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpaddd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpaddd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpaddd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpaddd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpaddd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpaddd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpaddd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpaddd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpaddd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpaddd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpaddd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpaddd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpaddd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpaddd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpaddq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpaddq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpaddq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpaddq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpaddq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpaddq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpaddq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpaddq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpaddq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpaddq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpaddq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpaddq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpaddq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpaddq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpaddq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpaddq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpaddq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpaddq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpaddq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpaddq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpaddq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpaddq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpaddq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpaddq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpaddq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpaddq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpaddq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpaddq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpandd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpandd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpandd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpandd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpandd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpandd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpandd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpandd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpandd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpandd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpandd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpandd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpandd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpandd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpandd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpandd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpandd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpandd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpandd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpandd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpandd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpandd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpandd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpandd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpandd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpandd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpandnd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpandnd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpandnd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpandnd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpandnd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandnd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpandnd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpandnd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpandnd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpandnd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpandnd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpandnd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpandnd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpandnd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpandnd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpandnd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpandnd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpandnd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpandnd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandnd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpandnd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpandnd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpandnd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpandnd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpandnd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpandnd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpandnd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpandnd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpandnq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpandnq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpandnq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpandnq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpandnq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandnq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpandnq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpandnq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpandnq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpandnq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpandnq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpandnq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpandnq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpandnq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpandnq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpandnq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpandnq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpandnq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpandnq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandnq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpandnq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpandnq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpandnq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpandnq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpandnq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpandnq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpandnq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpandnq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpandq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpandq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpandq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpandq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpandq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpandq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpandq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpandq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpandq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpandq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpandq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpandq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpandq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpandq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpandq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpandq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpandq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpandq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpandq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpandq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpandq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpandq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpandq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpandq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpandq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpandq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpandq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpblendmd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpblendmd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpblendmd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpblendmd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpblendmd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpblendmd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpblendmd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpblendmd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpblendmd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpblendmd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpblendmd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpblendmd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpblendmd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpblendmd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpblendmd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpblendmd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpblendmd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpblendmd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpblendmd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpblendmd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpblendmd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpblendmd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpblendmd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpblendmd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpblendmd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpblendmd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpblendmd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpblendmd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpbroadcastd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastd xmm30{k7}, DWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastd xmm30{k7}{z}, DWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpbroadcastd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpbroadcastd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpbroadcastd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpbroadcastd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpbroadcastd ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastd ymm30{k7}, DWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastd ymm30{k7}{z}, DWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastd ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpbroadcastd ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpbroadcastd ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpbroadcastd ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpbroadcastd ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpbroadcastd xmm30, xmm29 # AVX512{F,VL}
vpbroadcastd xmm30{k7}, xmm29 # AVX512{F,VL}
vpbroadcastd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpbroadcastd ymm30, xmm29 # AVX512{F,VL}
vpbroadcastd ymm30{k7}, xmm29 # AVX512{F,VL}
vpbroadcastd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpbroadcastd xmm30, eax # AVX512{F,VL}
vpbroadcastd xmm30{k7}, eax # AVX512{F,VL}
vpbroadcastd xmm30{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd xmm30, ebp # AVX512{F,VL}
vpbroadcastd xmm30, r13d # AVX512{F,VL}
vpbroadcastd ymm30, eax # AVX512{F,VL}
vpbroadcastd ymm30{k7}, eax # AVX512{F,VL}
vpbroadcastd ymm30{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd ymm30, ebp # AVX512{F,VL}
vpbroadcastd ymm30, r13d # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq xmm30{k7}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq xmm30{k7}{z}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpbroadcastq xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpbroadcastq xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq ymm30{k7}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq ymm30{k7}{z}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpbroadcastq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpbroadcastq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpbroadcastq xmm30, xmm29 # AVX512{F,VL}
vpbroadcastq xmm30{k7}, xmm29 # AVX512{F,VL}
vpbroadcastq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpbroadcastq ymm30, xmm29 # AVX512{F,VL}
vpbroadcastq ymm30{k7}, xmm29 # AVX512{F,VL}
vpbroadcastq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpbroadcastq xmm30, rax # AVX512{F,VL}
vpbroadcastq xmm30{k7}, rax # AVX512{F,VL}
vpbroadcastq xmm30{k7}{z}, rax # AVX512{F,VL}
vpbroadcastq xmm30, r8 # AVX512{F,VL}
vpbroadcastq ymm30, rax # AVX512{F,VL}
vpbroadcastq ymm30{k7}, rax # AVX512{F,VL}
vpbroadcastq ymm30{k7}{z}, rax # AVX512{F,VL}
vpbroadcastq ymm30, r8 # AVX512{F,VL}
vpcmpd k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpd k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5, xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpd k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpcmpeqd k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqd k5, xmm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpeqd k5, xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vpcmpeqd k5, xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vpcmpeqd k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqd k5, ymm30, [rcx]{1to8} # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpeqd k5, ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vpcmpeqd k5, ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpcmpeqq k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqq k5, xmm30, [rcx]{1to2} # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpeqq k5, xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vpcmpeqq k5, xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vpcmpeqq k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqq k5, ymm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpeqq k5, ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vpcmpeqq k5, ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpcmpgtd k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtd k5, xmm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpgtd k5, xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vpcmpgtd k5, xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vpcmpgtd k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtd k5, ymm30, [rcx]{1to8} # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpgtd k5, ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vpcmpgtd k5, ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpcmpgtq k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtq k5, xmm30, [rcx]{1to2} # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpgtq k5, xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vpcmpgtq k5, xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vpcmpgtq k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtq k5, ymm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpgtq k5, ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vpcmpgtq k5, ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpcmpq k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpq k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5, xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpq k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpud k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpud k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpuq k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpuq k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpblendmq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpblendmq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpblendmq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpblendmq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpblendmq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpblendmq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpblendmq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpblendmq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpblendmq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpblendmq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpblendmq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpblendmq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpcompressd XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rdx+508], ymm30 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [rdx+512], ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rdx-512], ymm30 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [rdx-516], ymm30 # AVX512{F,VL}
vpcompressd xmm30, xmm29 # AVX512{F,VL}
vpcompressd xmm30{k7}, xmm29 # AVX512{F,VL}
vpcompressd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpcompressd ymm30, ymm29 # AVX512{F,VL}
vpcompressd ymm30{k7}, ymm29 # AVX512{F,VL}
vpcompressd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpermd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermilpd xmm30, xmm29, 0xab # AVX512{F,VL}
vpermilpd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpermilpd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpermilpd xmm30, xmm29, 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilpd xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpermilpd xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpermilpd ymm30, ymm29, 0xab # AVX512{F,VL}
vpermilpd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermilpd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermilpd ymm30, ymm29, 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilpd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermilpd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpermilpd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpermilpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermilpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermilpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermilpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermilpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermilpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermilpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermilpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermilpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermilpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermilps xmm30, xmm29, 0xab # AVX512{F,VL}
vpermilps xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpermilps xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpermilps xmm30, xmm29, 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilps xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpermilps xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpermilps xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpermilps xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilps xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpermilps xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilps xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpermilps ymm30, ymm29, 0xab # AVX512{F,VL}
vpermilps ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermilps ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermilps ymm30, ymm29, 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilps ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermilps ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermilps ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermilps ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpermilps ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpermilps ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpermilps ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpermilps xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermilps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermilps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermilps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermilps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermilps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermilps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermilps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermilps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermilps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermpd ymm30, ymm29, 0xab # AVX512{F,VL}
vpermpd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermpd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermpd ymm30, ymm29, 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermpd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermpd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermpd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermpd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermpd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpermpd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermpd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpermps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermq ymm30, ymm29, 0xab # AVX512{F,VL}
vpermq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermq ymm30, ymm29, 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpermq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpexpandd xmm30, XMMWORD PTR [rdx+512] # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpexpandd xmm30, XMMWORD PTR [rdx-516] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpexpandd ymm30, YMMWORD PTR [rdx+512] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpexpandd ymm30, YMMWORD PTR [rdx-516] # AVX512{F,VL}
vpexpandd xmm30, xmm29 # AVX512{F,VL}
vpexpandd xmm30{k7}, xmm29 # AVX512{F,VL}
vpexpandd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpexpandd ymm30, ymm29 # AVX512{F,VL}
vpexpandd ymm30{k7}, ymm29 # AVX512{F,VL}
vpexpandd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpexpandq xmm30, XMMWORD PTR [rdx+1024] # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpexpandq xmm30, XMMWORD PTR [rdx-1032] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpexpandq ymm30, YMMWORD PTR [rdx+1024] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpexpandq ymm30, YMMWORD PTR [rdx-1032] # AVX512{F,VL}
vpexpandq xmm30, xmm29 # AVX512{F,VL}
vpexpandq xmm30{k7}, xmm29 # AVX512{F,VL}
vpexpandq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpexpandq ymm30, ymm29 # AVX512{F,VL}
vpexpandq ymm30{k7}, ymm29 # AVX512{F,VL}
vpexpandq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpgatherdd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherdd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherdd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherdd ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vpgatherdd ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vpgatherdd ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vpgatherdq xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherdq xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherdq xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherdq ymm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherdq ymm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherdq ymm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vpgatherqq xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherqq xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherqq xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherqq ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vpgatherqq ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vpgatherqq ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxsd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpmaxsd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpmaxsd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxsd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpmaxsd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpmaxsq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxsq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmaxsq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmaxsq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxsq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmaxsq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpmaxud xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxud xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxud xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxud xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxud xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpmaxud xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpmaxud ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxud ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxud ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxud ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxud ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpmaxud ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpmaxuq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxuq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxuq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmaxuq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmaxuq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxuq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxuq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmaxuq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpminsd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminsd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminsd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminsd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpminsd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpminsd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminsd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminsd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminsd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpminsd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpminsq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminsq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminsq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminsq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpminsq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpminsq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminsq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminsq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminsq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpminsq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpminud xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminud xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminud xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminud xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminud xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpminud xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpminud ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminud ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminud ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminud ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminud ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpminud ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpminuq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminuq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminuq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminuq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminuq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpminuq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpminuq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminuq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminuq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminuq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminuq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpminuq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpmovsxbd xmm30, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxbd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxbd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxbd ymm30, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxbd ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxbd ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxbq xmm30, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rcx] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rdx+254] # AVX512{F,VL} Disp8
vpmovsxbq xmm30, WORD PTR [rdx+256] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rdx-256] # AVX512{F,VL} Disp8
vpmovsxbq xmm30, WORD PTR [rdx-258] # AVX512{F,VL}
vpmovsxbq ymm30, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxbq ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxbq ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxdq xmm30, xmm29 # AVX512{F,VL}
vpmovsxdq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxdq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxdq xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxdq xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxdq ymm30, xmm29 # AVX512{F,VL}
vpmovsxdq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxdq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovsxdq ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovsxdq ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovsxwd xmm30, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxwd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxwd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxwd ymm30, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovsxwd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovsxwd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovsxwq xmm30, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxwq xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxwq xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxwq ymm30, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxwq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxwq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxbd xmm30, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxbd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxbd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxbd ymm30, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxbd ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxbd ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxbq xmm30, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rcx] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rdx+254] # AVX512{F,VL} Disp8
vpmovzxbq xmm30, WORD PTR [rdx+256] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rdx-256] # AVX512{F,VL} Disp8
vpmovzxbq xmm30, WORD PTR [rdx-258] # AVX512{F,VL}
vpmovzxbq ymm30, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxbq ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxbq ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxdq xmm30, xmm29 # AVX512{F,VL}
vpmovzxdq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxdq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxdq xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxdq xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxdq ymm30, xmm29 # AVX512{F,VL}
vpmovzxdq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxdq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovzxdq ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovzxdq ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovzxwd xmm30, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxwd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxwd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxwd ymm30, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovzxwd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovzxwd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovzxwq xmm30, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxwq xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxwq xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxwq ymm30, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxwq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxwq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmuldq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmuldq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmuldq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuldq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmuldq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmuldq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmuldq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmuldq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmuldq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuldq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmuldq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmuldq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpmulld xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmulld xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmulld xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmulld xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmulld xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpmulld xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpmulld ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmulld ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmulld ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmulld ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmulld ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpmulld ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpmuludq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmuludq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmuludq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuludq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmuludq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmuludq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmuludq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmuludq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmuludq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuludq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmuludq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmuludq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpord xmm30, xmm29, xmm28 # AVX512{F,VL}
vpord xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpord xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpord xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpord xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpord xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpord xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpord xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpord xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpord xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpord ymm30, ymm29, ymm28 # AVX512{F,VL}
vpord ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpord ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpord ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpord ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpord ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpord ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpord ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpord ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpord ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vporq xmm30, xmm29, xmm28 # AVX512{F,VL}
vporq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vporq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vporq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vporq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vporq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vporq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vporq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vporq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vporq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vporq ymm30, ymm29, ymm28 # AVX512{F,VL}
vporq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vporq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vporq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vporq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vporq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vporq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vporq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vporq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vporq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpscatterdd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdd [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vpscatterdd [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [r9+xmm31+256]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [rcx+xmm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vpscatterqd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r9+ymm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [rcx+ymm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterqq [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterqq [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vpscatterqq [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vpshufd xmm30, xmm29, 0xab # AVX512{F,VL}
vpshufd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpshufd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpshufd xmm30, xmm29, 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpshufd xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpshufd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpshufd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpshufd xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpshufd ymm30, ymm29, 0xab # AVX512{F,VL}
vpshufd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpshufd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpshufd ymm30, ymm29, 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpshufd ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpshufd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpshufd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpshufd ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpslld xmm30, xmm29, xmm28 # AVX512{F,VL}
vpslld xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpslld xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpslld xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpslld xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpslld ymm30, ymm29, xmm28 # AVX512{F,VL}
vpslld ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpslld ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpslld ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpslld ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsllq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsllq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllq ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsllq ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsllq ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllq ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllq ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsllvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsllvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsllvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsllvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsllvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsllvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsllvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsllvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsllvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsllvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsllvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsllvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsllvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsllvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsllvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsllvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsllvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpsrad xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrad xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrad xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrad xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrad xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrad ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsrad ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsrad ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrad ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrad ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsraq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsraq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsraq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsraq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsraq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsraq ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsraq ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsraq ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsraq ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsraq ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsravd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsravd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsravd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsravd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsravd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsravd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsravd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsravd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsravd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsravd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsravq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsravq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsravq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsravq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsravq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsravq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsravq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsravq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsravq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsravq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpsrld xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrld xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrld xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrld xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrld xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrld ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsrld ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsrld ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrld ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrld ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrlq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrlq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlq ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsrlq ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsrlq ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrlvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsrlvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsrlvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsrlvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsrlvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsrlvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrlvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsrlvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsrlvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsrlvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsrlvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpsrld xmm30, xmm29, 0xab # AVX512{F,VL}
vpsrld xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsrld xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsrld xmm30, xmm29, 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrld xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsrld xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsrld xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsrld xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpsrld ymm30, ymm29, 0xab # AVX512{F,VL}
vpsrld ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsrld ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsrld ymm30, ymm29, 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrld ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsrld ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsrld ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsrld ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpsrlq xmm30, xmm29, 0xab # AVX512{F,VL}
vpsrlq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsrlq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsrlq xmm30, xmm29, 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrlq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsrlq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpsrlq ymm30, ymm29, 0xab # AVX512{F,VL}
vpsrlq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsrlq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsrlq ymm30, ymm29, 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrlq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsrlq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpsubd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsubd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsubd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsubd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsubd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsubd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsubd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsubd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsubd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsubd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsubq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsubq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsubq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsubq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsubq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsubq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsubq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsubq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsubq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsubq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vptestmd k5, xmm30, xmm29 # AVX512{F,VL}
vptestmd k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmd k5, xmm30, [rcx]{1to4} # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestmd k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestmd k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestmd k5, xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vptestmd k5, xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vptestmd k5, xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vptestmd k5, xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vptestmd k5, ymm30, ymm29 # AVX512{F,VL}
vptestmd k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmd k5, ymm30, [rcx]{1to8} # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestmd k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestmd k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestmd k5, ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vptestmd k5, ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vptestmd k5, ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vptestmd k5, ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vptestmq k5, xmm30, xmm29 # AVX512{F,VL}
vptestmq k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmq k5, xmm30, [rcx]{1to2} # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestmq k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestmq k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestmq k5, xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vptestmq k5, xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vptestmq k5, xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vptestmq k5, xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vptestmq k5, ymm30, ymm29 # AVX512{F,VL}
vptestmq k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmq k5, ymm30, [rcx]{1to4} # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestmq k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestmq k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestmq k5, ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vptestmq k5, ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vptestmq k5, ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vptestmq k5, ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpckhdq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhdq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpckhdq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhdq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpckhqdq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhqdq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpckhqdq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhqdq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpunpckldq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpckldq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpckldq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpunpckldq xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpunpckldq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpckldq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpckldq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpunpckldq ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpcklqdq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpcklqdq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpcklqdq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpcklqdq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpxord xmm30, xmm29, xmm28 # AVX512{F,VL}
vpxord xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpxord xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxord xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpxord xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpxord xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpxord ymm30, ymm29, ymm28 # AVX512{F,VL}
vpxord ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpxord ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxord ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpxord ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpxord ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpxorq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpxorq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpxorq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxorq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpxorq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpxorq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpxorq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpxorq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpxorq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxorq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpxorq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpxorq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vrcp14pd xmm30, xmm29 # AVX512{F,VL}
vrcp14pd xmm30{k7}, xmm29 # AVX512{F,VL}
vrcp14pd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14pd xmm30, [rcx]{1to2} # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrcp14pd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrcp14pd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrcp14pd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vrcp14pd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vrcp14pd ymm30, ymm29 # AVX512{F,VL}
vrcp14pd ymm30{k7}, ymm29 # AVX512{F,VL}
vrcp14pd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14pd ymm30, [rcx]{1to4} # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrcp14pd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrcp14pd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrcp14pd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vrcp14pd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vrcp14ps xmm30, xmm29 # AVX512{F,VL}
vrcp14ps xmm30{k7}, xmm29 # AVX512{F,VL}
vrcp14ps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14ps xmm30, [rcx]{1to4} # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrcp14ps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrcp14ps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrcp14ps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vrcp14ps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vrcp14ps ymm30, ymm29 # AVX512{F,VL}
vrcp14ps ymm30{k7}, ymm29 # AVX512{F,VL}
vrcp14ps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14ps ymm30, [rcx]{1to8} # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrcp14ps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrcp14ps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrcp14ps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vrcp14ps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vrsqrt14pd xmm30, xmm29 # AVX512{F,VL}
vrsqrt14pd xmm30{k7}, xmm29 # AVX512{F,VL}
vrsqrt14pd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14pd xmm30, [rcx]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrsqrt14pd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vrsqrt14pd ymm30, ymm29 # AVX512{F,VL}
vrsqrt14pd ymm30{k7}, ymm29 # AVX512{F,VL}
vrsqrt14pd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14pd ymm30, [rcx]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrsqrt14pd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm30, xmm29 # AVX512{F,VL}
vrsqrt14ps xmm30{k7}, xmm29 # AVX512{F,VL}
vrsqrt14ps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14ps xmm30, [rcx]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrsqrt14ps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vrsqrt14ps ymm30, ymm29 # AVX512{F,VL}
vrsqrt14ps ymm30{k7}, ymm29 # AVX512{F,VL}
vrsqrt14ps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14ps ymm30, [rcx]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrsqrt14ps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vscatterdpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdpd [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdpd [r9+xmm31+256]{k1}, ymm30 # AVX512{F,VL}
vscatterdpd [rcx+xmm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vscatterqps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r9+ymm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [rcx+ymm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vshufpd xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufpd xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufpd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufpd xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, [rcx]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufpd ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufpd ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufpd ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufps xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufps xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufps xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufps ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufps ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufps ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vsqrtpd xmm30, xmm29 # AVX512{F,VL}
vsqrtpd xmm30{k7}, xmm29 # AVX512{F,VL}
vsqrtpd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtpd xmm30, [rcx]{1to2} # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsqrtpd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsqrtpd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsqrtpd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vsqrtpd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vsqrtpd ymm30, ymm29 # AVX512{F,VL}
vsqrtpd ymm30{k7}, ymm29 # AVX512{F,VL}
vsqrtpd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtpd ymm30, [rcx]{1to4} # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsqrtpd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsqrtpd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsqrtpd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vsqrtpd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vsqrtps xmm30, xmm29 # AVX512{F,VL}
vsqrtps xmm30{k7}, xmm29 # AVX512{F,VL}
vsqrtps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtps xmm30, [rcx]{1to4} # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsqrtps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsqrtps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsqrtps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vsqrtps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vsqrtps ymm30, ymm29 # AVX512{F,VL}
vsqrtps ymm30{k7}, ymm29 # AVX512{F,VL}
vsqrtps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtps ymm30, [rcx]{1to8} # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsqrtps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsqrtps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsqrtps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vsqrtps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vsubpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vsubpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vsubpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsubpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vsubpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vsubpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vsubpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vsubpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsubpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vsubpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vsubps xmm30, xmm29, xmm28 # AVX512{F,VL}
vsubps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vsubps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsubps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vsubps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vsubps ymm30, ymm29, ymm28 # AVX512{F,VL}
vsubps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vsubps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsubps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vsubps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vunpckhpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpckhpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpckhpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vunpckhpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vunpckhpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpckhpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpckhpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vunpckhpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vunpckhps xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpckhps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpckhps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpckhps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vunpckhps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vunpckhps ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpckhps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpckhps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpckhps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vunpckhps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vunpcklpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpcklpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpcklpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vunpcklpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vunpcklpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpcklpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpcklpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vunpcklpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vunpcklps xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpcklps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpcklps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpcklps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vunpcklps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vunpcklps ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpcklps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpcklps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpcklps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vunpcklps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpternlogd xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogd xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogd xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogd ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogd ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogd ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogq xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogq xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogq xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, [rcx]{1to2}, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogq ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogq ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogq ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpmovqb xmm30, xmm29 # AVX512{F,VL}
vpmovqb xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovqb xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovqb xmm30, ymm29 # AVX512{F,VL}
vpmovqb xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovqb xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovsqb xmm30, xmm29 # AVX512{F,VL}
vpmovsqb xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsqb xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsqb xmm30, ymm29 # AVX512{F,VL}
vpmovsqb xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovsqb xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovusqb xmm30, xmm29 # AVX512{F,VL}
vpmovusqb xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovusqb xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovusqb xmm30, ymm29 # AVX512{F,VL}
vpmovusqb xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovusqb xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovqw xmm30, xmm29 # AVX512{F,VL}
vpmovqw xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovqw xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovqw xmm30, ymm29 # AVX512{F,VL}
vpmovqw xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovqw xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovsqw xmm30, xmm29 # AVX512{F,VL}
vpmovsqw xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsqw xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsqw xmm30, ymm29 # AVX512{F,VL}
vpmovsqw xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovsqw xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovusqw xmm30, xmm29 # AVX512{F,VL}
vpmovusqw xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovusqw xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovusqw xmm30, ymm29 # AVX512{F,VL}
vpmovusqw xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovusqw xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovqd xmm30, xmm29 # AVX512{F,VL}
vpmovqd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovqd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovqd xmm30, ymm29 # AVX512{F,VL}
vpmovqd xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovqd xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovsqd xmm30, xmm29 # AVX512{F,VL}
vpmovsqd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsqd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsqd xmm30, ymm29 # AVX512{F,VL}
vpmovsqd xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovsqd xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovusqd xmm30, xmm29 # AVX512{F,VL}
vpmovusqd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovusqd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovusqd xmm30, ymm29 # AVX512{F,VL}
vpmovusqd xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovusqd xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovdb xmm30, xmm29 # AVX512{F,VL}
vpmovdb xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovdb xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovdb xmm30, ymm29 # AVX512{F,VL}
vpmovdb xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovdb xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovsdb xmm30, xmm29 # AVX512{F,VL}
vpmovsdb xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsdb xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsdb xmm30, ymm29 # AVX512{F,VL}
vpmovsdb xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovsdb xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovusdb xmm30, xmm29 # AVX512{F,VL}
vpmovusdb xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovusdb xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovusdb xmm30, ymm29 # AVX512{F,VL}
vpmovusdb xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovusdb xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovdw xmm30, xmm29 # AVX512{F,VL}
vpmovdw xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovdw xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovdw xmm30, ymm29 # AVX512{F,VL}
vpmovdw xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovdw xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovsdw xmm30, xmm29 # AVX512{F,VL}
vpmovsdw xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsdw xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsdw xmm30, ymm29 # AVX512{F,VL}
vpmovsdw xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovsdw xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vpmovusdw xmm30, xmm29 # AVX512{F,VL}
vpmovusdw xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovusdw xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovusdw xmm30, ymm29 # AVX512{F,VL}
vpmovusdw xmm30{k7}, ymm29 # AVX512{F,VL}
vpmovusdw xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshuff32x4 ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshuff32x4 ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshuff32x4 ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshuff32x4 ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vshuff32x4 ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vshuff32x4 ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vshuff32x4 ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshuff64x2 ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshuff64x2 ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshuff64x2 ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshuff64x2 ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vshuff64x2 ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vshuff64x2 ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vshuff64x2 ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufi32x4 ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufi32x4 ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshufi32x4 ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshufi32x4 ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vshufi32x4 ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vshufi32x4 ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vshufi32x4 ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufi64x2 ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufi64x2 ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshufi64x2 ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshufi64x2 ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vshufi64x2 ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vshufi64x2 ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vshufi64x2 ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpermq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermt2d xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermt2d xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermt2d xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermt2d xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2d xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2d xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermt2d xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermt2d xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermt2d xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermt2d xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermt2d xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermt2d xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermt2d xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermt2d xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermt2d ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermt2d ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermt2d ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermt2d ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2d ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2d ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermt2d ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermt2d ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermt2d ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermt2d ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermt2d ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermt2d ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermt2d ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermt2d ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermt2q xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermt2q xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermt2q xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermt2q xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2q xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2q xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermt2q xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermt2q xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermt2q xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermt2q xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermt2q xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermt2q xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermt2q xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermt2q xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermt2q ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermt2q ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermt2q ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermt2q ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2q ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2q ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermt2q ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermt2q ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermt2q ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermt2q ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermt2q ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermt2q ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermt2q ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermt2q ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermt2ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermt2ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermt2ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermt2ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermt2ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermt2ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermt2ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermt2ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermt2ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermt2ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermt2ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermt2ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermt2ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermt2ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermt2ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermt2ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermt2ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermt2ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermt2ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermt2ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermt2ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermt2ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermt2ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermt2ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermt2pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermt2pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermt2pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermt2pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermt2pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermt2pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermt2pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermt2pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermt2pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermt2pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermt2pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermt2pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermt2pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermt2pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermt2pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermt2pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermt2pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermt2pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermt2pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermt2pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermt2pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermt2pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermt2pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermt2pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermt2pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermt2pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
valignq xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
valignq xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
valignq xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
valignq xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
valignq xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
valignq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
valignq xmm30, xmm29, [rcx]{1to2}, 123 # AVX512{F,VL}
valignq xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
valignq xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
valignq xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
valignq xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
valignq xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
valignq xmm30, xmm29, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
valignq xmm30, xmm29, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
valignq xmm30, xmm29, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
valignq ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
valignq ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
valignq ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
valignq ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
valignq ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
valignq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
valignq ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
valignq ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
valignq ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
valignq ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
valignq ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
valignq ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
valignq ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
valignq ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
valignq ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vscalefpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vscalefpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vscalefpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vscalefpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vscalefpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vscalefpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vscalefpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vscalefpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vscalefpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vscalefpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vscalefpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vscalefpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vscalefpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vscalefpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vscalefpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vscalefpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vscalefpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vscalefpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vscalefpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vscalefpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vscalefpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vscalefpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vscalefpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vscalefpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vscalefpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vscalefpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vscalefpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vscalefpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vscalefps xmm30, xmm29, xmm28 # AVX512{F,VL}
vscalefps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vscalefps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vscalefps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vscalefps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vscalefps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vscalefps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vscalefps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vscalefps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vscalefps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vscalefps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vscalefps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vscalefps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vscalefps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vscalefps ymm30, ymm29, ymm28 # AVX512{F,VL}
vscalefps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vscalefps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vscalefps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vscalefps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vscalefps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vscalefps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vscalefps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vscalefps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vscalefps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vscalefps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vscalefps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vscalefps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vscalefps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vfixupimmpd xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vfixupimmpd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, [rcx]{1to2}, 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vfixupimmpd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vfixupimmpd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vfixupimmpd xmm30, xmm29, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vfixupimmpd xmm30, xmm29, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vfixupimmpd xmm30, xmm29, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vfixupimmpd ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vfixupimmpd ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vfixupimmpd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vfixupimmpd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vfixupimmpd ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vfixupimmpd ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vfixupimmpd ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vfixupimmps xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vfixupimmps xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vfixupimmps xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vfixupimmps xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vfixupimmps xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vfixupimmps xmm30, xmm29, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vfixupimmps xmm30, xmm29, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vfixupimmps xmm30, xmm29, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vfixupimmps ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vfixupimmps ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vfixupimmps ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vfixupimmps ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vfixupimmps ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vfixupimmps ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vfixupimmps ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vfixupimmps ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpslld xmm30, xmm29, 0xab # AVX512{F,VL}
vpslld xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpslld xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpslld xmm30, xmm29, 123 # AVX512{F,VL}
vpslld xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpslld xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpslld xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpslld xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpslld xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpslld xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpslld xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpslld xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpslld xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpslld xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpslld xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpslld ymm30, ymm29, 0xab # AVX512{F,VL}
vpslld ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpslld ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpslld ymm30, ymm29, 123 # AVX512{F,VL}
vpslld ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpslld ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpslld ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpslld ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpslld ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpslld ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpslld ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpslld ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpslld ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpslld ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpslld ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpsllq xmm30, xmm29, 0xab # AVX512{F,VL}
vpsllq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsllq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsllq xmm30, xmm29, 123 # AVX512{F,VL}
vpsllq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsllq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsllq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpsllq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsllq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsllq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsllq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsllq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsllq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpsllq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsllq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpsllq ymm30, ymm29, 0xab # AVX512{F,VL}
vpsllq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsllq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsllq ymm30, ymm29, 123 # AVX512{F,VL}
vpsllq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsllq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsllq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsllq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsllq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsllq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsllq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsllq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsllq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpsllq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsllq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpsrad xmm30, xmm29, 0xab # AVX512{F,VL}
vpsrad xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsrad xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsrad xmm30, xmm29, 123 # AVX512{F,VL}
vpsrad xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrad xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrad xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsrad xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsrad xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsrad xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsrad xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsrad xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrad xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpsrad xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrad xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpsrad ymm30, ymm29, 0xab # AVX512{F,VL}
vpsrad ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsrad ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsrad ymm30, ymm29, 123 # AVX512{F,VL}
vpsrad ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrad ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrad ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpsrad ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsrad ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsrad ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsrad ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsrad ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrad ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpsrad ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrad ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpsraq xmm30, xmm29, 0xab # AVX512{F,VL}
vpsraq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsraq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsraq xmm30, xmm29, 123 # AVX512{F,VL}
vpsraq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsraq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsraq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpsraq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsraq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsraq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsraq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsraq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsraq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpsraq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsraq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpsraq ymm30, ymm29, 0xab # AVX512{F,VL}
vpsraq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsraq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsraq ymm30, ymm29, 123 # AVX512{F,VL}
vpsraq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsraq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsraq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsraq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsraq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsraq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsraq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsraq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsraq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpsraq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsraq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vprolvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vprolvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vprolvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vprolvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vprolvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprolvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vprolvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vprolvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vprolvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vprolvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vprolvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vprolvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vprolvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vprolvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vprolvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vprolvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprolvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprolvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprolvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprolvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vprolvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprolvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprolvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprolvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprolvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vprolvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vprolvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vprolvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vprold xmm30, xmm29, 0xab # AVX512{F,VL}
vprold xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprold xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprold xmm30, xmm29, 123 # AVX512{F,VL}
vprold xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprold xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprold xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprold xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprold xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprold xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprold xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprold xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprold xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vprold xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprold xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vprold ymm30, ymm29, 0xab # AVX512{F,VL}
vprold ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprold ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprold ymm30, ymm29, 123 # AVX512{F,VL}
vprold ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprold ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprold ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vprold ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprold ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprold ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprold ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprold ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprold ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vprold ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprold ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vprolvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vprolvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vprolvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vprolvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vprolvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprolvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vprolvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vprolvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vprolvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vprolvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vprolvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vprolvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vprolvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vprolvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vprolvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vprolvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprolvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprolvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprolvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vprolvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vprolq xmm30, xmm29, 0xab # AVX512{F,VL}
vprolq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprolq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprolq xmm30, xmm29, 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprolq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprolq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprolq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprolq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vprolq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vprolq ymm30, ymm29, 0xab # AVX512{F,VL}
vprolq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprolq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprolq ymm30, ymm29, 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprolq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprolq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprolq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprolq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vprolq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vprorvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vprorvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vprorvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vprorvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vprorvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vprorvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vprorvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprorvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprorvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vprorvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vprord xmm30, xmm29, 0xab # AVX512{F,VL}
vprord xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprord xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprord xmm30, xmm29, 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprord xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprord xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprord xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprord xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vprord xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vprord ymm30, ymm29, 0xab # AVX512{F,VL}
vprord ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprord ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprord ymm30, ymm29, 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprord ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprord ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprord ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprord ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vprord ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vprorvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vprorvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vprorvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vprorvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vprorvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vprorvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vprorvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprorvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprorvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vprorvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vprorq xmm30, xmm29, 0xab # AVX512{F,VL}
vprorq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprorq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprorq xmm30, xmm29, 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprorq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprorq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprorq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprorq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vprorq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vprorq ymm30, ymm29, 0xab # AVX512{F,VL}
vprorq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprorq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprorq ymm30, ymm29, 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprorq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprorq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprorq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprorq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vprorq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vrndscalepd xmm30, xmm29, 0xab # AVX512{F,VL}
vrndscalepd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vrndscalepd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vrndscalepd xmm30, xmm29, 123 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vrndscalepd xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vrndscalepd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vrndscalepd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vrndscalepd xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vrndscalepd xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vrndscalepd xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vrndscalepd xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vrndscalepd ymm30, ymm29, 0xab # AVX512{F,VL}
vrndscalepd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vrndscalepd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vrndscalepd ymm30, ymm29, 123 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vrndscalepd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vrndscalepd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vrndscalepd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vrndscalepd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscalepd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vrndscalepd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscalepd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm30, xmm29, 0xab # AVX512{F,VL}
vrndscaleps xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vrndscaleps xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vrndscaleps xmm30, xmm29, 123 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vrndscaleps xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vrndscaleps xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vrndscaleps xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vrndscaleps xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscaleps xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vrndscaleps xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vrndscaleps xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vrndscaleps ymm30, ymm29, 0xab # AVX512{F,VL}
vrndscaleps ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vrndscaleps ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vrndscaleps ymm30, ymm29, 123 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vrndscaleps ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vrndscaleps ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vrndscaleps ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vrndscaleps ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vrndscaleps ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vrndscaleps ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vrndscaleps ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpcompressq xmm30, xmm29 # AVX512{F,VL}
vpcompressq xmm30{k7}, xmm29 # AVX512{F,VL}
vpcompressq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpcompressq ymm30, ymm29 # AVX512{F,VL}
vpcompressq ymm30{k7}, ymm29 # AVX512{F,VL}
vpcompressq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rcx], xmm30, 0xab # AVX512{F,VL}
vcvtps2ph QWORD PTR [rcx]{k7}, xmm30, 0xab # AVX512{F,VL}
vcvtps2ph QWORD PTR [rcx], xmm30, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rax+r14*8+0x1234], xmm30, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rdx+1016], xmm30, 123 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [rdx+1024], xmm30, 123 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rdx-1024], xmm30, 123 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [rdx-1032], xmm30, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rcx], ymm30, 0xab # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rcx]{k7}, ymm30, 0xab # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rcx], ymm30, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rax+r14*8+0x1234], ymm30, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rdx+2032], ymm30, 123 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [rdx+2048], ymm30, 123 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rdx-2048], ymm30, 123 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [rdx-2064], ymm30, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rcx], ymm29, 0xab # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rcx]{k7}, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rcx], ymm29, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rdx+2032], ymm29, 123 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [rdx+2048], ymm29, 123 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rdx-2048], ymm29, 123 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [rdx-2064], ymm29, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rcx], ymm29, 0xab # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rcx]{k7}, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rcx], ymm29, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rdx+2032], ymm29, 123 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [rdx+2048], ymm29, 123 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rdx-2048], ymm29, 123 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [rdx-2064], ymm29, 123 # AVX512{F,VL}
vmovapd XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovapd XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovapd XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovapd XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovapd XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovapd YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovapd YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovapd YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovapd YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovapd YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovaps XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovaps XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovaps XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovaps XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovaps XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovaps YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovaps YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovaps YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovaps YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovaps YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovupd XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovupd XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovupd XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovupd XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovupd XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovupd YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovupd YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovupd YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovupd YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovupd YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vmovups XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vmovups XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vmovups XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vmovups XMMWORD PTR [rdx+2032], xmm30 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [rdx+2048], xmm30 # AVX512{F,VL}
vmovups XMMWORD PTR [rdx-2048], xmm30 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [rdx-2064], xmm30 # AVX512{F,VL}
vmovups YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vmovups YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vmovups YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vmovups YMMWORD PTR [rdx+4064], ymm30 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [rdx+4096], ymm30 # AVX512{F,VL}
vmovups YMMWORD PTR [rdx-4096], ymm30 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [rdx-4128], ymm30 # AVX512{F,VL}
vpmovqb WORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovqb WORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovqb WORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovqb WORD PTR [rdx+254], xmm30 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [rdx+256], xmm30 # AVX512{F,VL}
vpmovqb WORD PTR [rdx-256], xmm30 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [rdx-258], xmm30 # AVX512{F,VL}
vpmovqb DWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovqb DWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovqb DWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovqb DWORD PTR [rdx+508], ymm30 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [rdx+512], ymm30 # AVX512{F,VL}
vpmovqb DWORD PTR [rdx-512], ymm30 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [rdx-516], ymm30 # AVX512{F,VL}
vpmovsqb WORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovsqb WORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovsqb WORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovsqb WORD PTR [rdx+254], xmm30 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [rdx+256], xmm30 # AVX512{F,VL}
vpmovsqb WORD PTR [rdx-256], xmm30 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [rdx-258], xmm30 # AVX512{F,VL}
vpmovsqb DWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovsqb DWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovsqb DWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovsqb DWORD PTR [rdx+508], ymm30 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [rdx+512], ymm30 # AVX512{F,VL}
vpmovsqb DWORD PTR [rdx-512], ymm30 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [rdx-516], ymm30 # AVX512{F,VL}
vpmovusqb WORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovusqb WORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovusqb WORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovusqb WORD PTR [rdx+254], xmm30 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [rdx+256], xmm30 # AVX512{F,VL}
vpmovusqb WORD PTR [rdx-256], xmm30 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [rdx-258], xmm30 # AVX512{F,VL}
vpmovusqb DWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovusqb DWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovusqb DWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovusqb DWORD PTR [rdx+508], ymm30 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [rdx+512], ymm30 # AVX512{F,VL}
vpmovusqb DWORD PTR [rdx-512], ymm30 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [rdx-516], ymm30 # AVX512{F,VL}
vpmovqw DWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovqw DWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovqw DWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovqw DWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpmovqw DWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpmovqw QWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovqw QWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovqw QWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovqw QWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpmovqw QWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpmovsqw DWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovsqw DWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovsqw DWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovsqw DWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpmovsqw DWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpmovsqw QWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovsqw QWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovsqw QWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovsqw QWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpmovsqw QWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpmovusqw DWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovusqw DWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovusqw DWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovusqw DWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpmovusqw DWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpmovusqw QWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovusqw QWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovusqw QWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovusqw QWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpmovusqw QWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpmovqd QWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovqd QWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovqd QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovqd QWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpmovqd QWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vpmovsqd QWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovsqd QWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovsqd QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovsqd QWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpmovsqd QWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vpmovusqd QWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovusqd QWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovusqd QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovusqd QWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpmovusqd QWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vpmovdb DWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovdb DWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovdb DWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovdb DWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpmovdb DWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpmovdb QWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovdb QWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovdb QWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovdb QWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpmovdb QWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpmovsdb DWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovsdb DWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovsdb DWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovsdb DWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpmovsdb DWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpmovsdb QWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovsdb QWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovsdb QWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovsdb QWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpmovsdb QWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpmovusdb DWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovusdb DWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovusdb DWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovusdb DWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpmovusdb DWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpmovusdb QWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovusdb QWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovusdb QWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovusdb QWORD PTR [rdx+1016], ymm30 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [rdx+1024], ymm30 # AVX512{F,VL}
vpmovusdb QWORD PTR [rdx-1024], ymm30 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [rdx-1032], ymm30 # AVX512{F,VL}
vpmovdw QWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovdw QWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovdw QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovdw QWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpmovdw QWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vpmovsdw QWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovsdw QWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovsdw QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovsdw QWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpmovsdw QWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vpmovusdw QWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpmovusdw QWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpmovusdw QWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpmovusdw QWORD PTR [rdx+1016], xmm30 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [rdx+1024], xmm30 # AVX512{F,VL}
vpmovusdw QWORD PTR [rdx-1024], xmm30 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [rdx-1032], xmm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vcvttpd2udq xmm30, xmm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttpd2udq xmm30, [rcx]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx+1024]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx-1032]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm30, ymm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}, ymm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttpd2udq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx+1024]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx-1032]{1to4} # AVX512{F,VL}
vcvttps2udq xmm30, xmm29 # AVX512{F,VL}
vcvttps2udq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvttps2udq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttps2udq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvttps2udq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvttps2udq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvttps2udq xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvttps2udq xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvttps2udq ymm30, ymm29 # AVX512{F,VL}
vcvttps2udq ymm30{k7}, ymm29 # AVX512{F,VL}
vcvttps2udq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttps2udq ymm30, [rcx]{1to8} # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvttps2udq ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvttps2udq ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvttps2udq ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vcvttps2udq ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpermi2d xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2d xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2d xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2d xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2d xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermi2d xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermi2d ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2d ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2d ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2d ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2d ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermi2d ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermi2q xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2q xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2q xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2q xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2q xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermi2q xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermi2q ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2q ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2q ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2q ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2q ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermi2q ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermi2ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermi2ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermi2ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermi2ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermi2pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermi2pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermi2pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermi2pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vptestnmd k5, xmm29, xmm28 # AVX512{F,VL}
vptestnmd k5{k7}, xmm29, xmm28 # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmd k5, xmm29, [rcx]{1to4} # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestnmd k5, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vptestnmd k5, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vptestnmd k5, ymm29, ymm28 # AVX512{F,VL}
vptestnmd k5{k7}, ymm29, ymm28 # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmd k5, ymm29, [rcx]{1to8} # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestnmd k5, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vptestnmd k5, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vptestnmq k5, xmm29, xmm28 # AVX512{F,VL}
vptestnmq k5{k7}, xmm29, xmm28 # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmq k5, xmm29, [rcx]{1to2} # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestnmq k5, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vptestnmq k5, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vptestnmq k5, ymm29, ymm28 # AVX512{F,VL}
vptestnmq k5{k7}, ymm29, ymm28 # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmq k5, ymm29, [rcx]{1to4} # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestnmq k5, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vptestnmq k5, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 1,561
|
gas/testsuite/gas/i386/noextreg.s
|
# GAS i386 testcase (noextreg): each mnemonic line is followed by
# hand-encoded .byte variants of the same opcode with VEX/XOP/EVEX bits
# that cannot extend registers outside 64-bit mode flipped.  The paired
# expected-output (.d) file checks how the disassembler renders every
# byte sequence, so nothing here may be re-encoded or reordered.
.intel_syntax noprefix
.text
ix86:
vpand xmm0, xmm0, xmm0
# VEX (C4) forms with B / vvvv-high bits toggled
.byte 0xc4, 0xc1, 0x79, 0xdb, 0xc0
.byte 0xc4, 0xc1, 0x39, 0xdb, 0xc0
vpandd xmm0, xmm0, xmm0
# EVEX (62) forms with R'/vvvv/aaa field variations
.byte 0x62, 0xd1, 0x7d, 0x08, 0xdb, 0xc0
.byte 0x62, 0xf1, 0x3d, 0x08, 0xdb, 0xc0
.byte 0x62, 0xf1, 0x7d, 0x00, 0xdb, 0xc0
vpblendvb xmm0, xmm0, xmm0, xmm0
# is4-immediate variants (register selector bits in the final byte)
.byte 0xc4, 0xc3, 0x79, 0x4c, 0xc0, 0x00
.byte 0xc4, 0xe3, 0x39, 0x4c, 0xc0, 0x00
.byte 0xc4, 0xe3, 0x79, 0x4c, 0xc0, 0x80
vpgatherdd xmm1{k7}, [eax+xmm0]
.byte 0x62, 0xd2, 0x7d, 0x0f, 0x90, 0x0c, 0x00
.byte 0x62, 0xf2, 0x7d, 0x07, 0x90, 0x0c, 0x00
andn eax, eax, [eax]
.byte 0xc4, 0xe2, 0x38, 0xf2, 0x00
.byte 0xc4, 0xc2, 0x78, 0xf2, 0x00
.byte 0xc4, 0xe2, 0xf8, 0xf2, 0x00
# XOP (8f) encodings below
tzmsk eax, [eax]
.byte 0x8f, 0xc9, 0x78, 0x01, 0x20
.byte 0x8f, 0xe9, 0x38, 0x01, 0x20
.byte 0x8f, 0xe9, 0xf8, 0x01, 0x20
llwpcb eax
.byte 0x8f, 0xc9, 0x78, 0x12, 0xc0
.byte 0x8f, 0xe9, 0xf8, 0x12, 0xc0
vprotb xmm0, xmm0, 1
.byte 0x8f, 0xc8, 0x78, 0xc0, 0xc0, 0x01
vprotb xmm0, [eax], 1
.byte 0x8f, 0xc8, 0x78, 0xc0, 0x00, 0x01
vprotb xmm0, xmm0, xmm0
.byte 0x8f, 0xc9, 0xb8, 0x90, 0xc0
.byte 0x8f, 0xe9, 0x38, 0x90, 0xc0
vprotb xmm0, [eax], xmm0
.byte 0x8f, 0xc9, 0x78, 0x90, 0x00
vprotb xmm0, xmm0, [eax]
.byte 0x8f, 0xc9, 0xf8, 0x90, 0x00
vfmaddps xmm0, xmm0, [eax], xmm0
.byte 0xc4, 0xe3, 0x39, 0x68, 0x00, 0x00
.byte 0xc4, 0xe3, 0x79, 0x68, 0x00, 0x80
.byte 0xc4, 0xe3, 0x79, 0x68, 0x00, 0x0f
vpermil2ps xmm0, xmm0, [eax], xmm0, 0
.byte 0xc4, 0xe3, 0x39, 0x48, 0x00, 0x00
.byte 0xc4, 0xe3, 0x79, 0x48, 0x00, 0x80
ret
|
tactcomplabs/xbgas-binutils-gdb
| 2,092
|
gas/testsuite/gas/i386/x86-64-avx512bw-opts.s
|
# Check 64bit AVX512BW swap instructions
# The ".s" mnemonic suffix asks gas for the alternate ("store-form",
# operands swapped in ModRM) encoding of the move; each plain/.s pair
# should produce different bytes for the same logical operation.  The
# AT&T half is repeated in Intel syntax below so both parsers are
# exercised against the same expected output.
.allow_index_reg
.text
_start:
vmovdqu8 %zmm29, %zmm30 # AVX512BW
vmovdqu8.s %zmm29, %zmm30 # AVX512BW
vmovdqu8 %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu8.s %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu8 %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu8.s %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu8 %zmm29, %zmm30 # AVX512BW
vmovdqu8.s %zmm29, %zmm30 # AVX512BW
vmovdqu8 %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu8.s %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu8 %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu8.s %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu16 %zmm29, %zmm30 # AVX512BW
vmovdqu16.s %zmm29, %zmm30 # AVX512BW
vmovdqu16 %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu16.s %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu16 %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu16.s %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu16 %zmm29, %zmm30 # AVX512BW
vmovdqu16.s %zmm29, %zmm30 # AVX512BW
vmovdqu16 %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu16.s %zmm29, %zmm30{%k7} # AVX512BW
vmovdqu16 %zmm29, %zmm30{%k7}{z} # AVX512BW
vmovdqu16.s %zmm29, %zmm30{%k7}{z} # AVX512BW
# Intel-syntax mirror of the AT&T sequence above (operand order reversed).
.intel_syntax noprefix
vmovdqu8 zmm30, zmm29 # AVX512BW
vmovdqu8.s zmm30, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}, zmm29 # AVX512BW
vmovdqu8.s zmm30{k7}, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu8.s zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu8 zmm30, zmm29 # AVX512BW
vmovdqu8.s zmm30, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}, zmm29 # AVX512BW
vmovdqu8.s zmm30{k7}, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu8.s zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu16 zmm30, zmm29 # AVX512BW
vmovdqu16.s zmm30, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}, zmm29 # AVX512BW
vmovdqu16.s zmm30{k7}, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu16.s zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu16 zmm30, zmm29 # AVX512BW
vmovdqu16.s zmm30, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}, zmm29 # AVX512BW
vmovdqu16.s zmm30{k7}, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu16.s zmm30{k7}{z}, zmm29 # AVX512BW
|
tactcomplabs/xbgas-binutils-gdb
| 5,772
|
gas/testsuite/gas/i386/x86-64-disassem.s
|
.text
# GAS x86-64 disassembler testcase: raw opcode bytes emitted verbatim.
# The sequences exercise VEX-encoded mask-register instructions
# (opcode map bytes 0x41-0x4b, 0x44-0x47, 0x90-0x99 with various
# VEX.L/VEX.W/pp combinations and ModRM bytes 0x9b/0x6f/0x3f) plus a
# few deliberately truncated or otherwise odd EVEX sequences at the
# end.  The paired .d file matches the disassembly byte-for-byte, so
# no value or ordering below may change.
.byte 0xFF, 0xEF
.byte 0xFF, 0xD8
# padding NOPs between the two probe groups
.fill 0x5, 0x1, 0x90
# opcode 0x4a group, ModRM 0x9b / 0x6f / 0x3f variants
.byte 0xC5, 0xEC, 0x4A, 0x9B
.byte 0xC5, 0xEC, 0x4A, 0x6F
.byte 0xC5, 0xEC, 0x4A, 0x3F
.byte 0xC5, 0xED, 0x4A, 0x9B
.byte 0xC5, 0xED, 0x4A, 0x6F
.byte 0xC5, 0xED, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x3F
# opcode 0x41 group
.byte 0xC5, 0xEC, 0x41, 0x9B
.byte 0xC5, 0xEC, 0x41, 0x6F
.byte 0xC5, 0xEC, 0x41, 0x3F
.byte 0xC5, 0xED, 0x41, 0x9B
.byte 0xC5, 0xED, 0x41, 0x6F
.byte 0xC5, 0xED, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x3F
# opcode 0x42 group
.byte 0xC5, 0xEC, 0x42, 0x9B
.byte 0xC5, 0xEC, 0x42, 0x6F
.byte 0xC5, 0xEC, 0x42, 0x3F
.byte 0xC5, 0xED, 0x42, 0x9B
.byte 0xC5, 0xED, 0x42, 0x6F
.byte 0xC5, 0xED, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x3F
# opcode 0x4b group
.byte 0xC5, 0xEC, 0x4B, 0x9B
.byte 0xC5, 0xEC, 0x4B, 0x6F
.byte 0xC5, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xED, 0x4B, 0x9B
.byte 0xC5, 0xED, 0x4B, 0x6F
.byte 0xC5, 0xED, 0x4B, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x3F
# opcode 0x44 group
.byte 0xC5, 0xF8, 0x44, 0x9B
.byte 0xC5, 0xF8, 0x44, 0x6F
.byte 0xC5, 0xF8, 0x44, 0x3F
.byte 0xC5, 0xF9, 0x44, 0x9B
.byte 0xC5, 0xF9, 0x44, 0x6F
.byte 0xC5, 0xF9, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x3F
# opcode 0x45 group
.byte 0xC5, 0xEC, 0x45, 0x9B
.byte 0xC5, 0xEC, 0x45, 0x6F
.byte 0xC5, 0xEC, 0x45, 0x3F
.byte 0xC5, 0xED, 0x45, 0x9B
.byte 0xC5, 0xED, 0x45, 0x6F
.byte 0xC5, 0xED, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x3F
# opcode 0x98 group
.byte 0xC5, 0xF8, 0x98, 0x9B
.byte 0xC5, 0xF8, 0x98, 0x6F
.byte 0xC5, 0xF8, 0x98, 0x3F
.byte 0xC5, 0xF9, 0x98, 0x9B
.byte 0xC5, 0xF9, 0x98, 0x6F
.byte 0xC5, 0xF9, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x3F
# opcode 0x46 group
.byte 0xC5, 0xEC, 0x46, 0x9B
.byte 0xC5, 0xEC, 0x46, 0x6F
.byte 0xC5, 0xEC, 0x46, 0x3F
.byte 0xC5, 0xED, 0x46, 0x9B
.byte 0xC5, 0xED, 0x46, 0x6F
.byte 0xC5, 0xED, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x3F
# opcode 0x47 group
.byte 0xC5, 0xEC, 0x47, 0x9B
.byte 0xC5, 0xEC, 0x47, 0x6F
.byte 0xC5, 0xEC, 0x47, 0x3F
.byte 0xC5, 0xED, 0x47, 0x9B
.byte 0xC5, 0xED, 0x47, 0x6F
.byte 0xC5, 0xED, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x3F
# opcode 0x99 group
.byte 0xC5, 0xF8, 0x99, 0x9B
.byte 0xC5, 0xF8, 0x99, 0x6F
.byte 0xC5, 0xF8, 0x99, 0x3F
.byte 0xC5, 0xF9, 0x99, 0x9B
.byte 0xC5, 0xF9, 0x99, 0x6F
.byte 0xC5, 0xF9, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x3F
# three-byte-VEX map 3, opcodes 0x30-0x33, with trailing immediate byte
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x04, 0x01
# opcode 0x92 group (pp = none/66/F2 and 3-byte VEX form)
.byte 0xC5, 0xF8, 0x92, 0x9B
.byte 0xC5, 0xF8, 0x92, 0x6F
.byte 0xC5, 0xF8, 0x92, 0x3F
.byte 0xC5, 0xF9, 0x92, 0x9B
.byte 0xC5, 0xF9, 0x92, 0x6F
.byte 0xC5, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xFB, 0x92, 0x9B
.byte 0xC5, 0xFB, 0x92, 0x6F
.byte 0xC5, 0xFB, 0x92, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x3F
# opcode 0x93 group
.byte 0xC5, 0xF8, 0x93, 0x9B
.byte 0xC5, 0xF8, 0x93, 0x6F
.byte 0xC5, 0xF8, 0x93, 0x3F
.byte 0xC5, 0xF9, 0x93, 0x9B
.byte 0xC5, 0xF9, 0x93, 0x6F
.byte 0xC5, 0xF9, 0x93, 0x3F
.byte 0xC5, 0xFB, 0x93, 0x9B
.byte 0xC5, 0xFB, 0x93, 0x6F
.byte 0xC5, 0xFB, 0x93, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x3F
# miscellaneous sequences, including deliberately truncated EVEX
# prefixes (lone 0x62 / 0x62 0xf3) to probe decoder error recovery
.byte 0xc4, 0x62, 0x1, 0x1c, 0x41, 0x37
.byte 0x62, 0x72, 0xad, 0x08, 0x1c, 0x01
.byte 0x1
.byte 0x62, 0xf3, 0x7d, 0x28, 0x1b, 0xc8, 0x25
.byte 0x62, 0xf3
.byte 0x62, 0xf3, 0x75, 0x08, 0x23, 0xc2, 0x25
.byte 0x62
.byte 0x62, 0xf2, 0x7d, 0x28, 0x5b, 0x41, 0x37
|
tactcomplabs/xbgas-binutils-gdb
| 3,085
|
gas/testsuite/gas/i386/x86-64-avx512vl_vaes.s
|
# Check 64bit AVX512VL,VAES instructions
# EVEX-encoded (xmm/ymm with high registers 28-30) AES round helpers.
# "Disp8" lines use displacements that fit the EVEX compressed-disp8
# encoding (multiples of the 16/32-byte vector width).  The AT&T half
# is mirrored in Intel syntax below against the same expected output.
.allow_index_reg
.text
_start:
vaesdec %xmm28, %xmm29, %xmm30 # AVX512VL,VAES
vaesdec 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,VAES
vaesdec 2032(%rdx), %xmm29, %xmm30 # AVX512VL,VAES Disp8
vaesdec %ymm28, %ymm29, %ymm30 # AVX512VL,VAES
vaesdec (%rcx), %ymm29, %ymm30 # AVX512VL,VAES
vaesdec 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,VAES
vaesdec 4064(%rdx), %ymm29, %ymm30 # AVX512VL,VAES Disp8
vaesdeclast %xmm28, %xmm29, %xmm30 # AVX512VL,VAES
vaesdeclast 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,VAES
vaesdeclast 2032(%rdx), %xmm29, %xmm30 # AVX512VL,VAES Disp8
vaesdeclast %ymm28, %ymm29, %ymm30 # AVX512VL,VAES
vaesdeclast 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,VAES
vaesdeclast 4064(%rdx), %ymm29, %ymm30 # AVX512VL,VAES Disp8
vaesenc %xmm28, %xmm29, %xmm30 # AVX512VL,VAES
vaesenc 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,VAES
vaesenc 2032(%rdx), %xmm29, %xmm30 # AVX512VL,VAES Disp8
vaesenc %ymm28, %ymm29, %ymm30 # AVX512VL,VAES
vaesenc 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,VAES
vaesenc 4064(%rdx), %ymm29, %ymm30 # AVX512VL,VAES Disp8
vaesenclast %xmm28, %xmm29, %xmm30 # AVX512VL,VAES
vaesenclast 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512VL,VAES
vaesenclast 2032(%rdx), %xmm29, %xmm30 # AVX512VL,VAES Disp8
vaesenclast %ymm28, %ymm29, %ymm30 # AVX512VL,VAES
vaesenclast 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512VL,VAES
vaesenclast 4064(%rdx), %ymm29, %ymm30 # AVX512VL,VAES Disp8
# Intel-syntax mirror of the AT&T sequence above.
.intel_syntax noprefix
vaesdec xmm30, xmm29, xmm28 # AVX512VL,VAES
vaesdec xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesdec xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512VL,VAES Disp8
vaesdec ymm30, ymm29, ymm28 # AVX512VL,VAES
vaesdec ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesdec ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512VL,VAES Disp8
vaesdeclast xmm30, xmm29, xmm28 # AVX512VL,VAES
vaesdeclast xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesdeclast xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512VL,VAES Disp8
vaesdeclast ymm30, ymm29, ymm28 # AVX512VL,VAES
vaesdeclast ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesdeclast ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512VL,VAES Disp8
vaesenc xmm30, xmm29, xmm28 # AVX512VL,VAES
vaesenc xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesenc xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512VL,VAES Disp8
vaesenc ymm30, ymm29, ymm28 # AVX512VL,VAES
vaesenc ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesenc ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512VL,VAES Disp8
vaesenclast xmm30, xmm29, xmm28 # AVX512VL,VAES
vaesenclast xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesenclast xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512VL,VAES Disp8
vaesenclast ymm30, ymm29, ymm28 # AVX512VL,VAES
vaesenclast ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512VL,VAES
vaesenclast ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512VL,VAES Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 2,454
|
gas/testsuite/gas/i386/intel-cmps.s
|
.text
# Intel-syntax CMPS testcase: every legal spelling of the string-compare
# instruction (implicit, sized suffix, explicit "ptr" on either or both
# operands, segment overrides).  "asi"/"adi" are aliased below to the
# address-size-appropriate si/di registers so the same body assembles
# for 16-, 32- and 64-bit targets.
.intel_syntax noprefix
.ifdef x86_16
.code16
.endif
.ifdef x86_64
.equ adi, rdi
.equ asi, rsi
.else
.equ adi, di
.equ asi, si
.endif
cmps:
# byte-sized forms
cmpsb
cmpsb [esi], es:[edi]
cmpsb fs:[esi], es:[edi]
cmpsb [esi], [edi]
cmpsb byte ptr [esi], es:[edi]
cmpsb [esi], byte ptr es:[edi]
cmpsb byte ptr [esi], byte ptr es:[edi]
cmps byte ptr [esi], es:[edi]
cmps [esi], byte ptr es:[edi]
cmps byte ptr [esi], byte ptr es:[edi]
cmpsb [asi], es:[adi]
cmpsb fs:[asi], es:[adi]
cmpsb [asi], [adi]
cmpsb byte ptr [asi], es:[adi]
cmpsb [asi], byte ptr es:[adi]
cmpsb byte ptr [asi], byte ptr es:[adi]
cmps byte ptr [asi], es:[adi]
cmps [asi], byte ptr es:[adi]
cmps byte ptr [asi], byte ptr es:[adi]
# word-sized forms
cmpsw
cmpsw [esi], es:[edi]
cmpsw fs:[esi], es:[edi]
cmpsw [esi], [edi]
cmpsw word ptr [esi], es:[edi]
cmpsw [esi], word ptr es:[edi]
cmpsw word ptr [esi], word ptr es:[edi]
cmps word ptr [esi], es:[edi]
cmps [esi], word ptr es:[edi]
cmps word ptr [esi], word ptr es:[edi]
cmpsw [asi], es:[adi]
cmpsw fs:[asi], es:[adi]
cmpsw [asi], [adi]
cmpsw word ptr [asi], es:[adi]
cmpsw [asi], word ptr es:[adi]
cmpsw word ptr [asi], word ptr es:[adi]
cmps word ptr [asi], es:[adi]
cmps [asi], word ptr es:[adi]
cmps word ptr [asi], word ptr es:[adi]
# dword-sized forms (note: "cmpsd" here is the string op, not the
# SSE2 compare-scalar-double mnemonic)
cmpsd
cmpsd [esi], es:[edi]
cmpsd fs:[esi], es:[edi]
cmpsd [esi], [edi]
cmpsd dword ptr [esi], es:[edi]
cmpsd [esi], dword ptr es:[edi]
cmpsd dword ptr [esi], dword ptr es:[edi]
cmps dword ptr [esi], es:[edi]
cmps [esi], dword ptr es:[edi]
cmps dword ptr [esi], dword ptr es:[edi]
cmpsd [asi], es:[adi]
cmpsd fs:[asi], es:[adi]
cmpsd [asi], [adi]
cmpsd dword ptr [asi], es:[adi]
cmpsd [asi], dword ptr es:[adi]
cmpsd dword ptr [asi], dword ptr es:[adi]
cmps dword ptr [asi], es:[adi]
cmps [asi], dword ptr es:[adi]
cmps dword ptr [asi], dword ptr es:[adi]
# qword-sized forms, 64-bit targets only
.ifdef x86_64
cmpsq
cmpsq [rsi], es:[rdi]
cmpsq fs:[rsi], es:[rdi]
cmpsq [rsi], [rdi]
cmpsq qword ptr [rsi], es:[rdi]
cmpsq [rsi], qword ptr es:[rdi]
cmpsq qword ptr [rsi], qword ptr es:[rdi]
cmps qword ptr [rsi], es:[rdi]
cmps [rsi], qword ptr es:[rdi]
cmps qword ptr [rsi], qword ptr es:[rdi]
cmpsq [esi], es:[edi]
cmpsq fs:[esi], es:[edi]
cmpsq [esi], [edi]
cmpsq qword ptr [esi], es:[edi]
cmpsq [esi], qword ptr es:[edi]
cmpsq qword ptr [esi], qword ptr es:[edi]
cmps qword ptr [esi], es:[edi]
cmps [esi], qword ptr es:[edi]
cmps qword ptr [esi], qword ptr es:[edi]
.endif
|
tactcomplabs/xbgas-binutils-gdb
| 4,894
|
gas/testsuite/gas/i386/reloc64.s
|
# x86-64 relocation testcase.  Each line pairs an operator (@got,
# @plt, @tlsgd, ...) with an operand size/context; "bad" entries are
# expected to be rejected by the assembler, "ill" entries are known to
# be handled inconsistently and are skipped entirely.
# "bad": only assemble the marked statement when _bad_ is defined
# (error-checking runs of the test define it).
.macro bad args:vararg
.ifdef _bad_
\args
.endif
.endm
# "ill": never assemble the marked statement.
.macro ill args:vararg
# This is used to mark entries that aren't handled consistently,
# and thus shouldn't currently be checked for.
# \args
.endm
.text
_start:
# plain symbol references in each operand size
movabs $xtrn, %rax
add $xtrn, %rax
mov $xtrn, %eax
mov $xtrn, %ax
mov $xtrn, %al
mov xtrn(%rbx), %eax
mov xtrn(%ebx), %eax
movabs $(xtrn - .), %rax
add $(xtrn - .), %rax
ill mov $(xtrn - .), %eax
mov $(xtrn - .), %ax
mov $(xtrn - .), %al
mov xtrn(%rip), %eax
mov xtrn(%eip), %eax
call xtrn
jrcxz xtrn
# @got
movabs $xtrn@got, %rax
add $xtrn@got, %rax
bad mov $xtrn@got, %eax
bad mov $xtrn@got, %ax
bad mov $xtrn@got, %al
mov xtrn@got(%rbx), %eax
bad mov xtrn@got(%ebx), %eax
bad call xtrn@got
# @gotoff
movabs $xtrn@gotoff, %rax
bad add $xtrn@gotoff, %rax
bad mov $xtrn@gotoff, %eax
bad mov $xtrn@gotoff, %ax
bad mov $xtrn@gotoff, %al
bad mov xtrn@gotoff(%rbx), %eax
bad mov xtrn@gotoff(%ebx), %eax
bad call xtrn@gotoff
# @gotpcrel
bad movabs $xtrn@gotpcrel, %rax
add $xtrn@gotpcrel, %rax
bad mov $xtrn@gotpcrel, %eax
bad mov $xtrn@gotpcrel, %ax
bad mov $xtrn@gotpcrel, %al
mov xtrn@gotpcrel(%rbx), %eax
bad mov xtrn@gotpcrel(%ebx), %eax
call xtrn@gotpcrel
# _GLOBAL_OFFSET_TABLE_
ill movabs $_GLOBAL_OFFSET_TABLE_, %rax
add $_GLOBAL_OFFSET_TABLE_, %rax
ill add $_GLOBAL_OFFSET_TABLE_, %eax
ill add $_GLOBAL_OFFSET_TABLE_, %ax
ill add $_GLOBAL_OFFSET_TABLE_, %al
lea _GLOBAL_OFFSET_TABLE_(%rip), %rax
lea _GLOBAL_OFFSET_TABLE_(%eip), %rax
ill movabs $(_GLOBAL_OFFSET_TABLE_ - .), %rax
add $(_GLOBAL_OFFSET_TABLE_ - .), %rax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %eax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %ax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %al
# @plt
bad movabs $xtrn@plt, %rax
add $xtrn@plt, %rax
bad mov $xtrn@plt, %eax
bad mov $xtrn@plt, %ax
bad mov $xtrn@plt, %al
mov xtrn@plt(%rbx), %eax
bad mov xtrn@plt(%ebx), %eax
call xtrn@plt
bad jrcxz xtrn@plt
# @tlsgd
bad movabs $xtrn@tlsgd, %rax
add $xtrn@tlsgd, %rax
bad mov $xtrn@tlsgd, %eax
bad mov $xtrn@tlsgd, %ax
bad mov $xtrn@tlsgd, %al
mov xtrn@tlsgd(%rbx), %eax
bad mov xtrn@tlsgd(%ebx), %eax
call xtrn@tlsgd
# @gottpoff
bad movabs $xtrn@gottpoff, %rax
add $xtrn@gottpoff, %rax
bad mov $xtrn@gottpoff, %eax
bad mov $xtrn@gottpoff, %ax
bad mov $xtrn@gottpoff, %al
mov xtrn@gottpoff(%rbx), %eax
bad mov xtrn@gottpoff(%ebx), %eax
call xtrn@gottpoff
# @tlsld
bad movabs $xtrn@tlsld, %rax
add $xtrn@tlsld, %rax
bad mov $xtrn@tlsld, %eax
bad mov $xtrn@tlsld, %ax
bad mov $xtrn@tlsld, %al
mov xtrn@tlsld(%rbx), %eax
bad mov xtrn@tlsld(%ebx), %eax
call xtrn@tlsld
# @dtpoff
movabs $xtrn@dtpoff, %rax
add $xtrn@dtpoff, %rax
bad mov $xtrn@dtpoff, %eax
bad mov $xtrn@dtpoff, %ax
bad mov $xtrn@dtpoff, %al
mov xtrn@dtpoff(%rbx), %eax
bad mov xtrn@dtpoff(%ebx), %eax
bad call xtrn@dtpoff
# @tpoff
movabs $xtrn@tpoff, %rax
add $xtrn@tpoff, %rax
bad mov $xtrn@tpoff, %eax
bad mov $xtrn@tpoff, %ax
bad mov $xtrn@tpoff, %al
mov xtrn@tpoff(%rbx), %eax
bad mov xtrn@tpoff(%ebx), %eax
bad call xtrn@tpoff
# data relocations, 8 / 4 / signed-4 / 2 / 1 bytes wide
.data
.quad xtrn
.quad xtrn - .
.quad xtrn@got
.quad xtrn@gotoff
.quad xtrn@gotpcrel
ill .quad _GLOBAL_OFFSET_TABLE_
ill .quad _GLOBAL_OFFSET_TABLE_ - .
bad .quad xtrn@plt
bad .quad xtrn@tlsgd
bad .quad xtrn@gottpoff
bad .quad xtrn@tlsld
.quad xtrn@dtpoff
.quad xtrn@tpoff
.long xtrn
.long xtrn - .
.long xtrn@got
bad .long xtrn@gotoff
.long xtrn@gotpcrel
.long _GLOBAL_OFFSET_TABLE_
.long _GLOBAL_OFFSET_TABLE_ - .
.long xtrn@plt
.long xtrn@tlsgd
.long xtrn@gottpoff
.long xtrn@tlsld
.long xtrn@dtpoff
.long xtrn@tpoff
.slong xtrn
.slong xtrn - .
.slong xtrn@got
bad .slong xtrn@gotoff
.slong xtrn@gotpcrel
.slong _GLOBAL_OFFSET_TABLE_
.slong _GLOBAL_OFFSET_TABLE_ - .
.slong xtrn@plt
.slong xtrn@tlsgd
.slong xtrn@gottpoff
.slong xtrn@tlsld
.slong xtrn@dtpoff
.slong xtrn@tpoff
.word xtrn
.word xtrn - .
bad .word xtrn@got
bad .word xtrn@gotoff
bad .word xtrn@gotpcrel
ill .word _GLOBAL_OFFSET_TABLE_
ill .word _GLOBAL_OFFSET_TABLE_ - .
bad .word xtrn@plt
bad .word xtrn@tlsgd
bad .word xtrn@gottpoff
bad .word xtrn@tlsld
bad .word xtrn@dtpoff
bad .word xtrn@tpoff
.byte xtrn
.byte xtrn - .
bad .byte xtrn@got
bad .byte xtrn@gotoff
bad .byte xtrn@gotpcrel
ill .byte _GLOBAL_OFFSET_TABLE_
ill .byte _GLOBAL_OFFSET_TABLE_ - .
bad .byte xtrn@plt
bad .byte xtrn@tlsgd
bad .byte xtrn@gottpoff
bad .byte xtrn@tlsld
bad .byte xtrn@dtpoff
bad .byte xtrn@tpoff
# whitespace between operator and operand must still parse
.text
mov xtrn@tpoff (%rbx), %eax
# addend on a relocation operator
.data
.long xtrn@got - 4
.long xtrn@got + 4
# @gotplt
.text
movabs $xtrn@gotplt, %rax
bad add $xtrn@gotplt, %rax
bad mov $xtrn@gotplt, %eax
bad mov $xtrn@gotplt, %ax
bad mov $xtrn@gotplt, %al
bad mov xtrn@gotplt(%rbx), %eax
bad mov xtrn@gotplt(%ebx), %eax
bad call xtrn@gotplt
.data
.quad xtrn@gotplt
bad .long xtrn@gotplt
bad .word xtrn@gotplt
bad .byte xtrn@gotplt
# index-only addressing and VSIB (gather) forms
.text
mov xtrn(,%rbx), %eax
mov xtrn(,%ebx), %eax
vgatherdps %xmm2, xtrn(,%xmm1), %xmm0
addr32 vgatherdps %xmm2, xtrn(,%xmm1), %xmm0
bad .long xtrn@plt - .
|
tactcomplabs/xbgas-binutils-gdb
| 2,093
|
gas/testsuite/gas/i386/x86-64-dw2-compress-2.s
|
# Compiler-style output used to test DWARF debug-section compression:
# two trivial functions plus hand-written .debug_* section payloads
# (DWARF v3, 8-byte addresses).  The byte values encode DWARF
# structures and must not be altered.
.file "dw2-compress-2.c"
.section .debug_abbrev,"",@progbits
.Ldebug_abbrev0:
.section .debug_info,"",@progbits
.Ldebug_info0:
.section .debug_line,"",@progbits
.Ldebug_line0:
.text
.Ltext0:
.cfi_sections .debug_frame
.p2align 4,,15
.globl foo2
.type foo2, @function
foo2:
.LFB1:
.file 1 "dw2-compress-2.c"
.loc 1 11 0
.cfi_startproc
.loc 1 12 0
# rep ret: AMD-recommended two-byte return (branch-target padding)
rep
ret
.cfi_endproc
.LFE1:
.size foo2, .-foo2
.p2align 4,,15
.globl foo1
.type foo1, @function
foo1:
.LFB0:
.loc 1 5 0
.cfi_startproc
.loc 1 6 0
# tail-call to external bar
jmp bar
.cfi_endproc
.LFE0:
.size foo1, .-foo1
.Letext0:
# compile-unit DIE followed by two subprogram DIEs (abbrev codes 1, 2)
.section .debug_info
.long 0x5e
.value 0x3
.long .Ldebug_abbrev0
.byte 0x8
.uleb128 0x1
.long .LASF2
.byte 0x1
.long .LASF3
.long .LASF4
.quad .Ltext0
.quad .Letext0
.long .Ldebug_line0
.uleb128 0x2
.byte 0x1
.long .LASF0
.byte 0x1
.byte 0xa
.quad .LFB1
.quad .LFE1
.byte 0x1
.byte 0x9c
.uleb128 0x2
.byte 0x1
.long .LASF1
.byte 0x1
.byte 0x4
.quad .LFB0
.quad .LFE0
.byte 0x1
.byte 0x9c
.byte 0x0
# abbreviation table: entry 1 = compile_unit, entry 2 = subprogram
.section .debug_abbrev
.uleb128 0x1
.uleb128 0x11
.byte 0x1
.uleb128 0x25
.uleb128 0xe
.uleb128 0x13
.uleb128 0xb
.uleb128 0x3
.uleb128 0xe
.uleb128 0x1b
.uleb128 0xe
.uleb128 0x11
.uleb128 0x1
.uleb128 0x12
.uleb128 0x1
.uleb128 0x10
.uleb128 0x6
.byte 0x0
.byte 0x0
.uleb128 0x2
.uleb128 0x2e
.byte 0x0
.uleb128 0x3f
.uleb128 0xc
.uleb128 0x3
.uleb128 0xe
.uleb128 0x3a
.uleb128 0xb
.uleb128 0x3b
.uleb128 0xb
.uleb128 0x11
.uleb128 0x1
.uleb128 0x12
.uleb128 0x1
.uleb128 0x40
.uleb128 0xa
.byte 0x0
.byte 0x0
.byte 0x0
# name lookup table for foo1/foo2
.section .debug_pubnames,"",@progbits
.long 0x20
.value 0x2
.long .Ldebug_info0
.long 0x62
.long 0x2d
.string "foo2"
.long 0x47
.string "foo1"
.long 0x0
# address-range table covering .Ltext0 .. .Letext0
.section .debug_aranges,"",@progbits
.long 0x2c
.value 0x2
.long .Ldebug_info0
.byte 0x8
.byte 0x0
.value 0x0
.value 0x0
.quad .Ltext0
.quad .Letext0-.Ltext0
.quad 0x0
.quad 0x0
# merged string section referenced by the .LASFn labels above
.section .debug_str,"MS",@progbits,1
.LASF2:
.string "GNU C 4.4.4"
.LASF0:
.string "foo2"
.LASF1:
.string "foo1"
.LASF4:
.string "."
.LASF3:
.string "dw2-compress-2.c"
|
tactcomplabs/xbgas-binutils-gdb
| 10,181
|
gas/testsuite/gas/i386/inval-avx512f.s
|
# Check illegal AVX512F instructions
# Negative testcase: every statement below is expected to be REJECTED
# by the assembler (wrong masking/{z} placement, broadcast size or
# count mismatches, bad VSIB operands, misplaced {sae}/{rn-sae}, ...).
# The paired .l file matches the emitted error messages, so neither the
# text nor the ordering may change.
.text
.allow_index_reg
_start:
# masking/zeroing applied where not allowed (AT&T)
mov {sae}, %eax{%k1}
mov {sae}, %eax
mov %ebx, %eax{%k2}
vaddps %zmm3, %zmm1, %zmm2{z}{%k1}{z}
vaddps %zmm3, %zmm1{%k3}, %zmm2{z}
vaddps %zmm3, %zmm1{%k1}, %zmm2{%k2}
vcvtps2pd (%eax), %zmm1{1to8}
vcvtps2pd (%eax){1to16}, %zmm1
vcvtps2pd (%eax){%k1}, %zmm1
vcvtps2pd (%eax){z}, %zmm1
# gather/scatter with missing mask or bad (non-VSIB / VSIB-only) index
vgatherqpd (%rdi,%zmm2,8),%zmm6
vgatherqpd (%edi),%zmm6{%k1}
vgatherqpd (%zmm2),%zmm6{%k1}
vpscatterdd %zmm6,(%edi){%k1}
vpscatterdd %zmm6,(%zmm2){%k1}
# same shapes in Intel syntax
.intel_syntax noprefix
mov eax{k1}, {sae}
mov eax, {sae}
mov eax{k2}, ebx
vaddps zmm2{z}{k1}{z}, zmm1, zmm3
vaddps zmm2{z}, zmm1{k3}, zmm3
vaddps zmm2{k2}, zmm1{k1}, zmm3
vcvtps2pd zmm1{1to8}, [eax]
vcvtps2pd zmm1, [eax]{1to16}
vcvtps2pd zmm1, [eax]{k1}
vcvtps2pd zmm1, [eax]{z}
vgatherqpd zmm6, ZMMWORD PTR [rdi+zmm2*8]
vgatherqpd zmm6{k1}, ZMMWORD PTR [edi]
vgatherqpd zmm6{k1}, ZMMWORD PTR [zmm2+eiz]
vpscatterdd ZMMWORD PTR [edi]{k1}, zmm6
vpscatterdd ZMMWORD PTR [zmm2+eiz]{k1}, zmm6
# broadcast element size inconsistent with the operand-size keyword
vaddps zmm2, zmm1, QWORD PTR [eax]{1to8}
vaddps zmm2, zmm1, QWORD PTR [eax]{1to16}
vaddpd zmm2, zmm1, DWORD PTR [eax]{1to8}
vaddpd zmm2, zmm1, DWORD PTR [eax]{1to16}
vaddps zmm2, zmm1, ZMMWORD PTR [eax]{1to16}
vaddps zmm2, zmm1, DWORD PTR [eax]
vaddpd zmm2, zmm1, QWORD PTR [eax]
.att_syntax prefix
# non-mask-register / stray {z} in the mask position
vaddps %zmm0, %zmm1, %zmm2{%ecx}
vaddps %zmm0, %zmm1, %zmm2{z}
.intel_syntax noprefix
vaddps zmm2{ecx}, zmm1, zmm0
vaddps zmm2{z}, zmm1, zmm0
.att_syntax prefix
# broadcast on instructions / element counts where it is invalid
vmovaps (%eax){1to2}, %zmm1
vmovaps (%eax){1to4}, %zmm1
vmovaps (%eax){1to8}, %zmm1
vmovaps (%eax){1to16}, %zmm1
vcvtps2pd (%eax){1to2}, %zmm1
vcvtps2pd (%eax){1to4}, %zmm1
vcvtps2pd (%eax){1to8}, %zmm1
vcvtps2pd (%eax){1to16}, %zmm1
vcvtps2pd (%eax){1to2}, %ymm1
vcvtps2pd (%eax){1to4}, %ymm1
vcvtps2pd (%eax){1to8}, %ymm1
vcvtps2pd (%eax){1to16}, %ymm1
vcvtps2pd (%eax){1to2}, %xmm1
vcvtps2pd (%eax){1to4}, %xmm1
vcvtps2pd (%eax){1to8}, %xmm1
vcvtps2pd (%eax){1to16}, %xmm1
vaddps (%eax){1to2}, %zmm1, %zmm2
vaddps (%eax){1to4}, %zmm1, %zmm2
vaddps (%eax){1to8}, %zmm1, %zmm2
vaddps (%eax){1to16}, %zmm1, %zmm2
vaddps (%eax){1to2}, %ymm1, %ymm2
vaddps (%eax){1to4}, %ymm1, %ymm2
vaddps (%eax){1to8}, %ymm1, %ymm2
vaddps (%eax){1to16}, %ymm1, %ymm2
vaddps (%eax){1to2}, %xmm1, %xmm2
vaddps (%eax){1to4}, %xmm1, %xmm2
vaddps (%eax){1to8}, %xmm1, %xmm2
vaddps (%eax){1to16}, %xmm1, %xmm2
vaddpd (%eax){1to2}, %zmm1, %zmm2
vaddpd (%eax){1to4}, %zmm1, %zmm2
vaddpd (%eax){1to8}, %zmm1, %zmm2
vaddpd (%eax){1to16}, %zmm1, %zmm2
vaddpd (%eax){1to2}, %ymm1, %ymm2
vaddpd (%eax){1to4}, %ymm1, %ymm2
vaddpd (%eax){1to8}, %ymm1, %ymm2
vaddpd (%eax){1to16}, %ymm1, %ymm2
vaddpd (%eax){1to2}, %xmm1, %xmm2
vaddpd (%eax){1to4}, %xmm1, %xmm2
vaddpd (%eax){1to8}, %xmm1, %xmm2
vaddpd (%eax){1to16}, %xmm1, %xmm2
.intel_syntax noprefix
# operand-size keyword inconsistent with the instruction
vcvtps2pd zmm1, QWORD PTR [eax]
vcvtps2pd ymm1, QWORD PTR [eax]
vcvtps2pd xmm1, QWORD PTR [eax]
vcvtps2pd xmm1, DWORD PTR [eax]{1to2}
vcvtps2pd xmm1, DWORD PTR [eax]{1to4}
vcvtps2pd xmm1, DWORD PTR [eax]{1to8}
vcvtps2pd xmm1, DWORD PTR [eax]{1to16}
vaddps zmm2, zmm1, QWORD PTR [eax]
vaddps ymm2, ymm1, QWORD PTR [eax]
vaddps xmm2, xmm1, QWORD PTR [eax]
vaddps zmm2, zmm1, DWORD PTR [eax]{1to2}
vaddps zmm2, zmm1, DWORD PTR [eax]{1to4}
vaddps zmm2, zmm1, DWORD PTR [eax]{1to8}
vaddps zmm2, zmm1, DWORD PTR [eax]{1to16}
vaddps ymm2, ymm1, DWORD PTR [eax]{1to2}
vaddps ymm2, ymm1, DWORD PTR [eax]{1to4}
vaddps ymm2, ymm1, DWORD PTR [eax]{1to8}
vaddps ymm2, ymm1, DWORD PTR [eax]{1to16}
vaddps xmm2, xmm1, DWORD PTR [eax]{1to2}
vaddps xmm2, xmm1, DWORD PTR [eax]{1to4}
vaddps xmm2, xmm1, DWORD PTR [eax]{1to8}
vaddps xmm2, xmm1, DWORD PTR [eax]{1to16}
vaddpd zmm2, zmm1, DWORD PTR [eax]
vaddpd ymm2, ymm1, DWORD PTR [eax]
vaddpd xmm2, xmm1, DWORD PTR [eax]
vaddpd zmm2, zmm1, QWORD PTR [eax]{1to2}
vaddpd zmm2, zmm1, QWORD PTR [eax]{1to4}
vaddpd zmm2, zmm1, QWORD PTR [eax]{1to8}
vaddpd zmm2, zmm1, QWORD PTR [eax]{1to16}
vaddpd ymm2, ymm1, QWORD PTR [eax]{1to2}
vaddpd ymm2, ymm1, QWORD PTR [eax]{1to4}
vaddpd ymm2, ymm1, QWORD PTR [eax]{1to8}
vaddpd ymm2, ymm1, QWORD PTR [eax]{1to16}
vaddpd xmm2, xmm1, QWORD PTR [eax]{1to2}
vaddpd xmm2, xmm1, QWORD PTR [eax]{1to4}
vaddpd xmm2, xmm1, QWORD PTR [eax]{1to8}
vaddpd xmm2, xmm1, QWORD PTR [eax]{1to16}
vcvtps2qq xmm0, DWORD PTR [eax]
.att_syntax prefix
# {z} with a mask-register (k0) destination — zeroing is meaningless
vcmppd $0, %zmm0, %zmm0, %k0{%k1}{z}
vcmpps $0, %zmm0, %zmm0, %k0{%k1}{z}
vcmpsd $0, %xmm0, %xmm0, %k0{%k1}{z}
vcmpss $0, %xmm0, %xmm0, %k0{%k1}{z}
# {z} with a memory destination — likewise invalid
vcompresspd %zmm0, (%eax){%k1}{z}
vcompressps %zmm0, (%eax){%k1}{z}
vcvtps2ph $0, %zmm0, (%eax){%k1}{z}
vextractf32x4 $0, %zmm0, (%eax){%k1}{z}
vextractf32x8 $0, %zmm0, (%eax){%k1}{z}
vextractf64x2 $0, %zmm0, (%eax){%k1}{z}
vextractf64x4 $0, %zmm0, (%eax){%k1}{z}
vextracti32x4 $0, %zmm0, (%eax){%k1}{z}
vextracti32x8 $0, %zmm0, (%eax){%k1}{z}
vextracti64x2 $0, %zmm0, (%eax){%k1}{z}
vextracti64x4 $0, %zmm0, (%eax){%k1}{z}
vfpclasspd $0, %zmm0, %k0{%k1}{z}
vfpclassps $0, %zmm0, %k0{%k1}{z}
vfpclasssd $0, %xmm0, %k0{%k1}{z}
vfpclassss $0, %xmm0, %k0{%k1}{z}
vgatherdpd (%eax,%ymm1), %zmm0{%k1}{z}
vgatherdps (%eax,%zmm1), %zmm0{%k1}{z}
vgatherqpd (%eax,%zmm1), %zmm0{%k1}{z}
vgatherqps (%eax,%zmm1), %ymm0{%k1}{z}
vgatherpf0dpd (%eax,%ymm1){%k1}{z}
vgatherpf0dps (%eax,%zmm1){%k1}{z}
vgatherpf0qpd (%eax,%zmm1){%k1}{z}
vgatherpf0qps (%eax,%zmm1){%k1}{z}
vgatherpf1dpd (%eax,%ymm1){%k1}{z}
vgatherpf1dps (%eax,%zmm1){%k1}{z}
vgatherpf1qpd (%eax,%zmm1){%k1}{z}
vgatherpf1qps (%eax,%zmm1){%k1}{z}
vmovapd %zmm0, (%eax){%k1}{z}
vmovaps %zmm0, (%eax){%k1}{z}
vmovdqa32 %zmm0, (%eax){%k1}{z}
vmovdqa64 %zmm0, (%eax){%k1}{z}
vmovdqu8 %zmm0, (%eax){%k1}{z}
vmovdqu16 %zmm0, (%eax){%k1}{z}
vmovdqu32 %zmm0, (%eax){%k1}{z}
vmovdqu64 %zmm0, (%eax){%k1}{z}
vmovsd %xmm0, (%eax){%k1}{z}
vmovss %xmm0, (%eax){%k1}{z}
vmovupd %zmm0, (%eax){%k1}{z}
vmovups %zmm0, (%eax){%k1}{z}
vpcmpb $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpd $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpq $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpw $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpeqb %zmm0, %zmm0, %k0{%k1}{z}
vpcmpeqd %zmm0, %zmm0, %k0{%k1}{z}
vpcmpeqq %zmm0, %zmm0, %k0{%k1}{z}
vpcmpeqw %zmm0, %zmm0, %k0{%k1}{z}
vpcmpgtb %zmm0, %zmm0, %k0{%k1}{z}
vpcmpgtd %zmm0, %zmm0, %k0{%k1}{z}
vpcmpgtq %zmm0, %zmm0, %k0{%k1}{z}
vpcmpgtw %zmm0, %zmm0, %k0{%k1}{z}
vpcmpub $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpud $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpuq $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcmpuw $0, %zmm0, %zmm0, %k0{%k1}{z}
vpcompressb %zmm0, (%eax){%k1}{z}
vpcompressd %zmm0, (%eax){%k1}{z}
vpcompressq %zmm0, (%eax){%k1}{z}
vpcompressw %zmm0, (%eax){%k1}{z}
vpgatherdd (%eax,%zmm1), %zmm0{%k1}{z}
vpgatherdq (%eax,%ymm1), %zmm0{%k1}{z}
vpgatherqd (%eax,%zmm1), %ymm0{%k1}{z}
vpgatherqq (%eax,%zmm1), %zmm0{%k1}{z}
vpmovdb %zmm0, (%eax){%k1}{z}
vpmovdw %zmm0, (%eax){%k1}{z}
vpmovqb %zmm0, (%eax){%k1}{z}
vpmovqd %zmm0, (%eax){%k1}{z}
vpmovqw %zmm0, (%eax){%k1}{z}
vpmovsdb %zmm0, (%eax){%k1}{z}
vpmovsdw %zmm0, (%eax){%k1}{z}
vpmovsqb %zmm0, (%eax){%k1}{z}
vpmovsqd %zmm0, (%eax){%k1}{z}
vpmovsqw %zmm0, (%eax){%k1}{z}
vpmovswb %zmm0, (%eax){%k1}{z}
vpmovusdb %zmm0, (%eax){%k1}{z}
vpmovusdw %zmm0, (%eax){%k1}{z}
vpmovusqb %zmm0, (%eax){%k1}{z}
vpmovusqd %zmm0, (%eax){%k1}{z}
vpmovusqw %zmm0, (%eax){%k1}{z}
vpmovuswb %zmm0, (%eax){%k1}{z}
vpmovwb %zmm0, (%eax){%k1}{z}
vpscatterdd %zmm0, (%eax,%zmm1){%k1}{z}
vpscatterdq %zmm0, (%eax,%ymm1){%k1}{z}
vpscatterqd %ymm0, (%eax,%zmm1){%k1}{z}
vpscatterqq %zmm0, (%eax,%zmm1){%k1}{z}
vpshufbitqmb %zmm0, %zmm0, %k0{%k1}{z}
vptestmb %zmm0, %zmm0, %k0{%k1}{z}
vptestmd %zmm0, %zmm0, %k0{%k1}{z}
vptestmq %zmm0, %zmm0, %k0{%k1}{z}
vptestmw %zmm0, %zmm0, %k0{%k1}{z}
vptestnmb %zmm0, %zmm0, %k0{%k1}{z}
vptestnmd %zmm0, %zmm0, %k0{%k1}{z}
vptestnmq %zmm0, %zmm0, %k0{%k1}{z}
vptestnmw %zmm0, %zmm0, %k0{%k1}{z}
vscatterdpd %zmm0, (%eax,%ymm1){%k1}{z}
vscatterdps %zmm0, (%eax,%zmm1){%k1}{z}
vscatterqpd %zmm0, (%eax,%zmm1){%k1}{z}
vscatterqps %ymm0, (%eax,%zmm1){%k1}{z}
vscatterpf0dpd (%eax,%ymm1){%k1}{z}
vscatterpf0dps (%eax,%zmm1){%k1}{z}
vscatterpf0qpd (%eax,%zmm1){%k1}{z}
vscatterpf0qps (%eax,%zmm1){%k1}{z}
vscatterpf1dpd (%eax,%ymm1){%k1}{z}
vscatterpf1dps (%eax,%zmm1){%k1}{z}
vscatterpf1qpd (%eax,%zmm1){%k1}{z}
vscatterpf1qps (%eax,%zmm1){%k1}{z}
# BF16 forms with bad broadcast counts
vdpbf16ps 8(%eax){1to8}, %zmm2, %zmm2
vcvtne2ps2bf16 8(%eax){1to8}, %zmm2, %zmm2
vcvtneps2bf16 (%eax){1to2}, %ymm1
vcvtneps2bf16 (%eax){1to4}, %ymm1
vcvtneps2bf16 (%eax){1to8}, %ymm1
vcvtneps2bf16 (%eax){1to16}, %ymm1
vcvtneps2bf16 (%eax){1to2}, %xmm1
vcvtneps2bf16 (%eax){1to4}, %xmm1
vcvtneps2bf16 (%eax){1to8}, %xmm1
vcvtneps2bf16 (%eax){1to16}, %xmm1
# wrong operand counts / misplaced rounding-control specifiers
vaddps $0xcc, %zmm0, %zmm0, %zmm0
vcmpss $0, $0xcc, %xmm0, %xmm0, %k0
vaddss {sae}, %xmm0, %xmm0, %xmm0
vcmpps $0, {rn-sae}, %zmm0, %zmm0, %k0
.intel_syntax noprefix
# BCST keyword with wrong element size
vaddps zmm2, zmm1, WORD BCST [eax]
vaddps zmm2, zmm1, DWORD BCST [eax]
vaddps zmm2, zmm1, QWORD BCST [eax]
vaddps zmm2, zmm1, ZMMWORD BCST [eax]
vaddpd zmm2, zmm1, WORD BCST [eax]
vaddpd zmm2, zmm1, DWORD BCST [eax]
vaddpd zmm2, zmm1, QWORD BCST [eax]
vaddpd zmm2, zmm1, ZMMWORD BCST [eax]
.att_syntax prefix
# {rn-sae}/{sae} attached to every position except the legal one
vaddps {rn-sae}, %zmm0, %zmm0, %zmm0
vaddps %zmm0, {rn-sae}, %zmm0, %zmm0
vaddps %zmm0, %zmm0, {rn-sae}, %zmm0
vaddps %zmm0, %zmm0, %zmm0, {rn-sae}
vcmpps {sae}, $0, %zmm0, %zmm0, %k0
vcmpps $0, {sae}, %zmm0, %zmm0, %k0
vcmpps $0, %zmm0, {sae}, %zmm0, %k0
vcmpps $0, %zmm0, %zmm0, {sae}, %k0
vcmpps $0, %zmm0, %zmm0, %k0, {sae}
vcvtsi2ss {rn-sae}, %eax, %xmm0, %xmm0
vcvtsi2ss %eax, {rn-sae}, %xmm0, %xmm0
vcvtsi2ss %eax, %xmm0, {rn-sae}, %xmm0
vcvtsi2ss %eax, %xmm0, %xmm0, {rn-sae}
.intel_syntax noprefix
vaddps zmm0{rn-sae}, zmm0, zmm0
vaddps zmm0, zmm0{rn-sae}, zmm0
vaddps zmm0, zmm0, zmm0{rn-sae}
vcmpps k0{sae}, zmm0, zmm0, 0
vcmpps k0, zmm0{sae}, zmm0, 0
vcmpps k0, zmm0, zmm0{sae}, 0
vcmpps k0, zmm0, zmm0, 0{sae}
vcvtsi2ss xmm0{rn-sae}, xmm0, eax
vcvtsi2ss xmm0, xmm0{rn-sae}, eax
vcvtsi2ss xmm0, xmm0, eax{rn-sae}
.p2align 4
|
tactcomplabs/xbgas-binutils-gdb
| 6,431
|
gas/testsuite/gas/i386/avx512pf.s
|
# Check 32bit AVX512PF instructions
# Gather/scatter prefetch hints: each takes a vector-indexed (VSIB)
# memory operand plus a mandatory {%k1} completion mask and has no
# register destination, so only the addressing forms vary below.
# NOTE(review): each adjacent duplicated line appears intentional
# (testsuite exercises the same form twice) — confirm against the
# matching .d dump before changing.
.allow_index_reg
.text
_start:
# "dpd" forms use a ymm index (8 dword indices -> 8 qwords); all other
# forms use a zmm index.
vgatherpf0dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vgatherpf0dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vgatherpf0dpd 256(%eax,%ymm7){%k1} # AVX512PF
vgatherpf0dpd 1024(%ecx,%ymm7,4){%k1} # AVX512PF
vgatherpf0dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf0dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf0dps 256(%eax,%zmm7){%k1} # AVX512PF
vgatherpf0dps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vgatherpf0qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf0qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf0qpd 256(%eax,%zmm7){%k1} # AVX512PF
vgatherpf0qpd 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vgatherpf0qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf0qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf0qps 256(%eax,%zmm7){%k1} # AVX512PF
vgatherpf0qps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vgatherpf1dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vgatherpf1dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vgatherpf1dpd 256(%eax,%ymm7){%k1} # AVX512PF
vgatherpf1dpd 1024(%ecx,%ymm7,4){%k1} # AVX512PF
vgatherpf1dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf1dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf1dps 256(%eax,%zmm7){%k1} # AVX512PF
vgatherpf1dps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vgatherpf1qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf1qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf1qpd 256(%eax,%zmm7){%k1} # AVX512PF
vgatherpf1qpd 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vgatherpf1qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf1qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vgatherpf1qps 256(%eax,%zmm7){%k1} # AVX512PF
vgatherpf1qps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vscatterpf0dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vscatterpf0dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vscatterpf0dpd 256(%eax,%ymm7){%k1} # AVX512PF
vscatterpf0dpd 1024(%ecx,%ymm7,4){%k1} # AVX512PF
vscatterpf0dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf0dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf0dps 256(%eax,%zmm7){%k1} # AVX512PF
vscatterpf0dps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vscatterpf0qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf0qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf0qpd 256(%eax,%zmm7){%k1} # AVX512PF
vscatterpf0qpd 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vscatterpf0qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf0qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf0qps 256(%eax,%zmm7){%k1} # AVX512PF
vscatterpf0qps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vscatterpf1dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vscatterpf1dpd 123(%ebp,%ymm7,8){%k1} # AVX512PF
vscatterpf1dpd 256(%eax,%ymm7){%k1} # AVX512PF
vscatterpf1dpd 1024(%ecx,%ymm7,4){%k1} # AVX512PF
vscatterpf1dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf1dps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf1dps 256(%eax,%zmm7){%k1} # AVX512PF
vscatterpf1dps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vscatterpf1qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf1qpd 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf1qpd 256(%eax,%zmm7){%k1} # AVX512PF
vscatterpf1qpd 1024(%ecx,%zmm7,4){%k1} # AVX512PF
vscatterpf1qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf1qps 123(%ebp,%zmm7,8){%k1} # AVX512PF
vscatterpf1qps 256(%eax,%zmm7){%k1} # AVX512PF
vscatterpf1qps 1024(%ecx,%zmm7,4){%k1} # AVX512PF
# Same forms again in Intel syntax; must assemble to identical bytes.
.intel_syntax noprefix
vgatherpf0dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vgatherpf0dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vgatherpf0dpd [eax+ymm7+256]{k1} # AVX512PF
vgatherpf0dpd [ecx+ymm7*4+1024]{k1} # AVX512PF
vgatherpf0dps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf0dps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf0dps [eax+zmm7+256]{k1} # AVX512PF
vgatherpf0dps [ecx+zmm7*4+1024]{k1} # AVX512PF
vgatherpf0qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf0qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf0qpd [eax+zmm7+256]{k1} # AVX512PF
vgatherpf0qpd [ecx+zmm7*4+1024]{k1} # AVX512PF
vgatherpf0qps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf0qps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf0qps [eax+zmm7+256]{k1} # AVX512PF
vgatherpf0qps [ecx+zmm7*4+1024]{k1} # AVX512PF
vgatherpf1dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vgatherpf1dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vgatherpf1dpd [eax+ymm7+256]{k1} # AVX512PF
vgatherpf1dpd [ecx+ymm7*4+1024]{k1} # AVX512PF
vgatherpf1dps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf1dps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf1dps [eax+zmm7+256]{k1} # AVX512PF
vgatherpf1dps [ecx+zmm7*4+1024]{k1} # AVX512PF
vgatherpf1qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf1qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf1qpd [eax+zmm7+256]{k1} # AVX512PF
vgatherpf1qpd [ecx+zmm7*4+1024]{k1} # AVX512PF
vgatherpf1qps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf1qps [ebp+zmm7*8-123]{k1} # AVX512PF
vgatherpf1qps [eax+zmm7+256]{k1} # AVX512PF
vgatherpf1qps [ecx+zmm7*4+1024]{k1} # AVX512PF
vscatterpf0dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vscatterpf0dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vscatterpf0dpd [eax+ymm7+256]{k1} # AVX512PF
vscatterpf0dpd [ecx+ymm7*4+1024]{k1} # AVX512PF
vscatterpf0dps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf0dps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf0dps [eax+zmm7+256]{k1} # AVX512PF
vscatterpf0dps [ecx+zmm7*4+1024]{k1} # AVX512PF
vscatterpf0qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf0qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf0qpd [eax+zmm7+256]{k1} # AVX512PF
vscatterpf0qpd [ecx+zmm7*4+1024]{k1} # AVX512PF
vscatterpf0qps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf0qps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf0qps [eax+zmm7+256]{k1} # AVX512PF
vscatterpf0qps [ecx+zmm7*4+1024]{k1} # AVX512PF
vscatterpf1dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vscatterpf1dpd [ebp+ymm7*8-123]{k1} # AVX512PF
vscatterpf1dpd [eax+ymm7+256]{k1} # AVX512PF
vscatterpf1dpd [ecx+ymm7*4+1024]{k1} # AVX512PF
vscatterpf1dps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf1dps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf1dps [eax+zmm7+256]{k1} # AVX512PF
vscatterpf1dps [ecx+zmm7*4+1024]{k1} # AVX512PF
vscatterpf1qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf1qpd [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf1qpd [eax+zmm7+256]{k1} # AVX512PF
vscatterpf1qpd [ecx+zmm7*4+1024]{k1} # AVX512PF
vscatterpf1qps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf1qps [ebp+zmm7*8-123]{k1} # AVX512PF
vscatterpf1qps [eax+zmm7+256]{k1} # AVX512PF
vscatterpf1qps [ecx+zmm7*4+1024]{k1} # AVX512PF
|
tactcomplabs/xbgas-binutils-gdb
| 1,186
|
gas/testsuite/gas/i386/x86-64-optimize-1.s
|
# Check 64bit instructions with optimized encoding
# Immediates are chosen around encoding boundaries:
#   (1<<31)-1 = 0x7fffffff (largest 32-bit-positive), -(1<<31) = INT32_MIN,
#   (1<<7)-1  = 0x7f (largest imm8),                  -(1<<7)   = -128.
# The same operation is repeated for a low reg (%rax), a reg needing REX.B
# (%r14), and a plain reg (%rbx) to cover the REX-prefix cases.
.allow_index_reg
.text
_start:
# Symbolic immediate: value unknown at parse time, so not optimizable here.
andq $foo, %rax
andq $((1<<31) - 1), %rax
andq $((1<<31) - 1), %rbx
andq $((1<<31) - 1), %r14
andq $-((1<<31)), %rax
andq $-((1<<31)), %rbx
andq $-((1<<31)), %r14
andq $((1<<7) - 1), %rax
andq $((1<<7) - 1), %rbx
andq $((1<<7) - 1), %r14
andq $-((1<<7)), %rax
andq $-((1<<7)), %rbx
andq $-((1<<7)), %r14
testq $((1<<31) - 1), %rax
testq $((1<<31) - 1), %rbx
testq $((1<<31) - 1), %r14
testq $-((1<<31)), %rax
testq $-((1<<31)), %rbx
testq $-((1<<31)), %r14
# Register self/other operands: xor/sub of a register with itself is the
# canonical zeroing idiom; non-self forms are included as controls.
xorq (%rsi), %rax
xorq %rax, %rax
xorq %rbx, %rbx
xorq %r14, %r14
xorq %rdx, %rax
xorq %rdx, %rbx
xorq %rdx, %r14
subq %rax, %rax
subq %rbx, %rbx
subq %r14, %r14
subq %rdx, %rax
subq %rdx, %rbx
subq %rdx, %r14
# Memory destinations with boundary immediates.
andq $((1<<31) - 1), (%rax)
andq $-((1<<31)), (%rax)
testq $((1<<31) - 1), (%rax)
testq $-((1<<31)), (%rax)
# mov with 32-bit-representable vs. 64-bit-only immediates
# (0xffffffff fits unsigned 32-bit; 0x100000000 does not).
mov $((1<<31) - 1),%rax
movq $((1<<31) - 1),%rax
mov $((1<<31) - 1),%r8
movq $((1<<31) - 1),%r8
mov $0xffffffff,%rax
movq $0xffffffff,%rax
mov $0xffffffff,%r8
movq $0xffffffff,%r8
mov $1023,%rax
movq $1023,%rax
mov $0x100000000,%rax
movq $0x100000000,%rax
# clr mnemonic (alias taking a single register operand).
clrq %rax
clrq %r14
|
tactcomplabs/xbgas-binutils-gdb
| 1,498
|
gas/testsuite/gas/i386/ifunc-2.s
|
# Differences involving symbols typed as %gnu_indirect_function (ifunc),
# taken before, between, and after the symbol definitions, within and
# across two sections, and against absolute (.equ) symbols.
# NOTE(review): exercises how the assembler resolves (or relocates)
# symbol-difference expressions when one side is an ifunc — confirm
# expected relocations against the matching .d/.l files.
.section .text.1,"ax",@progbits
start1:
# All referenced symbols still undefined at this point.
.long bar1-foo1
.long bar2-foo2
.long bar1-bar2
.long bar2-bar1
.long start1-bar1
.long start1-bar2
.long bar1-abs1
.long abs1-bar1
.long .-bar1
.type foo1,%gnu_indirect_function
foo1:
ret
.size foo1,.-foo1
# foo1 now defined; bar1/bar2/abs1 still forward references.
.long bar1-foo1
.long bar2-foo2
.long bar1-bar2
.long bar2-bar1
.long start1-bar1
.long start1-bar2
.long bar1-abs1
.long abs1-bar1
.long .-bar1
.type bar1,%gnu_indirect_function
bar1:
ret
.size bar1,.-bar1
# Same expressions again with bar1 defined.
.long bar1-foo1
.long bar2-foo2
.long bar1-bar2
.long bar2-bar1
.long start1-bar1
.long start1-bar2
.long bar1-abs1
.long abs1-bar1
.long .-bar1
# Differences of two absolute ifunc symbols, before/between/after the
# .equ definitions (abs2-abs1 = 0x80 once both are defined).
.long abs1-abs2
.long abs2-abs1
.equ abs1,0x11223300
.type abs1,%gnu_indirect_function
.long abs1-abs2
.long abs2-abs1
.equ abs2,0x11223380
.type abs2,%gnu_indirect_function
.long abs1-abs2
.long abs2-abs1
# Second section: cross-section differences (bar1/foo1 live in .text.1).
.section .text.2,"ax",@progbits
start2:
.long bar1-foo1
.long bar2-foo2
.long bar1-bar2
.long bar2-bar1
.long start2-bar1
.long start2-bar2
.long bar2-abs1
.long abs1-bar2
.long .-bar2
.type foo2,%gnu_indirect_function
foo2:
ret
.size foo2,.-foo2
.long bar1-foo1
.long bar2-foo2
.long bar1-bar2
.long bar2-bar1
.long start2-bar1
.long start2-bar2
.long bar2-abs1
.long abs1-bar2
.long .-bar2
.type bar2,%gnu_indirect_function
bar2:
ret
.size bar2,.-bar2
.long bar1-foo1
.long bar2-foo2
.long bar1-bar2
.long bar2-bar1
.long start2-bar1
.long start2-bar2
.long bar2-abs1
.long abs1-bar2
.long .-bar2
|
tactcomplabs/xbgas-binutils-gdb
| 2,274
|
gas/testsuite/gas/i386/x86-64-avx512vl_vpclmulqdq.s
|
# Check 64bit AVX512VL,VPCLMULQDQ instructions
# Uses extended (xmm16+/ymm16+) registers so the EVEX encoding is
# required even without the {evex} prefix; the "Disp8" lines use a
# displacement that is an exact multiple of the vector width, so it is
# compressible to the EVEX scaled 8-bit displacement form.
.allow_index_reg
.text
_start:
vpclmulqdq $0xab, %xmm18, %xmm29, %xmm25 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm29, %xmm25 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%rdx), %xmm29, %xmm25 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm18, %ymm18, %ymm29 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm29 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm29 # AVX512VL,VPCLMULQDQ Disp8
# Explicit {evex} prefix forms — must encode identically to the above.
{evex} vpclmulqdq $0xab, %xmm18, %xmm29, %xmm25 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %xmm29, %xmm25 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%rdx), %xmm29, %xmm25 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm18, %ymm18, %ymm29 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 0x123(%rax,%r14,8), %ymm18, %ymm29 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%rdx), %ymm18, %ymm29 # AVX512VL,VPCLMULQDQ Disp8
# Alias mnemonics that encode the quadword selection in the name
# instead of an explicit imm8 (see SDM PCLMULQDQ imm8 table).
vpclmulhqhqdq %xmm20, %xmm21, %xmm22
vpclmulhqlqdq %xmm21, %xmm22, %xmm23
vpclmullqhqdq %xmm22, %xmm23, %xmm24
vpclmullqlqdq %xmm23, %xmm24, %xmm25
vpclmulhqhqdq %ymm20, %ymm21, %ymm22
vpclmulhqlqdq %ymm21, %ymm22, %ymm23
vpclmullqhqdq %ymm22, %ymm23, %ymm24
vpclmullqlqdq %ymm23, %ymm24, %ymm25
# Intel-syntax equivalents of the explicit-imm8 forms above.
.intel_syntax noprefix
vpclmulqdq xmm19, xmm26, xmm20, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm19, xmm26, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm19, xmm26, XMMWORD PTR [rdx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm23, ymm29, ymm27, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm23, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm23, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm19, xmm26, xmm20, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm19, xmm26, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm19, xmm26, XMMWORD PTR [rdx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm23, ymm29, ymm27, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm23, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm23, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
|
tactcomplabs/xbgas-binutils-gdb
| 1,386
|
gas/testsuite/gas/i386/x86-64-vp2intersect.s
|
# Check AVX512_VP2INTERSECT new instructions.
# Destination is a mask-register pair (writes k3 and k3+1 here); sources
# cover register, plain memory, and embedded-broadcast forms at all
# three vector lengths (zmm/ymm/xmm), for both dword (d) and qword (q)
# element sizes.
.text
vp2intersectd %zmm1, %zmm2, %k3
vp2intersectd 64(%rax), %zmm2, %k3
vp2intersectd 8(%rax){1to16}, %zmm2, %k3
vp2intersectd %ymm1, %ymm2, %k3
vp2intersectd 32(%rax), %ymm2, %k3
vp2intersectd 8(%rax){1to8}, %ymm2, %k3
vp2intersectd %xmm1, %xmm2, %k3
vp2intersectd 16(%rax), %xmm2, %k3
vp2intersectd 8(%rax){1to4}, %xmm2, %k3
vp2intersectq %zmm1, %zmm2, %k3
vp2intersectq 64(%rax), %zmm2, %k3
vp2intersectq 8(%rax){1to8}, %zmm2, %k3
vp2intersectq %ymm1, %ymm2, %k3
vp2intersectq 32(%rax), %ymm2, %k3
vp2intersectq 8(%rax){1to4}, %ymm2, %k3
vp2intersectq %xmm1, %xmm2, %k3
vp2intersectq 16(%rax), %xmm2, %k3
vp2intersectq 8(%rax){1to2}, %xmm2, %k3
# Same forms in Intel syntax (broadcast spelled "dword/qword bcst").
.intel_syntax noprefix
vp2intersectd k3, zmm2, zmm1
vp2intersectd k3, zmm2, 64[rax]
vp2intersectd k3, zmm2, dword bcst 8[rax]
vp2intersectd k3, ymm2, ymm1
vp2intersectd k3, ymm2, 32[rax]
vp2intersectd k3, ymm2, dword bcst 8[rax]
vp2intersectd k3, xmm2, xmm1
vp2intersectd k3, xmm2, 16[rax]
vp2intersectd k3, xmm2, dword bcst 8[rax]
vp2intersectq k3, zmm2, zmm1
vp2intersectq k3, zmm2, 64[rax]
vp2intersectq k3, zmm2, qword bcst 8[rax]
vp2intersectq k3, ymm2, ymm1
vp2intersectq k3, ymm2, 32[rax]
vp2intersectq k3, ymm2, qword bcst 8[rax]
vp2intersectq k3, xmm2, xmm1
vp2intersectq k3, xmm2, 16[rax]
vp2intersectq k3, xmm2, qword bcst 8[rax]
|
tactcomplabs/xbgas-binutils-gdb
| 18,422
|
gas/testsuite/gas/i386/x86-64-avx-scalar.s
|
# Check 64bit AVX scalar instructions
# Grouped by operand pattern (see the per-group comments); the full
# vcmp* predicate-suffix family is enumerated for both sd and ss.
# The second half repeats everything in Intel syntax, which must
# assemble to the same bytes as the AT&T half.
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%rcx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%rcx),%xmm4
# Tests for op mem64, xmm
vmovsd (%rcx),%xmm4
# Tests for op xmm, mem64
vmovsd %xmm4,(%rcx)
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%rcx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%rcx),%ecx
# Tests for op xmm/mem64, regq
vcvtsd2si %xmm4,%rcx
vcvtsd2si (%rcx),%rcx
vcvttsd2si %xmm4,%rcx
vcvttsd2si (%rcx),%rcx
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq %rcx,%xmm4,%xmm6
vcvtsi2sdq (%rcx),%xmm4,%xmm6
vcvtsi2ssq %rcx,%xmm4,%xmm6
vcvtsi2ssq (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%rcx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%rcx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%rcx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%rcx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%rcx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%rcx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%rcx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%rcx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%rcx),%xmm6,%xmm2
# vcmp pseudo-ops: the predicate suffix selects the imm8 value.
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%rcx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%rcx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%rcx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%rcx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%rcx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%rcx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%rcx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%rcx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%rcx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%rcx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%rcx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%rcx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%rcx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%rcx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%rcx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%rcx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%rcx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%rcx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%rcx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%rcx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%rcx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%rcx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%rcx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%rcx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%rcx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%rcx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%rcx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%rcx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%rcx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%rcx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%rcx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%rcx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%rcx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%rcx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%rcx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%rcx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%rcx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%rcx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%rcx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%rcx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%rcx),%xmm6,%xmm2
# Same vcmp predicate family for single precision.
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%rcx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%rcx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%rcx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%rcx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%rcx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%rcx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%rcx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%rcx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%rcx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%rcx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%rcx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%rcx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%rcx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%rcx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%rcx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%rcx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%rcx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%rcx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%rcx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%rcx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%rcx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%rcx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%rcx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%rcx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%rcx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%rcx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%rcx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%rcx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%rcx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%rcx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%rcx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%rcx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%rcx),%xmm4
# Tests for op mem32, xmm
vmovss (%rcx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%rcx)
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%rcx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%rcx),%ecx
# Tests for op xmm/mem32, regq
vcvtss2si %xmm4,%rcx
vcvtss2si (%rcx),%rcx
vcvttss2si %xmm4,%rcx
vcvttss2si (%rcx),%rcx
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sdl (%rcx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ssl (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%rcx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm, xmm, xmm
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
# Tests with different memory and register operands.
vcvtsi2sdl 0x12345678,%xmm8,%xmm15
vcvtsi2sdl (%rbp),%xmm8,%xmm15
vcvtsi2sdl (%rsp),%xmm8,%xmm15
vcvtsi2sdl 0x99(%rbp),%xmm8,%xmm15
vcvtsi2sdl 0x99(%r15),%xmm8,%xmm15
vcvtsi2sdl 0x99(%rip),%xmm8,%xmm15
vcvtsi2sdl 0x99(%rsp),%xmm8,%xmm15
vcvtsi2sdl 0x99(%r12),%xmm8,%xmm15
vcvtsi2sdl -0x99(,%riz),%xmm8,%xmm15
vcvtsi2sdl -0x99(,%riz,2),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rbx,%riz),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rbx,%riz,2),%xmm8,%xmm15
vcvtsi2sdl -0x99(%r12,%r15,4),%xmm8,%xmm15
vcvtsi2sdl -0x99(%r8,%r15,8),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rbp,%r13,4),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rsp,%r12,1),%xmm8,%xmm15
# Tests for all register operands.
vcvtsd2si %xmm8,%r8d
vcvtsi2sdl %r8d,%xmm8,%xmm15
# Tests for different memory/register operand
vcvtsd2si (%rcx),%r8
vcvtss2si (%rcx),%r8
# ---- Intel-syntax half: same coverage, reversed operand order. ----
.intel_syntax noprefix
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [rcx]
vcomisd xmm4,[rcx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [rcx]
vucomisd xmm4,[rcx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [rcx]
vmovsd xmm4,[rcx]
# Tests for op xmm, mem64
vmovsd QWORD PTR [rcx],xmm4
vmovsd [rcx],xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [rcx]
vcvtsd2si ecx,[rcx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [rcx]
vcvttsd2si ecx,[rcx]
# Tests for op xmm/mem64, regq
vcvtsd2si rcx,xmm4
vcvtsd2si rcx,QWORD PTR [rcx]
vcvtsd2si rcx,[rcx]
vcvttsd2si rcx,xmm4
vcvttsd2si rcx,QWORD PTR [rcx]
vcvttsd2si rcx,[rcx]
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq xmm6,xmm4,rcx
vcvtsi2sdq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2sdq xmm6,xmm4,[rcx]
vcvtsi2ssq xmm6,xmm4,rcx
vcvtsi2ssq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2ssq xmm6,xmm4,[rcx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [rcx],7
vcmpsd xmm2,xmm6,[rcx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [rcx],7
vroundsd xmm2,xmm6,[rcx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [rcx]
vaddsd xmm2,xmm6,[rcx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [rcx]
vcvtsd2ss xmm2,xmm6,[rcx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [rcx]
vdivsd xmm2,xmm6,[rcx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [rcx]
vmaxsd xmm2,xmm6,[rcx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [rcx]
vminsd xmm2,xmm6,[rcx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [rcx]
vmulsd xmm2,xmm6,[rcx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [rcx]
vsqrtsd xmm2,xmm6,[rcx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [rcx]
vsubsd xmm2,xmm6,[rcx]
# vcmp pseudo-ops, Intel syntax: register / sized memory / unsized memory.
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeqsd xmm2,xmm6,[rcx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpltsd xmm2,xmm6,[rcx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [rcx]
vcmplesd xmm2,xmm6,[rcx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpunordsd xmm2,xmm6,[rcx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneqsd xmm2,xmm6,[rcx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnltsd xmm2,xmm6,[rcx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlesd xmm2,xmm6,[rcx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpordsd xmm2,xmm6,[rcx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_uqsd xmm2,xmm6,[rcx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [rcx]
vcmpngesd xmm2,xmm6,[rcx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngtsd xmm2,xmm6,[rcx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalsesd xmm2,xmm6,[rcx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_oqsd xmm2,xmm6,[rcx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [rcx]
vcmpgesd xmm2,xmm6,[rcx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgtsd xmm2,xmm6,[rcx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [rcx]
vcmptruesd xmm2,xmm6,[rcx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ossd xmm2,xmm6,[rcx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmplt_oqsd xmm2,xmm6,[rcx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmple_oqsd xmm2,xmm6,[rcx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpunord_ssd xmm2,xmm6,[rcx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ussd xmm2,xmm6,[rcx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlt_uqsd xmm2,xmm6,[rcx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnle_uqsd xmm2,xmm6,[rcx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpord_ssd xmm2,xmm6,[rcx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ussd xmm2,xmm6,[rcx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnge_uqsd xmm2,xmm6,[rcx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngt_uqsd xmm2,xmm6,[rcx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalse_ossd xmm2,xmm6,[rcx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ossd xmm2,xmm6,[rcx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpge_oqsd xmm2,xmm6,[rcx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgt_oqsd xmm2,xmm6,[rcx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmptrue_ussd xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [rcx]
vaddss xmm2,xmm6,[rcx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [rcx]
vcvtss2sd xmm2,xmm6,[rcx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [rcx]
vdivss xmm2,xmm6,[rcx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [rcx]
vmaxss xmm2,xmm6,[rcx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [rcx]
vminss xmm2,xmm6,[rcx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [rcx]
vmulss xmm2,xmm6,[rcx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [rcx]
vrcpss xmm2,xmm6,[rcx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [rcx]
vrsqrtss xmm2,xmm6,[rcx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [rcx]
vsqrtss xmm2,xmm6,[rcx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [rcx]
vsubss xmm2,xmm6,[rcx]
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeqss xmm2,xmm6,[rcx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [rcx]
vcmpltss xmm2,xmm6,[rcx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [rcx]
vcmpless xmm2,xmm6,[rcx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [rcx]
vcmpunordss xmm2,xmm6,[rcx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneqss xmm2,xmm6,[rcx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [rcx]
vcmpnltss xmm2,xmm6,[rcx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [rcx]
vcmpnless xmm2,xmm6,[rcx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [rcx]
vcmpordss xmm2,xmm6,[rcx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_uqss xmm2,xmm6,[rcx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [rcx]
vcmpngess xmm2,xmm6,[rcx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [rcx]
vcmpngtss xmm2,xmm6,[rcx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [rcx]
vcmpfalsess xmm2,xmm6,[rcx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_oqss xmm2,xmm6,[rcx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [rcx]
vcmpgess xmm2,xmm6,[rcx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [rcx]
vcmpgtss xmm2,xmm6,[rcx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [rcx]
vcmptruess xmm2,xmm6,[rcx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_osss xmm2,xmm6,[rcx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmplt_oqss xmm2,xmm6,[rcx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmple_oqss xmm2,xmm6,[rcx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpunord_sss xmm2,xmm6,[rcx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_usss xmm2,xmm6,[rcx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnlt_uqss xmm2,xmm6,[rcx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnle_uqss xmm2,xmm6,[rcx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpord_sss xmm2,xmm6,[rcx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_usss xmm2,xmm6,[rcx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnge_uqss xmm2,xmm6,[rcx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpngt_uqss xmm2,xmm6,[rcx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpfalse_osss xmm2,xmm6,[rcx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_osss xmm2,xmm6,[rcx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpge_oqss xmm2,xmm6,[rcx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpgt_oqss xmm2,xmm6,[rcx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [rcx]
vcmptrue_usss xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [rcx]
vcomiss xmm4,[rcx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [rcx]
vucomiss xmm4,[rcx]
# Tests for op mem32, xmm
vmovss xmm4,DWORD PTR [rcx]
vmovss xmm4,[rcx]
# Tests for op xmm, mem32
vmovss DWORD PTR [rcx],xmm4
vmovss [rcx],xmm4
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [rcx]
vcvtss2si ecx,[rcx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [rcx]
vcvttss2si ecx,[rcx]
# Tests for op xmm/mem32, regq
vcvtss2si rcx,xmm4
vcvtss2si rcx,DWORD PTR [rcx]
vcvtss2si rcx,[rcx]
vcvttss2si rcx,xmm4
vcvttss2si rcx,DWORD PTR [rcx]
vcvttss2si rcx,[rcx]
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [rcx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [rcx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [rcx],7
vcmpss xmm2,xmm6,[rcx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [rcx],7
vroundss xmm2,xmm6,[rcx],7
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
# Tests with different memory and register operands.
vcvtsi2sd xmm15,xmm8,DWORD PTR ds:0x12345678
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r15+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rip+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*1-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*2-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*1-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*2-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+r15*4-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r8+r15*8-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+r12*4-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+r13*1-0x99]
# Tests for all register operands.
vcvtsd2si r8d,xmm8
vcvtsi2sd xmm15,xmm8,r8d
# Tests for different memory/register operand
vcvtsd2si r8,QWORD PTR [rcx]
vcvtss2si r8,DWORD PTR [rcx]
|
tactcomplabs/xbgas-binutils-gdb
| 1,217
|
gas/testsuite/gas/i386/secrel.s
|
# Test input for PE/COFF .secrel32 (section-relative 32-bit) relocations.
# The ">>>>"/"<<<<" .ascii runs are deliberate padding that places each
# label (pre*/sam*/nex*) at a distinct, known offset inside its section,
# so the expected relocation addends in the matching .d dump are fixed.
# Do NOT reflow, reorder, or resize anything here.
.text
.ascii ">>>>"
pre04: .ascii "<<<<"
.ascii ">>>>>"
pre0d: .ascii "<<<"
.ascii ">>>>>>"
pre16: .ascii "<<"
.ascii ">>>>>>>"
pre1f: .ascii "<"
.data
.ascii ">>>>"
sam04: .ascii "<<<<"
.ascii ">>>>>"
sam0d: .ascii "<<<"
.ascii ">>>>>>"
sam16: .ascii "<<"
.ascii ">>>>>>>"
sam1f: .ascii "<"
# References to labels in a PRECEDING section (.text), via both the
# .secrel32 directive and the @secrel32 operator spellings.
.ascii ">>>>"
.secrel32 pre04
.byte 0x11
.secrel32 pre0d
.byte 0x11
.secrel32 pre16
.byte 0x11
.long pre1f@secrel32
.byte 0x11
.ascii "<<<<<<<<"
# References to labels in the SAME section (.data).
.ascii ">>>>"
.secrel32 sam04
.byte 0x11
.secrel32 sam0d
.byte 0x11
.long sam16@secrel32
.byte 0x11
.secrel32 sam1f
.byte 0x11
.ascii "<<<<<<<<"
# Forward references to labels in a section defined LATER (.rdata).
.ascii ">>>>"
.secrel32 nex04
.byte 0x11
.long nex0d@secrel32
.byte 0x11
.secrel32 nex16
.byte 0x11
.secrel32 nex1f
.byte 0x11
.ascii "<<<<<<<<"
# References to EXTERNAL (undefined) symbols.
.ascii ">>>>"
.long ext24@secrel32
.byte 0x11
.secrel32 ext2d
.byte 0x11
.secrel32 ext36
.byte 0x11
.secrel32 ext3f
.byte 0x11
.ascii "<<<<<<<<"
# secrel32 with constant and symbolic (.equ) addends, and in an
# instruction displacement (case-insensitive @SECREL32 spelling).
.long pre0d@secrel32+3
.long pre16@secrel32+six
leal bar@SECREL32+44(%eax), %edx
.section .rdata
.ascii ">>>>"
nex04: .ascii "<<<<"
.ascii ">>>>>"
nex0d: .ascii "<<<"
.ascii ">>>>>>"
nex16: .ascii "<<"
.ascii ">>>>>>>"
nex1f: .ascii "<"
.ascii ">>>>"
.p2align 4,0
.equ six,6
|
tactcomplabs/xbgas-binutils-gdb
| 1,337
|
gas/testsuite/gas/i386/x86-64-align-branch-1.s
|
# Test input for x86-64 branch alignment (-malign-branch* options).
# The repeated filler instructions (movl/pushq/popq) position each
# branch, fused cmp+jcc pair, and jmp at specific byte offsets so the
# assembler's padding/prefix insertion can be checked against the
# expected dump.  Instruction count and order are the test — do not
# deduplicate or reorder the "redundant" moves.
.text
.globl foo
.p2align 4
foo:
movl %eax, %fs:0x1
pushq %rbp
pushq %rbp
pushq %rbp
movq %rsp, %rbp
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
# Macro-fusible cmp+jcc pair near an alignment boundary.
cmp %rax, %rbp
je .L_2
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %edi, -8(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
popq %rbp
popq %rbp
je .L_2
popq %rbp
je .L_2
movl %eax, -4(%rbp)
movl %esi, -12(%rbp)
movl %edi, -8(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
popq %rbp
popq %rbp
jmp .L_3
jmp .L_3
jmp .L_3
movl %eax, -4(%rbp)
movl %esi, -12(%rbp)
movl %edi, -8(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
popq %rbp
popq %rbp
cmp %rax, %rbp
je .L_2
jmp .L_3
.L_2:
movl -12(%rbp), %eax
movl %eax, -4(%rbp)
.L_3:
# Larger displacement forms (4-byte disp) change instruction length,
# exercising alignment with longer filler instructions.
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
jmp .L_3
popq %rbp
retq
|
tactcomplabs/xbgas-binutils-gdb
| 5,841
|
gas/testsuite/gas/i386/prefix.s
|
# Test input for instruction-prefix handling and disassembly of
# unusual / redundant prefix combinations.  Most sequences are emitted
# as raw .byte data on purpose: the assembler would refuse (or
# canonicalize) these prefix mixes, and the test checks how the
# DISASSEMBLER renders them.  The comment above each .byte group is the
# expected decode.  Do not replace the .byte runs with mnemonics.
# Note: ';' is the GAS statement separator here, not a comment.
.text ; foo: addr16 fstcw %es:(%si)
fstsw; fstsw %ax;
addr16 fstsw %ax ;addr16 rep cmpsw %es:(%di),%ss:(%si)
es fwait
fwait
movl $0,%gs:fpu_owner_task
# Raw SSE-style opcodes with conflicting mandatory prefixes.
.byte 0x66
.byte 0xf2
.byte 0x0f
.byte 0x38
.byte 0x17
.byte 0xf2
.byte 0x66
.byte 0x0f
.byte 0x54
.byte 0xf2
.byte 0x0f
.byte 0x54
# data16 movsd %xmm4,(%edx)
.byte 0xf2
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movsd %xmm4,(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 movsd %xmm4,(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movss %xmm4,(%edx)
.byte 0xf3
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 movss %xmm4,(%bp,%si)
.byte 0xf3
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# repz data16 movsd %xmm4,(%bp,%si)
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movss %xmm4,%ds:(%edx)
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 movsd %xmm4,%ss:(%edx)
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# repz lock data16 movsd %xmm4,%ss:(%edx)
.byte 0xf3
.byte 0xf0
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 ds movsd %xmm4,%ss:(%edx)
.byte 0xf2
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 ds movsd %xmm4,%ss:(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 ds movsd %xmm4,%ss:(%bp,%si)
.byte 0xf2
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# data16 ds movss %xmm4,%ss:(%edx)
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# lock data16 ds movss %xmm4,%ss:(%edx)
.byte 0xf3
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# repz data16 ds movsd %xmm4,%ss:(%bp,%si)
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x0f
.byte 0x11
.byte 0x22
# Prefix pile-ups on nop/xchg/pause (opcode 0x90).
# repnz; xchg %ax,%ax
.byte 0xf2
.byte 0x66
.byte 0x90
# repnz; addr16 xchg %ax,%ax
.byte 0xf2
.byte 0x67
.byte 0x66
.byte 0x90
# repnz; addr16 lock xchg %ax,%ax
.byte 0xf2
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x90
# data16 pause
.byte 0xf3
.byte 0x66
.byte 0x90
# addr16 lock data16 pause
.byte 0xf3
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x90
# repz; addr16; repnz; xchg %ax,%ax
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x90
# repnz; ds nop
.byte 0xf2
.byte 0x3e
.byte 0x90
# repnz; lock addr16 ds nop
.byte 0xf2
.byte 0xf0
.byte 0x67
.byte 0x3e
.byte 0x90
# ds pause
.byte 0xf3
.byte 0x3e
.byte 0x90
# data16 ds pause
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x90
# lock ds pause
.byte 0xf3
.byte 0xf0
.byte 0x3e
.byte 0x90
# lock addr16 ds pause
.byte 0xf3
.byte 0xf0
.byte 0x67
.byte 0x3e
.byte 0x90
# repz; repnz; addr16 ds nop
.byte 0xf3
.byte 0xf2
.byte 0x67
.byte 0x3e
.byte 0x90
# lock ss xchg %ax,%ax
.byte 0x66
.byte 0xf0
.byte 0x36
.byte 0x90
# repnz; ss nop
.byte 0xf2
.byte 0x36
.byte 0x90
# repnz; ss xchg %ax,%ax
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x90
# repnz; lock ss nop
.byte 0xf2
.byte 0xf0
.byte 0x36
.byte 0x90
# repnz; lock addr16 ss nop
.byte 0xf2
.byte 0xf0
.byte 0x67
.byte 0x36
.byte 0x90
# ss pause
.byte 0xf3
.byte 0x36
.byte 0x90
# addr16 ss pause
.byte 0xf3
.byte 0x67
.byte 0x36
.byte 0x90
# lock addr16 ss pause
.byte 0xf3
.byte 0xf0
.byte 0x67
.byte 0x36
.byte 0x90
# repz; repnz; ss nop
.byte 0xf3
.byte 0xf2
.byte 0x36
.byte 0x90
# repz; repnz; addr16 ss nop
.byte 0xf3
.byte 0xf2
.byte 0x67
.byte 0x36
.byte 0x90
# repz; lock; repnz; ss xchg %ax,%ax
.byte 0xf3
.byte 0xf0
.byte 0xf2
.byte 0x66
.byte 0x36
.byte 0x90
# ds ss xchg %ax,%ax
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# addr16 ds ss xchg %ax,%ax
.byte 0x67
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# addr16 lock ds ss xchg %ax,%ax
.byte 0x67
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# data16 ds ss pause
.byte 0xf3
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# lock data16 ds ss pause
.byte 0xf3
.byte 0xf0
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# repz; repnz; addr16 ds ss nop
.byte 0xf3
.byte 0xf2
.byte 0x67
.byte 0x3e
.byte 0x36
.byte 0x90
# repz; addr16; repnz; ds ss xchg %ax,%ax
.byte 0xf3
.byte 0x67
.byte 0xf2
.byte 0x66
.byte 0x3e
.byte 0x36
.byte 0x90
# rep prefixes on rdseed/rdrand (0f c7 /7 and /6).
# repz; rdseed %eax
.byte 0xf3
.byte 0x0f
.byte 0xc7
.byte 0xf8
nop
# repz; rdrand %eax
.byte 0xf3
.byte 0x0f
.byte 0xc7
.byte 0xf0
nop
# repnz; rdseed %eax
.byte 0xf2
.byte 0x0f
.byte 0xc7
.byte 0xf8
nop
# repnz; rdrand %eax
.byte 0xf2
.byte 0x0f
.byte 0xc7
.byte 0xf0
nop
# Legacy prefix ahead of an SSE instruction, then raw VEX/EVEX
# encodings whose embedded "prefix" bits conflict with the opcode.
repz; movaps %xmm7, %xmm7
int $3
# "repz" vmovaps %xmm7, %xmm7
.byte 0xc5
.byte 0xfa
.byte 0x28
.byte 0xff
int $3
# "repnz" {vex3} vmovaps %xmm7, %xmm7
.byte 0xc4
.byte 0xe1
.byte 0x7b
.byte 0x28
.byte 0xff
int $3
# "EVEX.W1" vmovaps %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0xfc
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# "repz" vmovaps %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0x7e
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# "EVEX.W0" vmovapd %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0x7d
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# "repnz" vmovapd %xmm7, %xmm7
.byte 0x62
.byte 0xf1
.byte 0xff
.byte 0x08
.byte 0x28
.byte 0xff
int $3
# Assembler-accepted prefix/pseudo-prefix combinations on VEX/EVEX.
.byte 0x66; vmovaps %xmm0, %xmm0
repz; {vex3} vmovaps %xmm0, %xmm0
repnz; vmovaps %xmm0, %xmm0
lock; {evex} vmovaps %xmm0, %xmm0
vcvtpd2dqx 0x20(%eax),%xmm0
vcvtpd2dq 0x20(%eax){1to2},%xmm0
vcvtpd2dqx 0x20(%eax),%xmm0
# Get a good alignment.
.p2align 4,0
|
tactcomplabs/xbgas-binutils-gdb
| 13,226
|
gas/testsuite/gas/i386/opcode.s
|
# Test input sweeping the ia32 one-byte and 0f two-byte opcode maps,
# roughly in opcode order.  The 0x90/0x90909090 immediates and
# displacements make the encoded bytes easy to spot in the expected
# dump.  The listing is exhaustive by design — every line corresponds
# to one expected disassembly line, so nothing here may be reordered,
# merged, or "simplified".
.text
foo:
add %dl,0x90909090(%eax)
add %edx,0x90909090(%eax)
add 0x90909090(%eax),%dl
add 0x90909090(%eax),%edx
add $0x90,%al
add $0x90909090,%eax
push %es
pop %es
or %dl,0x90909090(%eax)
or %edx,0x90909090(%eax)
or 0x90909090(%eax),%dl
or 0x90909090(%eax),%edx
or $0x90,%al
or $0x90909090,%eax
push %cs
adc %dl,0x90909090(%eax)
adc %edx,0x90909090(%eax)
adc 0x90909090(%eax),%dl
adc 0x90909090(%eax),%edx
adc $0x90,%al
adc $0x90909090,%eax
push %ss
pop %ss
sbb %dl,0x90909090(%eax)
sbb %edx,0x90909090(%eax)
sbb 0x90909090(%eax),%dl
sbb 0x90909090(%eax),%edx
sbb $0x90,%al
sbb $0x90909090,%eax
push %ds
pop %ds
and %dl,0x90909090(%eax)
and %edx,0x90909090(%eax)
and 0x90909090(%eax),%dl
and 0x90909090(%eax),%edx
and $0x90,%al
and $0x90909090,%eax
daa
sub %dl,0x90909090(%eax)
sub %edx,0x90909090(%eax)
sub 0x90909090(%eax),%dl
sub 0x90909090(%eax),%edx
sub $0x90,%al
sub $0x90909090,%eax
das
xor %dl,0x90909090(%eax)
xor %edx,0x90909090(%eax)
xor 0x90909090(%eax),%dl
xor 0x90909090(%eax),%edx
xor $0x90,%al
xor $0x90909090,%eax
aaa
cmp %dl,0x90909090(%eax)
cmp %edx,0x90909090(%eax)
cmp 0x90909090(%eax),%dl
cmp 0x90909090(%eax),%edx
cmp $0x90,%al
cmp $0x90909090,%eax
aas
inc %eax
inc %ecx
inc %edx
inc %ebx
inc %esp
inc %ebp
inc %esi
inc %edi
dec %eax
dec %ecx
dec %edx
dec %ebx
dec %esp
dec %ebp
dec %esi
dec %edi
push %eax
push %ecx
push %edx
push %ebx
push %esp
push %ebp
push %esi
push %edi
pop %eax
pop %ecx
pop %edx
pop %ebx
pop %esp
pop %ebp
pop %esi
pop %edi
pusha
popa
bound %edx,0x90909090(%eax)
arpl %dx,0x90909090(%eax)
push $0x90909090
imul $0x90909090,0x90909090(%eax),%edx
push $0xffffff90
imul $0xffffff90,0x90909090(%eax),%edx
insb (%dx),%es:(%edi)
insl (%dx),%es:(%edi)
outsb %ds:(%esi),(%dx)
outsl %ds:(%esi),(%dx)
# Short (rel8) conditional jumps, opcodes 0x70-0x7f.
jo .+2-0x70
jno .+2-0x70
jb .+2-0x70
jae .+2-0x70
je .+2-0x70
jne .+2-0x70
jbe .+2-0x70
ja .+2-0x70
js .+2-0x70
jns .+2-0x70
jp .+2-0x70
jnp .+2-0x70
jl .+2-0x70
jge .+2-0x70
jle .+2-0x70
jg .+2-0x70
adcb $0x90,0x90909090(%eax)
adcl $0x90909090,0x90909090(%eax)
adcl $0xffffff90,0x90909090(%eax)
test %dl,0x90909090(%eax)
test %edx,0x90909090(%eax)
xchg %dl,0x90909090(%eax)
xchg %edx,0x90909090(%eax)
mov %dl,0x90909090(%eax)
mov %edx,0x90909090(%eax)
mov 0x90909090(%eax),%dl
mov 0x90909090(%eax),%edx
movw %ss,0x90909090(%eax)
lea 0x90909090(%eax),%edx
movw 0x90909090(%eax),%ss
popl 0x90909090(%eax)
xchg %eax,%eax
xchg %eax,%ecx
xchg %eax,%edx
xchg %eax,%ebx
xchg %eax,%esp
xchg %eax,%ebp
xchg %eax,%esi
xchg %eax,%edi
cwtl
cltd
lcall $0x9090,$0x90909090
fwait
pushf
popf
sahf
lahf
mov 0x90909090,%al
mov 0x90909090,%eax
mov %al,0x90909090
mov %eax,0x90909090
movsb %ds:(%esi),%es:(%edi)
movsl %ds:(%esi),%es:(%edi)
cmpsb %es:(%edi),%ds:(%esi)
cmpsl %es:(%edi),%ds:(%esi)
test $0x90,%al
test $0x90909090,%eax
stos %al,%es:(%edi)
stos %eax,%es:(%edi)
lods %ds:(%esi),%al
lods %ds:(%esi),%eax
scas %es:(%edi),%al
scas %es:(%edi),%eax
mov $0x90,%al
mov $0x90,%cl
mov $0x90,%dl
mov $0x90,%bl
mov $0x90,%ah
mov $0x90,%ch
mov $0x90,%dh
mov $0x90,%bh
mov $0x90909090,%eax
mov $0x90909090,%ecx
mov $0x90909090,%edx
mov $0x90909090,%ebx
mov $0x90909090,%esp
mov $0x90909090,%ebp
mov $0x90909090,%esi
mov $0x90909090,%edi
rclb $0x90,0x90909090(%eax)
rcll $0x90,0x90909090(%eax)
ret $0x9090
ret
les 0x90909090(%eax),%edx
lds 0x90909090(%eax),%edx
movb $0x90,0x90909090(%eax)
movl $0x90909090,0x90909090(%eax)
enter $0x9090,$0x90
leave
lret $0x9090
lret
int3
int $0x90
into
iret
rclb 0x90909090(%eax)
rcll 0x90909090(%eax)
rclb %cl,0x90909090(%eax)
rcll %cl,0x90909090(%eax)
aam $0xffffff90
aad $0xffffff90
xlat %ds:(%ebx)
fcoms 0x90909090(%eax)
fsts 0x90909090(%eax)
ficoml 0x90909090(%eax)
fistl 0x90909090(%eax)
fcoml 0x90909090(%eax)
fstl 0x90909090(%eax)
ficoms 0x90909090(%eax)
fists 0x90909090(%eax)
loopne .+2-0x70
loope .+2-0x70
loop .+2-0x70
jecxz .+2-0x70
in $0x90,%al
in $0x90,%eax
out %al,$0x90
out %eax,$0x90
call .+5+0x90909090
jmp .+5+0x90909090
ljmp $0x9090,$0x90909090
jmp .+2-0x70
in (%dx),%al
in (%dx),%eax
out %al,(%dx)
out %eax,(%dx)
hlt
cmc
notb 0x90909090(%eax)
notl 0x90909090(%eax)
clc
stc
cli
sti
cld
std
call *0x90909090(%eax)
# Two-byte (0f xx) opcode map: system, CPU-state and cmov/MMX groups.
lldt 0x90909090(%eax)
lgdt 0x90909090(%eax)
lar 0x90909090(%eax),%edx
lsl 0x90909090(%eax),%edx
clts
invd
wbinvd
ud2a
mov %cr2,%eax
mov %db2,%eax
mov %eax,%cr2
mov %eax,%db2
mov %tr2,%eax
mov %eax,%tr2
wrmsr
rdtsc
rdmsr
rdpmc
cmovo 0x90909090(%eax),%edx
cmovno 0x90909090(%eax),%edx
cmovb 0x90909090(%eax),%edx
cmovae 0x90909090(%eax),%edx
cmove 0x90909090(%eax),%edx
cmovne 0x90909090(%eax),%edx
cmovbe 0x90909090(%eax),%edx
cmova 0x90909090(%eax),%edx
cmovs 0x90909090(%eax),%edx
cmovns 0x90909090(%eax),%edx
cmovp 0x90909090(%eax),%edx
cmovnp 0x90909090(%eax),%edx
cmovl 0x90909090(%eax),%edx
cmovge 0x90909090(%eax),%edx
cmovle 0x90909090(%eax),%edx
cmovg 0x90909090(%eax),%edx
punpcklbw 0x90909090(%eax),%mm2
punpcklwd 0x90909090(%eax),%mm2
punpckldq 0x90909090(%eax),%mm2
packsswb 0x90909090(%eax),%mm2
pcmpgtb 0x90909090(%eax),%mm2
pcmpgtw 0x90909090(%eax),%mm2
pcmpgtd 0x90909090(%eax),%mm2
packuswb 0x90909090(%eax),%mm2
punpckhbw 0x90909090(%eax),%mm2
punpckhwd 0x90909090(%eax),%mm2
punpckhdq 0x90909090(%eax),%mm2
packssdw 0x90909090(%eax),%mm2
movd 0x90909090(%eax),%mm2
movq 0x90909090(%eax),%mm2
psrlw $0x90,%mm0
psrld $0x90,%mm0
psrlq $0x90,%mm0
pcmpeqb 0x90909090(%eax),%mm2
pcmpeqw 0x90909090(%eax),%mm2
pcmpeqd 0x90909090(%eax),%mm2
emms
movd %mm2,0x90909090(%eax)
movq %mm2,0x90909090(%eax)
# Near (rel32) conditional jumps, 0f 80-0f 8f.
jo .+6+0x90909090
jno .+6+0x90909090
jb .+6+0x90909090
jae .+6+0x90909090
je .+6+0x90909090
jne .+6+0x90909090
jbe .+6+0x90909090
ja .+6+0x90909090
js .+6+0x90909090
jns .+6+0x90909090
jp .+6+0x90909090
jnp .+6+0x90909090
jl .+6+0x90909090
jge .+6+0x90909090
jle .+6+0x90909090
jg .+6+0x90909090
seto 0x90909090(%eax)
setno 0x90909090(%eax)
setb 0x90909090(%eax)
setae 0x90909090(%eax)
sete 0x90909090(%eax)
setne 0x90909090(%eax)
setbe 0x90909090(%eax)
seta 0x90909090(%eax)
sets 0x90909090(%eax)
setns 0x90909090(%eax)
setp 0x90909090(%eax)
setnp 0x90909090(%eax)
setl 0x90909090(%eax)
setge 0x90909090(%eax)
setle 0x90909090(%eax)
setg 0x90909090(%eax)
push %fs
pop %fs
cpuid
bt %edx,0x90909090(%eax)
shld $0x90,%edx,0x90909090(%eax)
shld %cl,%edx,0x90909090(%eax)
push %gs
pop %gs
rsm
bts %edx,0x90909090(%eax)
shrd $0x90,%edx,0x90909090(%eax)
shrd %cl,%edx,0x90909090(%eax)
imul 0x90909090(%eax),%edx
cmpxchg %dl,0x90909090(%eax)
cmpxchg %edx,0x90909090(%eax)
lss 0x90909090(%eax),%edx
btr %edx,0x90909090(%eax)
lfs 0x90909090(%eax),%edx
lgs 0x90909090(%eax),%edx
movzbl 0x90909090(%eax),%edx
movzwl 0x90909090(%eax),%edx
ud2
btc %edx,0x90909090(%eax)
bsf 0x90909090(%eax),%edx
bsr 0x90909090(%eax),%edx
movsbl 0x90909090(%eax),%edx
movswl 0x90909090(%eax),%edx
xadd %dl,0x90909090(%eax)
xadd %edx,0x90909090(%eax)
bswap %eax
bswap %ecx
bswap %edx
bswap %ebx
bswap %esp
bswap %ebp
bswap %esi
bswap %edi
psrlw 0x90909090(%eax),%mm2
psrld 0x90909090(%eax),%mm2
psrlq 0x90909090(%eax),%mm2
pmullw 0x90909090(%eax),%mm2
psubusb 0x90909090(%eax),%mm2
psubusw 0x90909090(%eax),%mm2
pand 0x90909090(%eax),%mm2
paddusb 0x90909090(%eax),%mm2
paddusw 0x90909090(%eax),%mm2
pandn 0x90909090(%eax),%mm2
psraw 0x90909090(%eax),%mm2
psrad 0x90909090(%eax),%mm2
pmulhw 0x90909090(%eax),%mm2
psubsb 0x90909090(%eax),%mm2
psubsw 0x90909090(%eax),%mm2
por 0x90909090(%eax),%mm2
paddsb 0x90909090(%eax),%mm2
paddsw 0x90909090(%eax),%mm2
pxor 0x90909090(%eax),%mm2
psllw 0x90909090(%eax),%mm2
pslld 0x90909090(%eax),%mm2
psllq 0x90909090(%eax),%mm2
pmaddwd 0x90909090(%eax),%mm2
psubb 0x90909090(%eax),%mm2
psubw 0x90909090(%eax),%mm2
psubd 0x90909090(%eax),%mm2
paddb 0x90909090(%eax),%mm2
paddw 0x90909090(%eax),%mm2
paddd 0x90909090(%eax),%mm2
# Same opcode sweep again with 16-bit operand size (0x66 prefix forms).
add %dx,0x90909090(%eax)
add 0x90909090(%eax),%dx
add $0x9090,%ax
pushw %es
popw %es
or %dx,0x90909090(%eax)
or 0x90909090(%eax),%dx
or $0x9090,%ax
pushw %cs
adc %dx,0x90909090(%eax)
adc 0x90909090(%eax),%dx
adc $0x9090,%ax
pushw %ss
popw %ss
sbb %dx,0x90909090(%eax)
sbb 0x90909090(%eax),%dx
sbb $0x9090,%ax
pushw %ds
popw %ds
and %dx,0x90909090(%eax)
and 0x90909090(%eax),%dx
and $0x9090,%ax
sub %dx,0x90909090(%eax)
sub 0x90909090(%eax),%dx
sub $0x9090,%ax
xor %dx,0x90909090(%eax)
xor 0x90909090(%eax),%dx
xor $0x9090,%ax
cmp %dx,0x90909090(%eax)
cmp 0x90909090(%eax),%dx
cmp $0x9090,%ax
inc %ax
inc %cx
inc %dx
inc %bx
inc %sp
inc %bp
inc %si
inc %di
dec %ax
dec %cx
dec %dx
dec %bx
dec %sp
dec %bp
dec %si
dec %di
push %ax
push %cx
push %dx
push %bx
push %sp
push %bp
push %si
push %di
pop %ax
pop %cx
pop %dx
pop %bx
pop %sp
pop %bp
pop %si
pop %di
pushaw
popaw
bound %dx,0x90909090(%eax)
pushw $0x9090
imul $0x9090,0x90909090(%eax),%dx
pushw $0xffffff90
imul $0xffffff90,0x90909090(%eax),%dx
insw (%dx),%es:(%edi)
outsw %ds:(%esi),(%dx)
adcw $0x9090,0x90909090(%eax)
adcw $0xffffff90,0x90909090(%eax)
test %dx,0x90909090(%eax)
xchg %dx,0x90909090(%eax)
mov %dx,0x90909090(%eax)
mov 0x90909090(%eax),%dx
movw %ss,0x90909090(%eax)
lea 0x90909090(%eax),%dx
popw 0x90909090(%eax)
xchg %ax,%cx
xchg %ax,%dx
xchg %ax,%bx
xchg %ax,%sp
xchg %ax,%bp
xchg %ax,%si
xchg %ax,%di
cbtw
cwtd
lcallw $0x9090,$0x9090
pushfw
popfw
mov 0x90909090,%ax
mov %ax,0x90909090
movsw %ds:(%esi),%es:(%edi)
cmpsw %es:(%edi),%ds:(%esi)
test $0x9090,%ax
stos %ax,%es:(%edi)
lods %ds:(%esi),%ax
scas %es:(%edi),%ax
mov $0x9090,%ax
mov $0x9090,%cx
mov $0x9090,%dx
mov $0x9090,%bx
mov $0x9090,%sp
mov $0x9090,%bp
mov $0x9090,%si
mov $0x9090,%di
rclw $0x90,0x90909090(%eax)
retw $0x9090
retw
les 0x90909090(%eax),%dx
lds 0x90909090(%eax),%dx
movw $0x9090,0x90909090(%eax)
enterw $0x9090,$0x90
leavew
lretw $0x9090
lretw
iretw
rclw 0x90909090(%eax)
rclw %cl,0x90909090(%eax)
in $0x90,%ax
out %ax,$0x90
callw .+3+0x9090
ljmpw $0x9090,$0x9090
in (%dx),%ax
out %ax,(%dx)
notw 0x90909090(%eax)
callw *0x90909090(%eax)
lar 0x90909090(%eax),%dx
lsl 0x90909090(%eax),%dx
cmovo 0x90909090(%eax),%dx
cmovno 0x90909090(%eax),%dx
cmovb 0x90909090(%eax),%dx
cmovae 0x90909090(%eax),%dx
cmove 0x90909090(%eax),%dx
cmovne 0x90909090(%eax),%dx
cmovbe 0x90909090(%eax),%dx
cmova 0x90909090(%eax),%dx
cmovs 0x90909090(%eax),%dx
cmovns 0x90909090(%eax),%dx
cmovp 0x90909090(%eax),%dx
cmovnp 0x90909090(%eax),%dx
cmovl 0x90909090(%eax),%dx
cmovge 0x90909090(%eax),%dx
cmovle 0x90909090(%eax),%dx
cmovg 0x90909090(%eax),%dx
pushw %fs
popw %fs
bt %dx,0x90909090(%eax)
shld $0x90,%dx,0x90909090(%eax)
shld %cl,%dx,0x90909090(%eax)
pushw %gs
popw %gs
bts %dx,0x90909090(%eax)
shrd $0x90,%dx,0x90909090(%eax)
shrd %cl,%dx,0x90909090(%eax)
imul 0x90909090(%eax),%dx
cmpxchg %dx,0x90909090(%eax)
lss 0x90909090(%eax),%dx
btr %dx,0x90909090(%eax)
lfs 0x90909090(%eax),%dx
lgs 0x90909090(%eax),%dx
movzbw 0x90909090(%eax),%dx
btc %dx,0x90909090(%eax)
bsf 0x90909090(%eax),%dx
bsr 0x90909090(%eax),%dx
movsbw 0x90909090(%eax),%dx
xadd %dx,0x90909090(%eax)
xchg %ax,%ax
# Operand-size-sensitive forms and register/register encodings.
sldt %eax
sldt %ax
sldt (%eax)
smsw %eax
smsw %ax
smsw (%eax)
str %eax
str %ax
str (%eax)
shrd %cl,%edx,%eax
shld %cl,%edx,%eax
test %eax,%ebx
test %ebx,%eax
test (%eax),%ebx
int1
cmovpe 0x90909090(%eax),%edx
cmovpo 0x90909090(%eax),%edx
cmovpe 0x90909090(%eax),%dx
cmovpo 0x90909090(%eax),%dx
# Raw encodings the assembler will not emit itself: 0x82 group-1
# immediate forms (presumably decoded as aliases of 0x80 — verify
# against the expected dump), an EVEX sequence, test-with-redundant
# ModRM.reg forms, and shift-group encodings with reg field 6.
.byte 0x82, 0xc3, 0x01
.byte 0x82, 0xf3, 0x01
.byte 0x82, 0xd3, 0x01
.byte 0x82, 0xdb, 0x01
.byte 0x82, 0xe3, 0x01
.byte 0x82, 0xeb, 0x01
.byte 0x82, 0xf3, 0x01
.byte 0x82, 0xfb, 0x01
.byte 0x62, 0xf3, 0x7d, 0x08, 0x15, 0xe8, 0xab
.byte 0xf6, 0xc9, 0x01
.byte 0x66, 0xf7, 0xc9, 0x02, 0x00
.byte 0xf7, 0xc9, 0x04, 0x00, 0x00, 0x00
.byte 0xc0, 0xf0, 0x02
.byte 0xc1, 0xf0, 0x01
.byte 0xd0, 0xf0
.byte 0xd1, 0xf0
.byte 0xd2, 0xf0
.byte 0xd3, 0xf0
|
tactcomplabs/xbgas-binutils-gdb
| 14,692
|
gas/testsuite/gas/i386/x86-64-hlebad.s
|
# Check 64bit unsupported HLE instructions
.allow_index_reg
.text
_start:
# Tests for op imm8 al
xacquire adc $100,%al
xacquire lock adc $100,%al
lock xacquire adc $100,%al
xrelease adc $100,%al
xrelease lock adc $100,%al
lock xrelease adc $100,%al
# Tests for op imm16 ax
xacquire adc $1000,%ax
xacquire lock adc $1000,%ax
lock xacquire adc $1000,%ax
xrelease adc $1000,%ax
xrelease lock adc $1000,%ax
lock xrelease adc $1000,%ax
# Tests for op imm32 eax
xacquire adc $10000000,%eax
xacquire lock adc $10000000,%eax
lock xacquire adc $10000000,%eax
xrelease adc $10000000,%eax
xrelease lock adc $10000000,%eax
lock xrelease adc $10000000,%eax
# Tests for op imm32 rax
xacquire adc $10000000,%rax
xacquire lock adc $10000000,%rax
lock xacquire adc $10000000,%rax
xrelease adc $10000000,%rax
xrelease lock adc $10000000,%rax
lock xrelease adc $10000000,%rax
# Tests for op imm8 regb/m8
xacquire adcb $100,%cl
xacquire lock adcb $100,%cl
lock xacquire adcb $100,%cl
xrelease adcb $100,%cl
xrelease lock adcb $100,%cl
lock xrelease adcb $100,%cl
xacquire adcb $100,(%rcx)
xrelease adcb $100,(%rcx)
# Tests for op imm16 regs/m16
xacquire adcw $1000,%cx
xacquire lock adcw $1000,%cx
lock xacquire adcw $1000,%cx
xrelease adcw $1000,%cx
xrelease lock adcw $1000,%cx
lock xrelease adcw $1000,%cx
xacquire adcw $1000,(%rcx)
xrelease adcw $1000,(%rcx)
# Tests for op imm32 regl/m32
xacquire adcl $10000000,%ecx
xacquire lock adcl $10000000,%ecx
lock xacquire adcl $10000000,%ecx
xrelease adcl $10000000,%ecx
xrelease lock adcl $10000000,%ecx
lock xrelease adcl $10000000,%ecx
xacquire adcl $10000000,(%rcx)
xrelease adcl $10000000,(%rcx)
# Tests for op imm32 regq/m64
xacquire adcq $10000000,%rcx
xacquire lock adcq $10000000,%rcx
lock xacquire adcq $10000000,%rcx
xrelease adcq $10000000,%rcx
xrelease lock adcq $10000000,%rcx
lock xrelease adcq $10000000,%rcx
xacquire adcq $10000000,(%rcx)
xrelease adcq $10000000,(%rcx)
# Tests for op imm8 regs/m16
xacquire adcw $100,%cx
xacquire lock adcw $100,%cx
lock xacquire adcw $100,%cx
xrelease adcw $100,%cx
xrelease lock adcw $100,%cx
lock xrelease adcw $100,%cx
xacquire adcw $100,(%rcx)
xrelease adcw $100,(%rcx)
# Tests for op imm8 regl/m32
xacquire adcl $100,%ecx
xacquire lock adcl $100,%ecx
lock xacquire adcl $100,%ecx
xrelease adcl $100,%ecx
xrelease lock adcl $100,%ecx
lock xrelease adcl $100,%ecx
xacquire adcl $100,(%rcx)
xrelease adcl $100,(%rcx)
# Tests for op imm8 regq/m64
xacquire adcq $100,%rcx
xacquire lock adcq $100,%rcx
lock xacquire adcq $100,%rcx
xrelease adcq $100,%rcx
xrelease lock adcq $100,%rcx
lock xrelease adcq $100,%rcx
xacquire adcq $100,(%rcx)
xrelease adcq $100,(%rcx)
# Tests for op imm8 regb/m8
xacquire adcb $100,%cl
xacquire lock adcb $100,%cl
lock xacquire adcb $100,%cl
xrelease adcb $100,%cl
xrelease lock adcb $100,%cl
lock xrelease adcb $100,%cl
xacquire adcb $100,(%rcx)
xrelease adcb $100,(%rcx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adcb %al,%cl
xacquire lock adcb %al,%cl
lock xacquire adcb %al,%cl
xrelease adcb %al,%cl
xrelease lock adcb %al,%cl
lock xrelease adcb %al,%cl
xacquire adcb %al,(%rcx)
xrelease adcb %al,(%rcx)
xacquire adcb %cl,%al
xacquire lock adcb %cl,%al
lock xacquire adcb %cl,%al
xrelease adcb %cl,%al
xrelease lock adcb %cl,%al
lock xrelease adcb %cl,%al
xacquire adcb (%rcx),%al
xacquire lock adcb (%rcx),%al
lock xacquire adcb (%rcx),%al
xrelease adcb (%rcx),%al
xrelease lock adcb (%rcx),%al
lock xrelease adcb (%rcx),%al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adcw %ax,%cx
xacquire lock adcw %ax,%cx
lock xacquire adcw %ax,%cx
xrelease adcw %ax,%cx
xrelease lock adcw %ax,%cx
lock xrelease adcw %ax,%cx
xacquire adcw %ax,(%rcx)
xrelease adcw %ax,(%rcx)
xacquire adcw %cx,%ax
xacquire lock adcw %cx,%ax
lock xacquire adcw %cx,%ax
xrelease adcw %cx,%ax
xrelease lock adcw %cx,%ax
lock xrelease adcw %cx,%ax
xacquire adcw (%rcx),%ax
xacquire lock adcw (%rcx),%ax
lock xacquire adcw (%rcx),%ax
xrelease adcw (%rcx),%ax
xrelease lock adcw (%rcx),%ax
lock xrelease adcw (%rcx),%ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adcl %eax,%ecx
xacquire lock adcl %eax,%ecx
lock xacquire adcl %eax,%ecx
xrelease adcl %eax,%ecx
xrelease lock adcl %eax,%ecx
lock xrelease adcl %eax,%ecx
xacquire adcl %eax,(%rcx)
xrelease adcl %eax,(%rcx)
xacquire adcl %ecx,%eax
xacquire lock adcl %ecx,%eax
lock xacquire adcl %ecx,%eax
xrelease adcl %ecx,%eax
xrelease lock adcl %ecx,%eax
lock xrelease adcl %ecx,%eax
xacquire adcl (%rcx),%eax
xacquire lock adcl (%rcx),%eax
lock xacquire adcl (%rcx),%eax
xrelease adcl (%rcx),%eax
xrelease lock adcl (%rcx),%eax
lock xrelease adcl (%rcx),%eax
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire adcq %rax,%rcx
xacquire lock adcq %rax,%rcx
lock xacquire adcq %rax,%rcx
xrelease adcq %rax,%rcx
xrelease lock adcq %rax,%rcx
lock xrelease adcq %rax,%rcx
xacquire adcq %rax,(%rcx)
xrelease adcq %rax,(%rcx)
xacquire adcq %rcx,%rax
xacquire lock adcq %rcx,%rax
lock xacquire adcq %rcx,%rax
xrelease adcq %rcx,%rax
xrelease lock adcq %rcx,%rax
lock xrelease adcq %rcx,%rax
xacquire adcq (%rcx),%rax
xacquire lock adcq (%rcx),%rax
lock xacquire adcq (%rcx),%rax
xrelease adcq (%rcx),%rax
xrelease lock adcq (%rcx),%rax
lock xrelease adcq (%rcx),%rax
# Tests for op regs, regs/m16
xacquire btcw %ax,%cx
xacquire lock btcw %ax,%cx
lock xacquire btcw %ax,%cx
xrelease btcw %ax,%cx
xrelease lock btcw %ax,%cx
lock xrelease btcw %ax,%cx
xacquire btcw %ax,(%rcx)
xrelease btcw %ax,(%rcx)
# Tests for op regl regl/m32
xacquire btcl %eax,%ecx
xacquire lock btcl %eax,%ecx
lock xacquire btcl %eax,%ecx
xrelease btcl %eax,%ecx
xrelease lock btcl %eax,%ecx
lock xrelease btcl %eax,%ecx
xacquire btcl %eax,(%rcx)
xrelease btcl %eax,(%rcx)
# Tests for op regq regq/m64
xacquire btcq %rax,%rcx
xacquire lock btcq %rax,%rcx
lock xacquire btcq %rax,%rcx
xrelease btcq %rax,%rcx
xrelease lock btcq %rax,%rcx
lock xrelease btcq %rax,%rcx
xacquire btcq %rax,(%rcx)
xrelease btcq %rax,(%rcx)
# Tests for op regb/m8
xacquire decb %cl
xacquire lock decb %cl
lock xacquire decb %cl
xrelease decb %cl
xrelease lock decb %cl
lock xrelease decb %cl
xacquire decb (%rcx)
xrelease decb (%rcx)
# Tests for op regs/m16
xacquire decw %cx
xacquire lock decw %cx
lock xacquire decw %cx
xrelease decw %cx
xrelease lock decw %cx
lock xrelease decw %cx
xacquire decw (%rcx)
xrelease decw (%rcx)
# Tests for op regl/m32
xacquire decl %ecx
xacquire lock decl %ecx
lock xacquire decl %ecx
xrelease decl %ecx
xrelease lock decl %ecx
lock xrelease decl %ecx
xacquire decl (%rcx)
xrelease decl (%rcx)
# Tests for op regq/m64
xacquire decq %rcx
xacquire lock decq %rcx
lock xacquire decq %rcx
xrelease decq %rcx
xrelease lock decq %rcx
lock xrelease decq %rcx
xacquire decq (%rcx)
xrelease decq (%rcx)
# Tests for op m64
xacquire cmpxchg8bq (%rcx)
xrelease cmpxchg8bq (%rcx)
# Tests for op regb, regb/m8
xacquire cmpxchgb %cl,%al
xacquire lock cmpxchgb %cl,%al
lock xacquire cmpxchgb %cl,%al
xrelease cmpxchgb %cl,%al
xrelease lock cmpxchgb %cl,%al
lock xrelease cmpxchgb %cl,%al
xacquire cmpxchgb %cl,(%rcx)
xrelease cmpxchgb %cl,(%rcx)
.intel_syntax noprefix
# Tests for op imm8 al
xacquire adc al,100
xacquire lock adc al,100
lock xacquire adc al,100
xrelease adc al,100
xrelease lock adc al,100
lock xrelease adc al,100
# Tests for op imm16 ax
xacquire adc ax,1000
xacquire lock adc ax,1000
lock xacquire adc ax,1000
xrelease adc ax,1000
xrelease lock adc ax,1000
lock xrelease adc ax,1000
# Tests for op imm32 eax
xacquire adc eax,10000000
xacquire lock adc eax,10000000
lock xacquire adc eax,10000000
xrelease adc eax,10000000
xrelease lock adc eax,10000000
lock xrelease adc eax,10000000
# Tests for op imm32 rax
xacquire adc rax,10000000
xacquire lock adc rax,10000000
lock xacquire adc rax,10000000
xrelease adc rax,10000000
xrelease lock adc rax,10000000
lock xrelease adc rax,10000000
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [rcx],100
xrelease adc BYTE PTR [rcx],100
# Tests for op imm16 regs/m16
xacquire adc cx,1000
xacquire lock adc cx,1000
lock xacquire adc cx,1000
xrelease adc cx,1000
xrelease lock adc cx,1000
lock xrelease adc cx,1000
xacquire adc WORD PTR [rcx],1000
xrelease adc WORD PTR [rcx],1000
# Tests for op imm32 regl/m32
xacquire adc ecx,10000000
xacquire lock adc ecx,10000000
lock xacquire adc ecx,10000000
xrelease adc ecx,10000000
xrelease lock adc ecx,10000000
lock xrelease adc ecx,10000000
xacquire adc DWORD PTR [rcx],10000000
xrelease adc DWORD PTR [rcx],10000000
# Tests for op imm32 regq/m64
xacquire adc rcx,10000000
xacquire lock adc rcx,10000000
lock xacquire adc rcx,10000000
xrelease adc rcx,10000000
xrelease lock adc rcx,10000000
lock xrelease adc rcx,10000000
xacquire adc QWORD PTR [rcx],10000000
xrelease adc QWORD PTR [rcx],10000000
# Tests for op imm8 regs/m16
xacquire adc cx,100
xacquire lock adc cx,100
lock xacquire adc cx,100
xrelease adc cx,100
xrelease lock adc cx,100
lock xrelease adc cx,100
xacquire adc WORD PTR [rcx],100
xrelease adc WORD PTR [rcx],100
# Tests for op imm8 regl/m32
xacquire adc ecx,100
xacquire lock adc ecx,100
lock xacquire adc ecx,100
xrelease adc ecx,100
xrelease lock adc ecx,100
lock xrelease adc ecx,100
xacquire adc DWORD PTR [rcx],100
xrelease adc DWORD PTR [rcx],100
# Tests for op imm8 regq/m64
xacquire adc rcx,100
xacquire lock adc rcx,100
lock xacquire adc rcx,100
xrelease adc rcx,100
xrelease lock adc rcx,100
lock xrelease adc rcx,100
xacquire adc QWORD PTR [rcx],100
xrelease adc QWORD PTR [rcx],100
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [rcx],100
xrelease adc BYTE PTR [rcx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adc cl,al
xacquire lock adc cl,al
lock xacquire adc cl,al
xrelease adc cl,al
xrelease lock adc cl,al
lock xrelease adc cl,al
xacquire adc BYTE PTR [rcx],al
xrelease adc BYTE PTR [rcx],al
xacquire adc al,cl
xacquire lock adc al,cl
lock xacquire adc al,cl
xrelease adc al,cl
xrelease lock adc al,cl
lock xrelease adc al,cl
xacquire adc al,BYTE PTR [rcx]
xacquire lock adc al,BYTE PTR [rcx]
lock xacquire adc al,BYTE PTR [rcx]
xrelease adc al,BYTE PTR [rcx]
xrelease lock adc al,BYTE PTR [rcx]
lock xrelease adc al,BYTE PTR [rcx]
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adc cx,ax
xacquire lock adc cx,ax
lock xacquire adc cx,ax
xrelease adc cx,ax
xrelease lock adc cx,ax
lock xrelease adc cx,ax
xacquire adc WORD PTR [rcx],ax
xrelease adc WORD PTR [rcx],ax
xacquire adc ax,cx
xacquire lock adc ax,cx
lock xacquire adc ax,cx
xrelease adc ax,cx
xrelease lock adc ax,cx
lock xrelease adc ax,cx
xacquire adc ax,WORD PTR [rcx]
xacquire lock adc ax,WORD PTR [rcx]
lock xacquire adc ax,WORD PTR [rcx]
xrelease adc ax,WORD PTR [rcx]
xrelease lock adc ax,WORD PTR [rcx]
lock xrelease adc ax,WORD PTR [rcx]
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adc ecx,eax
xacquire lock adc ecx,eax
lock xacquire adc ecx,eax
xrelease adc ecx,eax
xrelease lock adc ecx,eax
lock xrelease adc ecx,eax
xacquire adc DWORD PTR [rcx],eax
xrelease adc DWORD PTR [rcx],eax
xacquire adc eax,ecx
xacquire lock adc eax,ecx
lock xacquire adc eax,ecx
xrelease adc eax,ecx
xrelease lock adc eax,ecx
lock xrelease adc eax,ecx
xacquire adc eax,DWORD PTR [rcx]
xacquire lock adc eax,DWORD PTR [rcx]
lock xacquire adc eax,DWORD PTR [rcx]
xrelease adc eax,DWORD PTR [rcx]
xrelease lock adc eax,DWORD PTR [rcx]
lock xrelease adc eax,DWORD PTR [rcx]
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire adc rcx,rax
xacquire lock adc rcx,rax
lock xacquire adc rcx,rax
xrelease adc rcx,rax
xrelease lock adc rcx,rax
lock xrelease adc rcx,rax
xacquire adc QWORD PTR [rcx],rax
xrelease adc QWORD PTR [rcx],rax
xacquire adc rax,rcx
xacquire lock adc rax,rcx
lock xacquire adc rax,rcx
xrelease adc rax,rcx
xrelease lock adc rax,rcx
lock xrelease adc rax,rcx
xacquire adc rax,QWORD PTR [rcx]
xacquire lock adc rax,QWORD PTR [rcx]
lock xacquire adc rax,QWORD PTR [rcx]
xrelease adc rax,QWORD PTR [rcx]
xrelease lock adc rax,QWORD PTR [rcx]
lock xrelease adc rax,QWORD PTR [rcx]
# Tests for op regs, regs/m16
xacquire btc cx,ax
xacquire lock btc cx,ax
lock xacquire btc cx,ax
xrelease btc cx,ax
xrelease lock btc cx,ax
lock xrelease btc cx,ax
xacquire btc WORD PTR [rcx],ax
xrelease btc WORD PTR [rcx],ax
# Tests for op regl regl/m32
xacquire btc ecx,eax
xacquire lock btc ecx,eax
lock xacquire btc ecx,eax
xrelease btc ecx,eax
xrelease lock btc ecx,eax
lock xrelease btc ecx,eax
xacquire btc DWORD PTR [rcx],eax
xrelease btc DWORD PTR [rcx],eax
# Tests for op regq regq/m64
xacquire btc rcx,rax
xacquire lock btc rcx,rax
lock xacquire btc rcx,rax
xrelease btc rcx,rax
xrelease lock btc rcx,rax
lock xrelease btc rcx,rax
xacquire btc QWORD PTR [rcx],rax
xrelease btc QWORD PTR [rcx],rax
# Tests for op regb/m8
xacquire dec cl
xacquire lock dec cl
lock xacquire dec cl
xrelease dec cl
xrelease lock dec cl
lock xrelease dec cl
xacquire dec BYTE PTR [rcx]
xrelease dec BYTE PTR [rcx]
# Tests for op regs/m16
xacquire dec cx
xacquire lock dec cx
lock xacquire dec cx
xrelease dec cx
xrelease lock dec cx
lock xrelease dec cx
xacquire dec WORD PTR [rcx]
xrelease dec WORD PTR [rcx]
# Tests for op regl/m32
xacquire dec ecx
xacquire lock dec ecx
lock xacquire dec ecx
xrelease dec ecx
xrelease lock dec ecx
lock xrelease dec ecx
xacquire dec DWORD PTR [rcx]
xrelease dec DWORD PTR [rcx]
# Tests for op regq/m64
xacquire dec rcx
xacquire lock dec rcx
lock xacquire dec rcx
xrelease dec rcx
xrelease lock dec rcx
lock xrelease dec rcx
xacquire dec QWORD PTR [rcx]
xrelease dec QWORD PTR [rcx]
# Tests for op m64
xacquire cmpxchg8b QWORD PTR [rcx]
xrelease cmpxchg8b QWORD PTR [rcx]
# Tests for op regb, regb/m8
xacquire cmpxchg al,cl
xacquire lock cmpxchg al,cl
lock xacquire cmpxchg al,cl
xrelease cmpxchg al,cl
xrelease lock cmpxchg al,cl
lock xrelease cmpxchg al,cl
xacquire cmpxchg BYTE PTR [rcx],cl
xrelease cmpxchg BYTE PTR [rcx],cl
# ==== next file: gas/testsuite/gas/i386/avx2-wig.s (repo tactcomplabs/xbgas-binutils-gdb, 2881 bytes) ====
# Check AVX2 WIG instructions
# WIG ("W ignored"): the VEX.W bit does not change the meaning of these
# encodings, so each mnemonic is exercised once per operand form.  The
# assembled bytes are matched by the companion .d dump file -- keep the
# instruction lines byte-for-byte unchanged.
.allow_index_reg
.text
_start:
# Non-temporal 256-bit load / multiple sum of absolute differences.
vmovntdqa (%ecx),%ymm4
vmpsadbw $7,%ymm4,%ymm6,%ymm2
# Packed absolute value (byte/dword/word).
vpabsb %ymm4,%ymm6
vpabsd %ymm4,%ymm6
vpabsw %ymm4,%ymm6
# Pack with signed/unsigned saturation.
vpackssdw %ymm4,%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
# Packed add: wrapping and saturating forms.
vpaddb %ymm4,%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
# Byte alignment, bitwise logic, averages, word blend.
vpalignr $7,%ymm4,%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
# Packed compares: equal, then signed greater-than.
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
# Horizontal add/subtract.
vphaddd %ymm4,%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
# Multiply-accumulate.
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
# Packed min/max, signed and unsigned variants.
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
# Byte-mask extraction, then sign/zero extension (xmm source, ymm dest).
vpmovmskb %ymm4,%ecx
vpmovsxbd %xmm4,%ymm6
vpmovsxbq %xmm4,%ymm4
vpmovsxbw %xmm4,%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovzxbd %xmm4,%ymm6
vpmovzxbq %xmm4,%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwq %xmm4,%ymm6
# Packed multiplies.
vpmuldq %ymm4,%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
# OR, sum of absolute differences, byte/word/dword shuffles.
vpor %ymm4,%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufd $7,%ymm6,%ymm2
vpshufhw $7,%ymm6,%ymm2
vpshuflw $7,%ymm6,%ymm2
# Sign operations.
vpsignb %ymm4,%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
# Immediate-count shifts: logical left, arithmetic right, logical right.
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
# Packed subtract: wrapping and saturating forms.
vpsubb %ymm4,%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
# Unpack/interleave high and low halves.
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
# ==== next file: gas/testsuite/gas/i386/avx512f_gfni.s (repo tactcomplabs/xbgas-binutils-gdb, 2330 bytes) ====
# Check 32bit AVX512F,GFNI instructions
# Each GF(2^8) instruction (affine transform, affine-inverse transform,
# multiply) is exercised with register, masked {%k7}, zeroing-masked
# {%k7}{z}, SIB-memory, Disp8-compressible memory, and (for the affine
# forms) {1to8} quadword-broadcast operands -- first in AT&T syntax,
# then repeated in Intel syntax below .intel_syntax noprefix.
# Instruction lines are matched by the companion .d dump; keep them
# byte-for-byte unchanged.
.allow_index_reg
.text
_start:
# AT&T syntax forms.
vgf2p8affineqb $0xab, %zmm4, %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineqb $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F,GFNI
vgf2p8affineqb $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F,GFNI
vgf2p8affineqb $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineqb $123, 8128(%edx), %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8affineqb $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8affineinvqb $0xab, %zmm4, %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineinvqb $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F,GFNI
vgf2p8affineinvqb $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F,GFNI
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineinvqb $123, 8128(%edx), %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8mulb %zmm4, %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8mulb %zmm4, %zmm5, %zmm6{%k7} # AVX512F,GFNI
vgf2p8mulb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F,GFNI
vgf2p8mulb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8mulb 8128(%edx), %zmm5, %zmm6 # AVX512F,GFNI Disp8
# Intel syntax forms (same operand coverage, reversed operand order).
.intel_syntax noprefix
vgf2p8affineqb zmm6, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineqb zmm6{k7}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineqb zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineqb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F,GFNI
vgf2p8affineqb zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F,GFNI Disp8
vgf2p8affineqb zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512F,GFNI Disp8
vgf2p8affineinvqb zmm6, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineinvqb zmm6{k7}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineinvqb zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineinvqb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F,GFNI
vgf2p8affineinvqb zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F,GFNI Disp8
vgf2p8affineinvqb zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512F,GFNI Disp8
vgf2p8mulb zmm6, zmm5, zmm4 # AVX512F,GFNI
vgf2p8mulb zmm6{k7}, zmm5, zmm4 # AVX512F,GFNI
vgf2p8mulb zmm6{k7}{z}, zmm5, zmm4 # AVX512F,GFNI
vgf2p8mulb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,GFNI
vgf2p8mulb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,GFNI Disp8
# ==== next file: gas/testsuite/gas/i386/avx512_vpopcntdq.s (repo tactcomplabs/xbgas-binutils-gdb, 3221 bytes) ====
# Check 32bit AVX512_VPOPCNTDQ instructions
# Exercises vpopcntd/vpopcntq with register, masked {%k7}, zeroing-masked
# {%k7}{z}, memory, and broadcast operands, in AT&T and then Intel syntax.
# Displacement pairs straddle the EVEX compressed-Disp8 boundary: lines
# tagged "Disp8" use a displacement that fits the scaled 8-bit form
# (e.g. +/-8128 for a 64-byte full-vector access, +/-508 for a {1to16}
# dword broadcast, +/-1016 for a {1to8} qword broadcast), while the
# untagged neighbour just exceeds it and needs a 32-bit displacement.
# Instruction lines are matched by the companion .d dump; keep them
# byte-for-byte unchanged.
.allow_index_reg
.text
_start:
# AT&T syntax: dword population count.
vpopcntd %zmm5, %zmm6 # AVX512_VPOPCNTDQ
vpopcntd %zmm5, %zmm6{%k7} # AVX512_VPOPCNTDQ
vpopcntd %zmm5, %zmm6{%k7}{z} # AVX512_VPOPCNTDQ
vpopcntd (%ecx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd -123456(%esp,%esi,8), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd (%eax){1to16}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntd 8128(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd 8192(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd -8192(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd -8256(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntd 508(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd 512(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntd -512(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntd -516(%edx){1to16}, %zmm6 # AVX512_VPOPCNTDQ
# AT&T syntax: qword population count.
vpopcntq %zmm5, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq %zmm5, %zmm6{%k7} # AVX512_VPOPCNTDQ
vpopcntq %zmm5, %zmm6{%k7}{z} # AVX512_VPOPCNTDQ
vpopcntq (%ecx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq -123456(%esp,%esi,8), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq (%eax){1to8}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq 8128(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq 8192(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq -8192(%edx), %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq -8256(%edx), %zmm6 # AVX512_VPOPCNTDQ
vpopcntq 1016(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq 1024(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ
vpopcntq -1024(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ Disp8
vpopcntq -1032(%edx){1to8}, %zmm6 # AVX512_VPOPCNTDQ
# Intel syntax: same coverage, including the DWORD/QWORD BCST spellings.
.intel_syntax noprefix
vpopcntd zmm6, zmm5 # AVX512_VPOPCNTDQ
vpopcntd zmm6{k7}, zmm5 # AVX512_VPOPCNTDQ
vpopcntd zmm6{k7}{z}, zmm5 # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [ecx] # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512_VPOPCNTDQ
vpopcntd zmm6, [eax]{1to16} # AVX512_VPOPCNTDQ
vpopcntd zmm6, DWORD BCST [eax] # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [edx+8128] # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, ZMMWORD PTR [edx+8192] # AVX512_VPOPCNTDQ
vpopcntd zmm6, ZMMWORD PTR [edx-8192] # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, ZMMWORD PTR [edx-8256] # AVX512_VPOPCNTDQ
vpopcntd zmm6, [edx+508]{1to16} # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, [edx+512]{1to16} # AVX512_VPOPCNTDQ
vpopcntd zmm6, [edx-512]{1to16} # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm6, [edx-516]{1to16} # AVX512_VPOPCNTDQ
vpopcntq zmm6, zmm5 # AVX512_VPOPCNTDQ
vpopcntq zmm6{k7}, zmm5 # AVX512_VPOPCNTDQ
vpopcntq zmm6{k7}{z}, zmm5 # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [ecx] # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512_VPOPCNTDQ
vpopcntq zmm6, [eax]{1to8} # AVX512_VPOPCNTDQ
vpopcntq zmm6, QWORD BCST [eax] # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [edx+8128] # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, ZMMWORD PTR [edx+8192] # AVX512_VPOPCNTDQ
vpopcntq zmm6, ZMMWORD PTR [edx-8192] # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, ZMMWORD PTR [edx-8256] # AVX512_VPOPCNTDQ
vpopcntq zmm6, [edx+1016]{1to8} # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, [edx+1024]{1to8} # AVX512_VPOPCNTDQ
vpopcntq zmm6, [edx-1024]{1to8} # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm6, [edx-1032]{1to8} # AVX512_VPOPCNTDQ
# ==== next file: gas/testsuite/gas/i386/hle.s (repo tactcomplabs/xbgas-binutils-gdb, 46714 bytes) ====
# Check 32bit HLE instructions
.allow_index_reg
.text
_start:
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%ecx)
lock xacquire adcb $100,(%ecx)
xrelease lock adcb $100,(%ecx)
lock xrelease adcb $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcb $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcb $100,(%ecx)
xacquire lock addb $100,(%ecx)
lock xacquire addb $100,(%ecx)
xrelease lock addb $100,(%ecx)
lock xrelease addb $100,(%ecx)
.byte 0xf0; .byte 0xf2; addb $100,(%ecx)
.byte 0xf0; .byte 0xf3; addb $100,(%ecx)
xacquire lock andb $100,(%ecx)
lock xacquire andb $100,(%ecx)
xrelease lock andb $100,(%ecx)
lock xrelease andb $100,(%ecx)
.byte 0xf0; .byte 0xf2; andb $100,(%ecx)
.byte 0xf0; .byte 0xf3; andb $100,(%ecx)
xrelease movb $100,(%ecx)
xacquire lock orb $100,(%ecx)
lock xacquire orb $100,(%ecx)
xrelease lock orb $100,(%ecx)
lock xrelease orb $100,(%ecx)
.byte 0xf0; .byte 0xf2; orb $100,(%ecx)
.byte 0xf0; .byte 0xf3; orb $100,(%ecx)
xacquire lock sbbb $100,(%ecx)
lock xacquire sbbb $100,(%ecx)
xrelease lock sbbb $100,(%ecx)
lock xrelease sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%ecx)
xacquire lock subb $100,(%ecx)
lock xacquire subb $100,(%ecx)
xrelease lock subb $100,(%ecx)
lock xrelease subb $100,(%ecx)
.byte 0xf0; .byte 0xf2; subb $100,(%ecx)
.byte 0xf0; .byte 0xf3; subb $100,(%ecx)
xacquire lock xorb $100,(%ecx)
lock xacquire xorb $100,(%ecx)
xrelease lock xorb $100,(%ecx)
lock xrelease xorb $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorb $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorb $100,(%ecx)
# Tests for op imm16 regs/m16
xacquire lock adcw $1000,(%ecx)
lock xacquire adcw $1000,(%ecx)
xrelease lock adcw $1000,(%ecx)
lock xrelease adcw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; adcw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; adcw $1000,(%ecx)
xacquire lock addw $1000,(%ecx)
lock xacquire addw $1000,(%ecx)
xrelease lock addw $1000,(%ecx)
lock xrelease addw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; addw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; addw $1000,(%ecx)
xacquire lock andw $1000,(%ecx)
lock xacquire andw $1000,(%ecx)
xrelease lock andw $1000,(%ecx)
lock xrelease andw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; andw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; andw $1000,(%ecx)
xrelease movw $1000,(%ecx)
xacquire lock orw $1000,(%ecx)
lock xacquire orw $1000,(%ecx)
xrelease lock orw $1000,(%ecx)
lock xrelease orw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; orw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; orw $1000,(%ecx)
xacquire lock sbbw $1000,(%ecx)
lock xacquire sbbw $1000,(%ecx)
xrelease lock sbbw $1000,(%ecx)
lock xrelease sbbw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; sbbw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; sbbw $1000,(%ecx)
xacquire lock subw $1000,(%ecx)
lock xacquire subw $1000,(%ecx)
xrelease lock subw $1000,(%ecx)
lock xrelease subw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; subw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; subw $1000,(%ecx)
xacquire lock xorw $1000,(%ecx)
lock xacquire xorw $1000,(%ecx)
xrelease lock xorw $1000,(%ecx)
lock xrelease xorw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; xorw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; xorw $1000,(%ecx)
# Tests for op imm32 regl/m32
xacquire lock adcl $10000000,(%ecx)
lock xacquire adcl $10000000,(%ecx)
xrelease lock adcl $10000000,(%ecx)
lock xrelease adcl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; adcl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; adcl $10000000,(%ecx)
xacquire lock addl $10000000,(%ecx)
lock xacquire addl $10000000,(%ecx)
xrelease lock addl $10000000,(%ecx)
lock xrelease addl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; addl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; addl $10000000,(%ecx)
xacquire lock andl $10000000,(%ecx)
lock xacquire andl $10000000,(%ecx)
xrelease lock andl $10000000,(%ecx)
lock xrelease andl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; andl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; andl $10000000,(%ecx)
xrelease movl $10000000,(%ecx)
xacquire lock orl $10000000,(%ecx)
lock xacquire orl $10000000,(%ecx)
xrelease lock orl $10000000,(%ecx)
lock xrelease orl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; orl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; orl $10000000,(%ecx)
xacquire lock sbbl $10000000,(%ecx)
lock xacquire sbbl $10000000,(%ecx)
xrelease lock sbbl $10000000,(%ecx)
lock xrelease sbbl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; sbbl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; sbbl $10000000,(%ecx)
xacquire lock subl $10000000,(%ecx)
lock xacquire subl $10000000,(%ecx)
xrelease lock subl $10000000,(%ecx)
lock xrelease subl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; subl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; subl $10000000,(%ecx)
xacquire lock xorl $10000000,(%ecx)
lock xacquire xorl $10000000,(%ecx)
xrelease lock xorl $10000000,(%ecx)
lock xrelease xorl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; xorl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; xorl $10000000,(%ecx)
# Tests for op imm8 regs/m16
xacquire lock adcw $100,(%ecx)
lock xacquire adcw $100,(%ecx)
xrelease lock adcw $100,(%ecx)
lock xrelease adcw $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcw $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcw $100,(%ecx)
xacquire lock addw $100,(%ecx)
lock xacquire addw $100,(%ecx)
xrelease lock addw $100,(%ecx)
lock xrelease addw $100,(%ecx)
.byte 0xf0; .byte 0xf2; addw $100,(%ecx)
.byte 0xf0; .byte 0xf3; addw $100,(%ecx)
xacquire lock andw $100,(%ecx)
lock xacquire andw $100,(%ecx)
xrelease lock andw $100,(%ecx)
lock xrelease andw $100,(%ecx)
.byte 0xf0; .byte 0xf2; andw $100,(%ecx)
.byte 0xf0; .byte 0xf3; andw $100,(%ecx)
xacquire lock btcw $100,(%ecx)
lock xacquire btcw $100,(%ecx)
xrelease lock btcw $100,(%ecx)
lock xrelease btcw $100,(%ecx)
.byte 0xf0; .byte 0xf2; btcw $100,(%ecx)
.byte 0xf0; .byte 0xf3; btcw $100,(%ecx)
xacquire lock btrw $100,(%ecx)
lock xacquire btrw $100,(%ecx)
xrelease lock btrw $100,(%ecx)
lock xrelease btrw $100,(%ecx)
.byte 0xf0; .byte 0xf2; btrw $100,(%ecx)
.byte 0xf0; .byte 0xf3; btrw $100,(%ecx)
xacquire lock btsw $100,(%ecx)
lock xacquire btsw $100,(%ecx)
xrelease lock btsw $100,(%ecx)
lock xrelease btsw $100,(%ecx)
.byte 0xf0; .byte 0xf2; btsw $100,(%ecx)
.byte 0xf0; .byte 0xf3; btsw $100,(%ecx)
xrelease movw $100,(%ecx)
xacquire lock orw $100,(%ecx)
lock xacquire orw $100,(%ecx)
xrelease lock orw $100,(%ecx)
lock xrelease orw $100,(%ecx)
.byte 0xf0; .byte 0xf2; orw $100,(%ecx)
.byte 0xf0; .byte 0xf3; orw $100,(%ecx)
xacquire lock sbbw $100,(%ecx)
lock xacquire sbbw $100,(%ecx)
xrelease lock sbbw $100,(%ecx)
lock xrelease sbbw $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbw $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbw $100,(%ecx)
xacquire lock subw $100,(%ecx)
lock xacquire subw $100,(%ecx)
xrelease lock subw $100,(%ecx)
lock xrelease subw $100,(%ecx)
.byte 0xf0; .byte 0xf2; subw $100,(%ecx)
.byte 0xf0; .byte 0xf3; subw $100,(%ecx)
xacquire lock xorw $100,(%ecx)
lock xacquire xorw $100,(%ecx)
xrelease lock xorw $100,(%ecx)
lock xrelease xorw $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorw $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorw $100,(%ecx)
# Tests for op imm8 regl/m32
xacquire lock adcl $100,(%ecx)
lock xacquire adcl $100,(%ecx)
xrelease lock adcl $100,(%ecx)
lock xrelease adcl $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcl $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcl $100,(%ecx)
xacquire lock addl $100,(%ecx)
lock xacquire addl $100,(%ecx)
xrelease lock addl $100,(%ecx)
lock xrelease addl $100,(%ecx)
.byte 0xf0; .byte 0xf2; addl $100,(%ecx)
.byte 0xf0; .byte 0xf3; addl $100,(%ecx)
xacquire lock andl $100,(%ecx)
lock xacquire andl $100,(%ecx)
xrelease lock andl $100,(%ecx)
lock xrelease andl $100,(%ecx)
.byte 0xf0; .byte 0xf2; andl $100,(%ecx)
.byte 0xf0; .byte 0xf3; andl $100,(%ecx)
xacquire lock btcl $100,(%ecx)
lock xacquire btcl $100,(%ecx)
xrelease lock btcl $100,(%ecx)
lock xrelease btcl $100,(%ecx)
.byte 0xf0; .byte 0xf2; btcl $100,(%ecx)
.byte 0xf0; .byte 0xf3; btcl $100,(%ecx)
xacquire lock btrl $100,(%ecx)
lock xacquire btrl $100,(%ecx)
xrelease lock btrl $100,(%ecx)
lock xrelease btrl $100,(%ecx)
.byte 0xf0; .byte 0xf2; btrl $100,(%ecx)
.byte 0xf0; .byte 0xf3; btrl $100,(%ecx)
xacquire lock btsl $100,(%ecx)
lock xacquire btsl $100,(%ecx)
xrelease lock btsl $100,(%ecx)
lock xrelease btsl $100,(%ecx)
.byte 0xf0; .byte 0xf2; btsl $100,(%ecx)
.byte 0xf0; .byte 0xf3; btsl $100,(%ecx)
xrelease movl $100,(%ecx)
xacquire lock orl $100,(%ecx)
lock xacquire orl $100,(%ecx)
xrelease lock orl $100,(%ecx)
lock xrelease orl $100,(%ecx)
.byte 0xf0; .byte 0xf2; orl $100,(%ecx)
.byte 0xf0; .byte 0xf3; orl $100,(%ecx)
xacquire lock sbbl $100,(%ecx)
lock xacquire sbbl $100,(%ecx)
xrelease lock sbbl $100,(%ecx)
lock xrelease sbbl $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbl $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbl $100,(%ecx)
xacquire lock subl $100,(%ecx)
lock xacquire subl $100,(%ecx)
xrelease lock subl $100,(%ecx)
lock xrelease subl $100,(%ecx)
.byte 0xf0; .byte 0xf2; subl $100,(%ecx)
.byte 0xf0; .byte 0xf3; subl $100,(%ecx)
xacquire lock xorl $100,(%ecx)
lock xacquire xorl $100,(%ecx)
xrelease lock xorl $100,(%ecx)
lock xrelease xorl $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorl $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorl $100,(%ecx)
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%ecx)
lock xacquire adcb $100,(%ecx)
xrelease lock adcb $100,(%ecx)
lock xrelease adcb $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcb $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcb $100,(%ecx)
xacquire lock addb $100,(%ecx)
lock xacquire addb $100,(%ecx)
xrelease lock addb $100,(%ecx)
lock xrelease addb $100,(%ecx)
.byte 0xf0; .byte 0xf2; addb $100,(%ecx)
.byte 0xf0; .byte 0xf3; addb $100,(%ecx)
xacquire lock andb $100,(%ecx)
lock xacquire andb $100,(%ecx)
xrelease lock andb $100,(%ecx)
lock xrelease andb $100,(%ecx)
.byte 0xf0; .byte 0xf2; andb $100,(%ecx)
.byte 0xf0; .byte 0xf3; andb $100,(%ecx)
xrelease movb $100,(%ecx)
xacquire lock orb $100,(%ecx)
lock xacquire orb $100,(%ecx)
xrelease lock orb $100,(%ecx)
lock xrelease orb $100,(%ecx)
.byte 0xf0; .byte 0xf2; orb $100,(%ecx)
.byte 0xf0; .byte 0xf3; orb $100,(%ecx)
xacquire lock sbbb $100,(%ecx)
lock xacquire sbbb $100,(%ecx)
xrelease lock sbbb $100,(%ecx)
lock xrelease sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%ecx)
xacquire lock subb $100,(%ecx)
lock xacquire subb $100,(%ecx)
xrelease lock subb $100,(%ecx)
lock xrelease subb $100,(%ecx)
.byte 0xf0; .byte 0xf2; subb $100,(%ecx)
.byte 0xf0; .byte 0xf3; subb $100,(%ecx)
xacquire lock xorb $100,(%ecx)
lock xacquire xorb $100,(%ecx)
xrelease lock xorb $100,(%ecx)
lock xrelease xorb $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorb $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorb $100,(%ecx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire lock adcb %al,(%ecx)
lock xacquire adcb %al,(%ecx)
xrelease lock adcb %al,(%ecx)
lock xrelease adcb %al,(%ecx)
.byte 0xf0; .byte 0xf2; adcb %al,(%ecx)
.byte 0xf0; .byte 0xf3; adcb %al,(%ecx)
xacquire lock addb %al,(%ecx)
lock xacquire addb %al,(%ecx)
xrelease lock addb %al,(%ecx)
lock xrelease addb %al,(%ecx)
.byte 0xf0; .byte 0xf2; addb %al,(%ecx)
.byte 0xf0; .byte 0xf3; addb %al,(%ecx)
xacquire lock andb %al,(%ecx)
lock xacquire andb %al,(%ecx)
xrelease lock andb %al,(%ecx)
lock xrelease andb %al,(%ecx)
.byte 0xf0; .byte 0xf2; andb %al,(%ecx)
.byte 0xf0; .byte 0xf3; andb %al,(%ecx)
xrelease movb %al,(%ecx)
xrelease movb %al,0x12345678
xacquire lock orb %al,(%ecx)
lock xacquire orb %al,(%ecx)
xrelease lock orb %al,(%ecx)
lock xrelease orb %al,(%ecx)
.byte 0xf0; .byte 0xf2; orb %al,(%ecx)
.byte 0xf0; .byte 0xf3; orb %al,(%ecx)
xacquire lock sbbb %al,(%ecx)
lock xacquire sbbb %al,(%ecx)
xrelease lock sbbb %al,(%ecx)
lock xrelease sbbb %al,(%ecx)
.byte 0xf0; .byte 0xf2; sbbb %al,(%ecx)
.byte 0xf0; .byte 0xf3; sbbb %al,(%ecx)
xacquire lock subb %al,(%ecx)
lock xacquire subb %al,(%ecx)
xrelease lock subb %al,(%ecx)
lock xrelease subb %al,(%ecx)
.byte 0xf0; .byte 0xf2; subb %al,(%ecx)
.byte 0xf0; .byte 0xf3; subb %al,(%ecx)
xacquire lock xchgb %al,(%ecx)
lock xacquire xchgb %al,(%ecx)
xacquire xchgb %al,(%ecx)
xrelease lock xchgb %al,(%ecx)
lock xrelease xchgb %al,(%ecx)
xrelease xchgb %al,(%ecx)
.byte 0xf0; .byte 0xf2; xchgb %al,(%ecx)
.byte 0xf0; .byte 0xf3; xchgb %al,(%ecx)
xacquire lock xorb %al,(%ecx)
lock xacquire xorb %al,(%ecx)
xrelease lock xorb %al,(%ecx)
lock xrelease xorb %al,(%ecx)
.byte 0xf0; .byte 0xf2; xorb %al,(%ecx)
.byte 0xf0; .byte 0xf3; xorb %al,(%ecx)
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire lock adcw %ax,(%ecx)
lock xacquire adcw %ax,(%ecx)
xrelease lock adcw %ax,(%ecx)
lock xrelease adcw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; adcw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; adcw %ax,(%ecx)
xacquire lock addw %ax,(%ecx)
lock xacquire addw %ax,(%ecx)
xrelease lock addw %ax,(%ecx)
lock xrelease addw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; addw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; addw %ax,(%ecx)
xacquire lock andw %ax,(%ecx)
lock xacquire andw %ax,(%ecx)
xrelease lock andw %ax,(%ecx)
lock xrelease andw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; andw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; andw %ax,(%ecx)
xrelease movw %ax,(%ecx)
xrelease movw %ax,0x12345678
xacquire lock orw %ax,(%ecx)
lock xacquire orw %ax,(%ecx)
xrelease lock orw %ax,(%ecx)
lock xrelease orw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; orw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; orw %ax,(%ecx)
xacquire lock sbbw %ax,(%ecx)
lock xacquire sbbw %ax,(%ecx)
xrelease lock sbbw %ax,(%ecx)
lock xrelease sbbw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; sbbw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; sbbw %ax,(%ecx)
xacquire lock subw %ax,(%ecx)
lock xacquire subw %ax,(%ecx)
xrelease lock subw %ax,(%ecx)
lock xrelease subw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; subw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; subw %ax,(%ecx)
xacquire lock xchgw %ax,(%ecx)
lock xacquire xchgw %ax,(%ecx)
xacquire xchgw %ax,(%ecx)
xrelease lock xchgw %ax,(%ecx)
lock xrelease xchgw %ax,(%ecx)
xrelease xchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; xchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; xchgw %ax,(%ecx)
xacquire lock xorw %ax,(%ecx)
lock xacquire xorw %ax,(%ecx)
xrelease lock xorw %ax,(%ecx)
lock xrelease xorw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; xorw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; xorw %ax,(%ecx)
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire lock adcl %eax,(%ecx)
lock xacquire adcl %eax,(%ecx)
xrelease lock adcl %eax,(%ecx)
lock xrelease adcl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; adcl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; adcl %eax,(%ecx)
xacquire lock addl %eax,(%ecx)
lock xacquire addl %eax,(%ecx)
xrelease lock addl %eax,(%ecx)
lock xrelease addl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; addl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; addl %eax,(%ecx)
xacquire lock andl %eax,(%ecx)
lock xacquire andl %eax,(%ecx)
xrelease lock andl %eax,(%ecx)
lock xrelease andl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; andl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; andl %eax,(%ecx)
xrelease movl %eax,(%ecx)
xrelease movl %eax,0x12345678
xacquire lock orl %eax,(%ecx)
lock xacquire orl %eax,(%ecx)
xrelease lock orl %eax,(%ecx)
lock xrelease orl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; orl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; orl %eax,(%ecx)
xacquire lock sbbl %eax,(%ecx)
lock xacquire sbbl %eax,(%ecx)
xrelease lock sbbl %eax,(%ecx)
lock xrelease sbbl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; sbbl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; sbbl %eax,(%ecx)
xacquire lock subl %eax,(%ecx)
lock xacquire subl %eax,(%ecx)
xrelease lock subl %eax,(%ecx)
lock xrelease subl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; subl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; subl %eax,(%ecx)
xacquire lock xchgl %eax,(%ecx)
lock xacquire xchgl %eax,(%ecx)
xacquire xchgl %eax,(%ecx)
xrelease lock xchgl %eax,(%ecx)
lock xrelease xchgl %eax,(%ecx)
xrelease xchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; xchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; xchgl %eax,(%ecx)
xacquire lock xorl %eax,(%ecx)
lock xacquire xorl %eax,(%ecx)
xrelease lock xorl %eax,(%ecx)
lock xrelease xorl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; xorl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; xorl %eax,(%ecx)
# Tests for op regs, regs/m16
xacquire lock btcw %ax,(%ecx)
lock xacquire btcw %ax,(%ecx)
xrelease lock btcw %ax,(%ecx)
lock xrelease btcw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; btcw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; btcw %ax,(%ecx)
xacquire lock btrw %ax,(%ecx)
lock xacquire btrw %ax,(%ecx)
xrelease lock btrw %ax,(%ecx)
lock xrelease btrw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; btrw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; btrw %ax,(%ecx)
xacquire lock btsw %ax,(%ecx)
lock xacquire btsw %ax,(%ecx)
xrelease lock btsw %ax,(%ecx)
lock xrelease btsw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; btsw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; btsw %ax,(%ecx)
xacquire lock cmpxchgw %ax,(%ecx)
lock xacquire cmpxchgw %ax,(%ecx)
xrelease lock cmpxchgw %ax,(%ecx)
lock xrelease cmpxchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; cmpxchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; cmpxchgw %ax,(%ecx)
xacquire lock xaddw %ax,(%ecx)
lock xacquire xaddw %ax,(%ecx)
xrelease lock xaddw %ax,(%ecx)
lock xrelease xaddw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; xaddw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; xaddw %ax,(%ecx)
# Tests for op regl regl/m32
xacquire lock btcl %eax,(%ecx)
lock xacquire btcl %eax,(%ecx)
xrelease lock btcl %eax,(%ecx)
lock xrelease btcl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; btcl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; btcl %eax,(%ecx)
xacquire lock btrl %eax,(%ecx)
lock xacquire btrl %eax,(%ecx)
xrelease lock btrl %eax,(%ecx)
lock xrelease btrl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; btrl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; btrl %eax,(%ecx)
xacquire lock btsl %eax,(%ecx)
lock xacquire btsl %eax,(%ecx)
xrelease lock btsl %eax,(%ecx)
lock xrelease btsl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; btsl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; btsl %eax,(%ecx)
xacquire lock cmpxchgl %eax,(%ecx)
lock xacquire cmpxchgl %eax,(%ecx)
xrelease lock cmpxchgl %eax,(%ecx)
lock xrelease cmpxchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; cmpxchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; cmpxchgl %eax,(%ecx)
xacquire lock xaddl %eax,(%ecx)
lock xacquire xaddl %eax,(%ecx)
xrelease lock xaddl %eax,(%ecx)
lock xrelease xaddl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; xaddl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; xaddl %eax,(%ecx)
# Tests for op regb/m8
xacquire lock decb (%ecx)
lock xacquire decb (%ecx)
xrelease lock decb (%ecx)
lock xrelease decb (%ecx)
.byte 0xf0; .byte 0xf2; decb (%ecx)
.byte 0xf0; .byte 0xf3; decb (%ecx)
xacquire lock incb (%ecx)
lock xacquire incb (%ecx)
xrelease lock incb (%ecx)
lock xrelease incb (%ecx)
.byte 0xf0; .byte 0xf2; incb (%ecx)
.byte 0xf0; .byte 0xf3; incb (%ecx)
xacquire lock negb (%ecx)
lock xacquire negb (%ecx)
xrelease lock negb (%ecx)
lock xrelease negb (%ecx)
.byte 0xf0; .byte 0xf2; negb (%ecx)
.byte 0xf0; .byte 0xf3; negb (%ecx)
xacquire lock notb (%ecx)
lock xacquire notb (%ecx)
xrelease lock notb (%ecx)
lock xrelease notb (%ecx)
.byte 0xf0; .byte 0xf2; notb (%ecx)
.byte 0xf0; .byte 0xf3; notb (%ecx)
# Tests for op regs/m16
xacquire lock decw (%ecx)
lock xacquire decw (%ecx)
xrelease lock decw (%ecx)
lock xrelease decw (%ecx)
.byte 0xf0; .byte 0xf2; decw (%ecx)
.byte 0xf0; .byte 0xf3; decw (%ecx)
xacquire lock incw (%ecx)
lock xacquire incw (%ecx)
xrelease lock incw (%ecx)
lock xrelease incw (%ecx)
.byte 0xf0; .byte 0xf2; incw (%ecx)
.byte 0xf0; .byte 0xf3; incw (%ecx)
xacquire lock negw (%ecx)
lock xacquire negw (%ecx)
xrelease lock negw (%ecx)
lock xrelease negw (%ecx)
.byte 0xf0; .byte 0xf2; negw (%ecx)
.byte 0xf0; .byte 0xf3; negw (%ecx)
xacquire lock notw (%ecx)
lock xacquire notw (%ecx)
xrelease lock notw (%ecx)
lock xrelease notw (%ecx)
.byte 0xf0; .byte 0xf2; notw (%ecx)
.byte 0xf0; .byte 0xf3; notw (%ecx)
# Tests for op regl/m32
xacquire lock decl (%ecx)
lock xacquire decl (%ecx)
xrelease lock decl (%ecx)
lock xrelease decl (%ecx)
.byte 0xf0; .byte 0xf2; decl (%ecx)
.byte 0xf0; .byte 0xf3; decl (%ecx)
xacquire lock incl (%ecx)
lock xacquire incl (%ecx)
xrelease lock incl (%ecx)
lock xrelease incl (%ecx)
.byte 0xf0; .byte 0xf2; incl (%ecx)
.byte 0xf0; .byte 0xf3; incl (%ecx)
xacquire lock negl (%ecx)
lock xacquire negl (%ecx)
xrelease lock negl (%ecx)
lock xrelease negl (%ecx)
.byte 0xf0; .byte 0xf2; negl (%ecx)
.byte 0xf0; .byte 0xf3; negl (%ecx)
xacquire lock notl (%ecx)
lock xacquire notl (%ecx)
xrelease lock notl (%ecx)
lock xrelease notl (%ecx)
.byte 0xf0; .byte 0xf2; notl (%ecx)
.byte 0xf0; .byte 0xf3; notl (%ecx)
# Tests for op m64
xacquire lock cmpxchg8bq (%ecx)
lock xacquire cmpxchg8bq (%ecx)
xrelease lock cmpxchg8bq (%ecx)
lock xrelease cmpxchg8bq (%ecx)
.byte 0xf0; .byte 0xf2; cmpxchg8bq (%ecx)
.byte 0xf0; .byte 0xf3; cmpxchg8bq (%ecx)
# Tests for op regb, regb/m8
xacquire lock cmpxchgb %cl,(%ecx)
lock xacquire cmpxchgb %cl,(%ecx)
xrelease lock cmpxchgb %cl,(%ecx)
lock xrelease cmpxchgb %cl,(%ecx)
.byte 0xf0; .byte 0xf2; cmpxchgb %cl,(%ecx)
.byte 0xf0; .byte 0xf3; cmpxchgb %cl,(%ecx)
xacquire lock xaddb %cl,(%ecx)
lock xacquire xaddb %cl,(%ecx)
xrelease lock xaddb %cl,(%ecx)
lock xrelease xaddb %cl,(%ecx)
.byte 0xf0; .byte 0xf2; xaddb %cl,(%ecx)
.byte 0xf0; .byte 0xf3; xaddb %cl,(%ecx)
.intel_syntax noprefix
# Tests for op imm8 regb/m8
xacquire lock adc BYTE PTR [ecx],100
lock xacquire adc BYTE PTR [ecx],100
xrelease lock adc BYTE PTR [ecx],100
lock xrelease adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [ecx],100
xacquire lock add BYTE PTR [ecx],100
lock xacquire add BYTE PTR [ecx],100
xrelease lock add BYTE PTR [ecx],100
lock xrelease add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [ecx],100
xacquire lock and BYTE PTR [ecx],100
lock xacquire and BYTE PTR [ecx],100
xrelease lock and BYTE PTR [ecx],100
lock xrelease and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [ecx],100
xrelease mov BYTE PTR [ecx],100
xacquire lock or BYTE PTR [ecx],100
lock xacquire or BYTE PTR [ecx],100
xrelease lock or BYTE PTR [ecx],100
lock xrelease or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [ecx],100
xacquire lock sbb BYTE PTR [ecx],100
lock xacquire sbb BYTE PTR [ecx],100
xrelease lock sbb BYTE PTR [ecx],100
lock xrelease sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [ecx],100
xacquire lock sub BYTE PTR [ecx],100
lock xacquire sub BYTE PTR [ecx],100
xrelease lock sub BYTE PTR [ecx],100
lock xrelease sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [ecx],100
xacquire lock xor BYTE PTR [ecx],100
lock xacquire xor BYTE PTR [ecx],100
xrelease lock xor BYTE PTR [ecx],100
lock xrelease xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [ecx],100
# Tests for op imm16 regs/m16
xacquire lock adc WORD PTR [ecx],1000
lock xacquire adc WORD PTR [ecx],1000
xrelease lock adc WORD PTR [ecx],1000
lock xrelease adc WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; adc WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; adc WORD PTR [ecx],1000
xacquire lock add WORD PTR [ecx],1000
lock xacquire add WORD PTR [ecx],1000
xrelease lock add WORD PTR [ecx],1000
lock xrelease add WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; add WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; add WORD PTR [ecx],1000
xacquire lock and WORD PTR [ecx],1000
lock xacquire and WORD PTR [ecx],1000
xrelease lock and WORD PTR [ecx],1000
lock xrelease and WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; and WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; and WORD PTR [ecx],1000
xrelease mov WORD PTR [ecx],1000
xacquire lock or WORD PTR [ecx],1000
lock xacquire or WORD PTR [ecx],1000
xrelease lock or WORD PTR [ecx],1000
lock xrelease or WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; or WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; or WORD PTR [ecx],1000
xacquire lock sbb WORD PTR [ecx],1000
lock xacquire sbb WORD PTR [ecx],1000
xrelease lock sbb WORD PTR [ecx],1000
lock xrelease sbb WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; sbb WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; sbb WORD PTR [ecx],1000
xacquire lock sub WORD PTR [ecx],1000
lock xacquire sub WORD PTR [ecx],1000
xrelease lock sub WORD PTR [ecx],1000
lock xrelease sub WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; sub WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; sub WORD PTR [ecx],1000
xacquire lock xor WORD PTR [ecx],1000
lock xacquire xor WORD PTR [ecx],1000
xrelease lock xor WORD PTR [ecx],1000
lock xrelease xor WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; xor WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; xor WORD PTR [ecx],1000
# Tests for op imm32 regl/m32
xacquire lock adc DWORD PTR [ecx],10000000
lock xacquire adc DWORD PTR [ecx],10000000
xrelease lock adc DWORD PTR [ecx],10000000
lock xrelease adc DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; adc DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; adc DWORD PTR [ecx],10000000
xacquire lock add DWORD PTR [ecx],10000000
lock xacquire add DWORD PTR [ecx],10000000
xrelease lock add DWORD PTR [ecx],10000000
lock xrelease add DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; add DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; add DWORD PTR [ecx],10000000
xacquire lock and DWORD PTR [ecx],10000000
lock xacquire and DWORD PTR [ecx],10000000
xrelease lock and DWORD PTR [ecx],10000000
lock xrelease and DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; and DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; and DWORD PTR [ecx],10000000
xrelease mov DWORD PTR [ecx],10000000
xacquire lock or DWORD PTR [ecx],10000000
lock xacquire or DWORD PTR [ecx],10000000
xrelease lock or DWORD PTR [ecx],10000000
lock xrelease or DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; or DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; or DWORD PTR [ecx],10000000
xacquire lock sbb DWORD PTR [ecx],10000000
lock xacquire sbb DWORD PTR [ecx],10000000
xrelease lock sbb DWORD PTR [ecx],10000000
lock xrelease sbb DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [ecx],10000000
xacquire lock sub DWORD PTR [ecx],10000000
lock xacquire sub DWORD PTR [ecx],10000000
xrelease lock sub DWORD PTR [ecx],10000000
lock xrelease sub DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; sub DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; sub DWORD PTR [ecx],10000000
xacquire lock xor DWORD PTR [ecx],10000000
lock xacquire xor DWORD PTR [ecx],10000000
xrelease lock xor DWORD PTR [ecx],10000000
lock xrelease xor DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; xor DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; xor DWORD PTR [ecx],10000000
# Tests for op imm8 regs/m16
xacquire lock adc WORD PTR [ecx],100
lock xacquire adc WORD PTR [ecx],100
xrelease lock adc WORD PTR [ecx],100
lock xrelease adc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc WORD PTR [ecx],100
xacquire lock add WORD PTR [ecx],100
lock xacquire add WORD PTR [ecx],100
xrelease lock add WORD PTR [ecx],100
lock xrelease add WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; add WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; add WORD PTR [ecx],100
xacquire lock and WORD PTR [ecx],100
lock xacquire and WORD PTR [ecx],100
xrelease lock and WORD PTR [ecx],100
lock xrelease and WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; and WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; and WORD PTR [ecx],100
xacquire lock btc WORD PTR [ecx],100
lock xacquire btc WORD PTR [ecx],100
xrelease lock btc WORD PTR [ecx],100
lock xrelease btc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btc WORD PTR [ecx],100
xacquire lock btr WORD PTR [ecx],100
lock xacquire btr WORD PTR [ecx],100
xrelease lock btr WORD PTR [ecx],100
lock xrelease btr WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btr WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btr WORD PTR [ecx],100
xacquire lock bts WORD PTR [ecx],100
lock xacquire bts WORD PTR [ecx],100
xrelease lock bts WORD PTR [ecx],100
lock xrelease bts WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; bts WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; bts WORD PTR [ecx],100
xrelease mov WORD PTR [ecx],100
xacquire lock or WORD PTR [ecx],100
lock xacquire or WORD PTR [ecx],100
xrelease lock or WORD PTR [ecx],100
lock xrelease or WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; or WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; or WORD PTR [ecx],100
xacquire lock sbb WORD PTR [ecx],100
lock xacquire sbb WORD PTR [ecx],100
xrelease lock sbb WORD PTR [ecx],100
lock xrelease sbb WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb WORD PTR [ecx],100
xacquire lock sub WORD PTR [ecx],100
lock xacquire sub WORD PTR [ecx],100
xrelease lock sub WORD PTR [ecx],100
lock xrelease sub WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub WORD PTR [ecx],100
xacquire lock xor WORD PTR [ecx],100
lock xacquire xor WORD PTR [ecx],100
xrelease lock xor WORD PTR [ecx],100
lock xrelease xor WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor WORD PTR [ecx],100
# Tests for op imm8 regl/m32
xacquire lock adc DWORD PTR [ecx],100
lock xacquire adc DWORD PTR [ecx],100
xrelease lock adc DWORD PTR [ecx],100
lock xrelease adc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc DWORD PTR [ecx],100
xacquire lock add DWORD PTR [ecx],100
lock xacquire add DWORD PTR [ecx],100
xrelease lock add DWORD PTR [ecx],100
lock xrelease add DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; add DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; add DWORD PTR [ecx],100
xacquire lock and DWORD PTR [ecx],100
lock xacquire and DWORD PTR [ecx],100
xrelease lock and DWORD PTR [ecx],100
lock xrelease and DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; and DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; and DWORD PTR [ecx],100
xacquire lock btc DWORD PTR [ecx],100
lock xacquire btc DWORD PTR [ecx],100
xrelease lock btc DWORD PTR [ecx],100
lock xrelease btc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btc DWORD PTR [ecx],100
xacquire lock btr DWORD PTR [ecx],100
lock xacquire btr DWORD PTR [ecx],100
xrelease lock btr DWORD PTR [ecx],100
lock xrelease btr DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btr DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btr DWORD PTR [ecx],100
xacquire lock bts DWORD PTR [ecx],100
lock xacquire bts DWORD PTR [ecx],100
xrelease lock bts DWORD PTR [ecx],100
lock xrelease bts DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; bts DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; bts DWORD PTR [ecx],100
xrelease mov DWORD PTR [ecx],100
xacquire lock or DWORD PTR [ecx],100
lock xacquire or DWORD PTR [ecx],100
xrelease lock or DWORD PTR [ecx],100
lock xrelease or DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; or DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; or DWORD PTR [ecx],100
xacquire lock sbb DWORD PTR [ecx],100
lock xacquire sbb DWORD PTR [ecx],100
xrelease lock sbb DWORD PTR [ecx],100
lock xrelease sbb DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [ecx],100
xacquire lock sub DWORD PTR [ecx],100
lock xacquire sub DWORD PTR [ecx],100
xrelease lock sub DWORD PTR [ecx],100
lock xrelease sub DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub DWORD PTR [ecx],100
xacquire lock xor DWORD PTR [ecx],100
lock xacquire xor DWORD PTR [ecx],100
xrelease lock xor DWORD PTR [ecx],100
lock xrelease xor DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor DWORD PTR [ecx],100
# Tests for op imm8 regb/m8
xacquire lock adc BYTE PTR [ecx],100
lock xacquire adc BYTE PTR [ecx],100
xrelease lock adc BYTE PTR [ecx],100
lock xrelease adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [ecx],100
xacquire lock add BYTE PTR [ecx],100
lock xacquire add BYTE PTR [ecx],100
xrelease lock add BYTE PTR [ecx],100
lock xrelease add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [ecx],100
xacquire lock and BYTE PTR [ecx],100
lock xacquire and BYTE PTR [ecx],100
xrelease lock and BYTE PTR [ecx],100
lock xrelease and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [ecx],100
xrelease mov BYTE PTR [ecx],100
xacquire lock or BYTE PTR [ecx],100
lock xacquire or BYTE PTR [ecx],100
xrelease lock or BYTE PTR [ecx],100
lock xrelease or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [ecx],100
xacquire lock sbb BYTE PTR [ecx],100
lock xacquire sbb BYTE PTR [ecx],100
xrelease lock sbb BYTE PTR [ecx],100
lock xrelease sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [ecx],100
xacquire lock sub BYTE PTR [ecx],100
lock xacquire sub BYTE PTR [ecx],100
xrelease lock sub BYTE PTR [ecx],100
lock xrelease sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [ecx],100
xacquire lock xor BYTE PTR [ecx],100
lock xacquire xor BYTE PTR [ecx],100
xrelease lock xor BYTE PTR [ecx],100
lock xrelease xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [ecx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire lock adc BYTE PTR [ecx],al
lock xacquire adc BYTE PTR [ecx],al
xrelease lock adc BYTE PTR [ecx],al
lock xrelease adc BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; adc BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; adc BYTE PTR [ecx],al
xacquire lock add BYTE PTR [ecx],al
lock xacquire add BYTE PTR [ecx],al
xrelease lock add BYTE PTR [ecx],al
lock xrelease add BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; add BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; add BYTE PTR [ecx],al
xacquire lock and BYTE PTR [ecx],al
lock xacquire and BYTE PTR [ecx],al
xrelease lock and BYTE PTR [ecx],al
lock xrelease and BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; and BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; and BYTE PTR [ecx],al
xrelease mov BYTE PTR [ecx],al
xacquire lock or BYTE PTR [ecx],al
lock xacquire or BYTE PTR [ecx],al
xrelease lock or BYTE PTR [ecx],al
lock xrelease or BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; or BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; or BYTE PTR [ecx],al
xacquire lock sbb BYTE PTR [ecx],al
lock xacquire sbb BYTE PTR [ecx],al
xrelease lock sbb BYTE PTR [ecx],al
lock xrelease sbb BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [ecx],al
xacquire lock sub BYTE PTR [ecx],al
lock xacquire sub BYTE PTR [ecx],al
xrelease lock sub BYTE PTR [ecx],al
lock xrelease sub BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; sub BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; sub BYTE PTR [ecx],al
xacquire lock xchg BYTE PTR [ecx],al
lock xacquire xchg BYTE PTR [ecx],al
xacquire xchg BYTE PTR [ecx],al
xrelease lock xchg BYTE PTR [ecx],al
lock xrelease xchg BYTE PTR [ecx],al
xrelease xchg BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; xchg BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; xchg BYTE PTR [ecx],al
xacquire lock xor BYTE PTR [ecx],al
lock xacquire xor BYTE PTR [ecx],al
xrelease lock xor BYTE PTR [ecx],al
lock xrelease xor BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; xor BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; xor BYTE PTR [ecx],al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire lock adc WORD PTR [ecx],ax
lock xacquire adc WORD PTR [ecx],ax
xrelease lock adc WORD PTR [ecx],ax
lock xrelease adc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; adc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; adc WORD PTR [ecx],ax
xacquire lock add WORD PTR [ecx],ax
lock xacquire add WORD PTR [ecx],ax
xrelease lock add WORD PTR [ecx],ax
lock xrelease add WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; add WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; add WORD PTR [ecx],ax
xacquire lock and WORD PTR [ecx],ax
lock xacquire and WORD PTR [ecx],ax
xrelease lock and WORD PTR [ecx],ax
lock xrelease and WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; and WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; and WORD PTR [ecx],ax
xrelease mov WORD PTR [ecx],ax
xacquire lock or WORD PTR [ecx],ax
lock xacquire or WORD PTR [ecx],ax
xrelease lock or WORD PTR [ecx],ax
lock xrelease or WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; or WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; or WORD PTR [ecx],ax
xacquire lock sbb WORD PTR [ecx],ax
lock xacquire sbb WORD PTR [ecx],ax
xrelease lock sbb WORD PTR [ecx],ax
lock xrelease sbb WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; sbb WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; sbb WORD PTR [ecx],ax
xacquire lock sub WORD PTR [ecx],ax
lock xacquire sub WORD PTR [ecx],ax
xrelease lock sub WORD PTR [ecx],ax
lock xrelease sub WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; sub WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; sub WORD PTR [ecx],ax
xacquire lock xchg WORD PTR [ecx],ax
lock xacquire xchg WORD PTR [ecx],ax
xacquire xchg WORD PTR [ecx],ax
xrelease lock xchg WORD PTR [ecx],ax
lock xrelease xchg WORD PTR [ecx],ax
xrelease xchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; xchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; xchg WORD PTR [ecx],ax
xacquire lock xor WORD PTR [ecx],ax
lock xacquire xor WORD PTR [ecx],ax
xrelease lock xor WORD PTR [ecx],ax
lock xrelease xor WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; xor WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; xor WORD PTR [ecx],ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire lock adc DWORD PTR [ecx],eax
lock xacquire adc DWORD PTR [ecx],eax
xrelease lock adc DWORD PTR [ecx],eax
lock xrelease adc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; adc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; adc DWORD PTR [ecx],eax
xacquire lock add DWORD PTR [ecx],eax
lock xacquire add DWORD PTR [ecx],eax
xrelease lock add DWORD PTR [ecx],eax
lock xrelease add DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; add DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; add DWORD PTR [ecx],eax
xacquire lock and DWORD PTR [ecx],eax
lock xacquire and DWORD PTR [ecx],eax
xrelease lock and DWORD PTR [ecx],eax
lock xrelease and DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; and DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; and DWORD PTR [ecx],eax
xrelease mov DWORD PTR [ecx],eax
xacquire lock or DWORD PTR [ecx],eax
lock xacquire or DWORD PTR [ecx],eax
xrelease lock or DWORD PTR [ecx],eax
lock xrelease or DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; or DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; or DWORD PTR [ecx],eax
xacquire lock sbb DWORD PTR [ecx],eax
lock xacquire sbb DWORD PTR [ecx],eax
xrelease lock sbb DWORD PTR [ecx],eax
lock xrelease sbb DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [ecx],eax
xacquire lock sub DWORD PTR [ecx],eax
lock xacquire sub DWORD PTR [ecx],eax
xrelease lock sub DWORD PTR [ecx],eax
lock xrelease sub DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; sub DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; sub DWORD PTR [ecx],eax
xacquire lock xchg DWORD PTR [ecx],eax
lock xacquire xchg DWORD PTR [ecx],eax
xacquire xchg DWORD PTR [ecx],eax
xrelease lock xchg DWORD PTR [ecx],eax
lock xrelease xchg DWORD PTR [ecx],eax
xrelease xchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; xchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; xchg DWORD PTR [ecx],eax
xacquire lock xor DWORD PTR [ecx],eax
lock xacquire xor DWORD PTR [ecx],eax
xrelease lock xor DWORD PTR [ecx],eax
lock xrelease xor DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; xor DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; xor DWORD PTR [ecx],eax
# Tests for op regs, regs/m16
xacquire lock btc WORD PTR [ecx],ax
lock xacquire btc WORD PTR [ecx],ax
xrelease lock btc WORD PTR [ecx],ax
lock xrelease btc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; btc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; btc WORD PTR [ecx],ax
xacquire lock btr WORD PTR [ecx],ax
lock xacquire btr WORD PTR [ecx],ax
xrelease lock btr WORD PTR [ecx],ax
lock xrelease btr WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; btr WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; btr WORD PTR [ecx],ax
xacquire lock bts WORD PTR [ecx],ax
lock xacquire bts WORD PTR [ecx],ax
xrelease lock bts WORD PTR [ecx],ax
lock xrelease bts WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; bts WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; bts WORD PTR [ecx],ax
xacquire lock cmpxchg WORD PTR [ecx],ax
lock xacquire cmpxchg WORD PTR [ecx],ax
xrelease lock cmpxchg WORD PTR [ecx],ax
lock xrelease cmpxchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; cmpxchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; cmpxchg WORD PTR [ecx],ax
xacquire lock xadd WORD PTR [ecx],ax
lock xacquire xadd WORD PTR [ecx],ax
xrelease lock xadd WORD PTR [ecx],ax
lock xrelease xadd WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; xadd WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; xadd WORD PTR [ecx],ax
# Tests for op regl regl/m32
xacquire lock btc DWORD PTR [ecx],eax
lock xacquire btc DWORD PTR [ecx],eax
xrelease lock btc DWORD PTR [ecx],eax
lock xrelease btc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; btc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; btc DWORD PTR [ecx],eax
xacquire lock btr DWORD PTR [ecx],eax
lock xacquire btr DWORD PTR [ecx],eax
xrelease lock btr DWORD PTR [ecx],eax
lock xrelease btr DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; btr DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; btr DWORD PTR [ecx],eax
xacquire lock bts DWORD PTR [ecx],eax
lock xacquire bts DWORD PTR [ecx],eax
xrelease lock bts DWORD PTR [ecx],eax
lock xrelease bts DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; bts DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; bts DWORD PTR [ecx],eax
xacquire lock cmpxchg DWORD PTR [ecx],eax
lock xacquire cmpxchg DWORD PTR [ecx],eax
xrelease lock cmpxchg DWORD PTR [ecx],eax
lock xrelease cmpxchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; cmpxchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; cmpxchg DWORD PTR [ecx],eax
xacquire lock xadd DWORD PTR [ecx],eax
lock xacquire xadd DWORD PTR [ecx],eax
xrelease lock xadd DWORD PTR [ecx],eax
lock xrelease xadd DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; xadd DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; xadd DWORD PTR [ecx],eax
# Tests for op regb/m8
xacquire lock dec BYTE PTR [ecx]
lock xacquire dec BYTE PTR [ecx]
xrelease lock dec BYTE PTR [ecx]
lock xrelease dec BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; dec BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; dec BYTE PTR [ecx]
xacquire lock inc BYTE PTR [ecx]
lock xacquire inc BYTE PTR [ecx]
xrelease lock inc BYTE PTR [ecx]
lock xrelease inc BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; inc BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; inc BYTE PTR [ecx]
xacquire lock neg BYTE PTR [ecx]
lock xacquire neg BYTE PTR [ecx]
xrelease lock neg BYTE PTR [ecx]
lock xrelease neg BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; neg BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; neg BYTE PTR [ecx]
xacquire lock not BYTE PTR [ecx]
lock xacquire not BYTE PTR [ecx]
xrelease lock not BYTE PTR [ecx]
lock xrelease not BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; not BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; not BYTE PTR [ecx]
# Tests for op regs/m16
xacquire lock dec WORD PTR [ecx]
lock xacquire dec WORD PTR [ecx]
xrelease lock dec WORD PTR [ecx]
lock xrelease dec WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; dec WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; dec WORD PTR [ecx]
xacquire lock inc WORD PTR [ecx]
lock xacquire inc WORD PTR [ecx]
xrelease lock inc WORD PTR [ecx]
lock xrelease inc WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; inc WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; inc WORD PTR [ecx]
xacquire lock neg WORD PTR [ecx]
lock xacquire neg WORD PTR [ecx]
xrelease lock neg WORD PTR [ecx]
lock xrelease neg WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; neg WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; neg WORD PTR [ecx]
xacquire lock not WORD PTR [ecx]
lock xacquire not WORD PTR [ecx]
xrelease lock not WORD PTR [ecx]
lock xrelease not WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; not WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; not WORD PTR [ecx]
# Tests for op regl/m32
xacquire lock dec DWORD PTR [ecx]
lock xacquire dec DWORD PTR [ecx]
xrelease lock dec DWORD PTR [ecx]
lock xrelease dec DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; dec DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; dec DWORD PTR [ecx]
xacquire lock inc DWORD PTR [ecx]
lock xacquire inc DWORD PTR [ecx]
xrelease lock inc DWORD PTR [ecx]
lock xrelease inc DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; inc DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; inc DWORD PTR [ecx]
xacquire lock neg DWORD PTR [ecx]
lock xacquire neg DWORD PTR [ecx]
xrelease lock neg DWORD PTR [ecx]
lock xrelease neg DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; neg DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; neg DWORD PTR [ecx]
xacquire lock not DWORD PTR [ecx]
lock xacquire not DWORD PTR [ecx]
xrelease lock not DWORD PTR [ecx]
lock xrelease not DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; not DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; not DWORD PTR [ecx]
# Tests for op m64
xacquire lock cmpxchg8b QWORD PTR [ecx]
lock xacquire cmpxchg8b QWORD PTR [ecx]
xrelease lock cmpxchg8b QWORD PTR [ecx]
lock xrelease cmpxchg8b QWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; cmpxchg8b QWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; cmpxchg8b QWORD PTR [ecx]
# Tests for op regb, regb/m8
xacquire lock cmpxchg BYTE PTR [ecx],cl
lock xacquire cmpxchg BYTE PTR [ecx],cl
xrelease lock cmpxchg BYTE PTR [ecx],cl
lock xrelease cmpxchg BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf2; cmpxchg BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf3; cmpxchg BYTE PTR [ecx],cl
xacquire lock xadd BYTE PTR [ecx],cl
lock xacquire xadd BYTE PTR [ecx],cl
xrelease lock xadd BYTE PTR [ecx],cl
lock xrelease xadd BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf2; xadd BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf3; xadd BYTE PTR [ecx],cl
# ---- tactcomplabs/xbgas-binutils-gdb : gas/testsuite/gas/i386/x86-64-avx512dq-rcig.s (2,228 bytes) ----
# Check 64bit AVX512DQ-RCIG instructions
# "RCIG" = rounding control ignored: these AVX512DQ instructions accept the
# EVEX {sae} (suppress-all-exceptions) modifier but no rounding-mode override.
# Each form is tested with two immediates (0xab and 123) to cover both a
# high-bit-set and a small imm8.  Registers zmm28-zmm30 / xmm28-xmm30 force
# EVEX encoding (they are unreachable with VEX).
.allow_index_reg
.text
_start:
# AT&T syntax: imm8 first, then {sae}, then sources, destination last.
	vrangepd	$0xab, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$0xab, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangesd	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducepd	$0xab, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreducepd	$123, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$0xab, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$123, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreducesd	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
# Conversions with truncation take {sae} but no immediate; note the
# ps->qq forms read a ymm source into a zmm destination (element widening).
	vcvttpd2qq	{sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvttpd2uqq	{sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvttps2qq	{sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvttps2uqq	{sae}, %ymm29, %zmm30	 # AVX512DQ
# Same patterns repeated in Intel syntax: destination first, {sae} after
# the register operands, immediate last.
	.intel_syntax noprefix
	vrangepd	zmm30, zmm29, zmm28, {sae}, 0xab	 # AVX512DQ
	vrangepd	zmm30, zmm29, zmm28, {sae}, 123	 # AVX512DQ
	vrangeps	zmm30, zmm29, zmm28, {sae}, 0xab	 # AVX512DQ
	vrangeps	zmm30, zmm29, zmm28, {sae}, 123	 # AVX512DQ
	vrangesd	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vrangesd	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vrangess	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vrangess	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vreducepd	zmm30, zmm29, {sae}, 0xab	 # AVX512DQ
	vreducepd	zmm30, zmm29, {sae}, 123	 # AVX512DQ
	vreduceps	zmm30, zmm29, {sae}, 0xab	 # AVX512DQ
	vreduceps	zmm30, zmm29, {sae}, 123	 # AVX512DQ
	vreducesd	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vreducesd	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vreducess	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vreducess	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vcvttpd2qq	zmm30, zmm29, {sae}	 # AVX512DQ
	vcvttpd2uqq	zmm30, zmm29, {sae}	 # AVX512DQ
	vcvttps2qq	zmm30, ymm29, {sae}	 # AVX512DQ
	vcvttps2uqq	zmm30, ymm29, {sae}	 # AVX512DQ
# ---- tactcomplabs/xbgas-binutils-gdb : gas/testsuite/gas/i386/x86-64-avx512bitalg.s (2,776 bytes) ----
# Check 64bit AVX512BITALG instructions
# Covers vpshufbitqmb (mask-register destination, with and without {%k7}
# write-masking) and the per-element population counts vpopcntb/w/d/q.
# Memory forms test: a full SIB address with a high index register
# (%r14 needs EVEX.X), a compressed-Disp8 displacement (e.g. 8128 = 127*64
# for a 64-byte zmm operand), and -- for the d/q forms -- embedded
# broadcast ({1to16}/{1to8}) with its own element-sized Disp8 scaling.
.allow_index_reg
.text
_start:
	vpshufbitqmb	%zmm28, %zmm29, %k5	 # AVX512BITALG
	vpshufbitqmb	%zmm28, %zmm29, %k5{%k7}	 # AVX512BITALG
	vpshufbitqmb	0x123(%rax,%r14,8), %zmm29, %k5	 # AVX512BITALG
	vpshufbitqmb	8128(%rdx), %zmm29, %k5	 # AVX512BITALG Disp8
	vpopcntb	%zmm29, %zmm30	 # AVX512BITALG
	vpopcntb	%zmm29, %zmm30{%k7}	 # AVX512BITALG
	vpopcntb	%zmm29, %zmm30{%k7}{z}	 # AVX512BITALG
	vpopcntb	0x123(%rax,%r14,8), %zmm30	 # AVX512BITALG
	vpopcntb	8128(%rdx), %zmm30	 # AVX512BITALG Disp8
	vpopcntw	%zmm29, %zmm30	 # AVX512BITALG
	vpopcntw	%zmm29, %zmm30{%k7}	 # AVX512BITALG
	vpopcntw	%zmm29, %zmm30{%k7}{z}	 # AVX512BITALG
	vpopcntw	0x123(%rax,%r14,8), %zmm30	 # AVX512BITALG
	vpopcntw	8128(%rdx), %zmm30	 # AVX512BITALG Disp8
	vpopcntd	%zmm29, %zmm30	 # AVX512BITALG
	vpopcntd	%zmm29, %zmm30{%k7}	 # AVX512BITALG
	vpopcntd	%zmm29, %zmm30{%k7}{z}	 # AVX512BITALG
	vpopcntd	0x123(%rax,%r14,8), %zmm30	 # AVX512BITALG
	vpopcntd	8128(%rdx), %zmm30	 # AVX512BITALG Disp8
	vpopcntd	508(%rdx){1to16}, %zmm30	 # AVX512BITALG Disp8
	vpopcntq	%zmm29, %zmm30	 # AVX512BITALG
	vpopcntq	%zmm29, %zmm30{%k7}	 # AVX512BITALG
	vpopcntq	%zmm29, %zmm30{%k7}{z}	 # AVX512BITALG
	vpopcntq	0x123(%rax,%r14,8), %zmm30	 # AVX512BITALG
	vpopcntq	8128(%rdx), %zmm30	 # AVX512BITALG Disp8
	vpopcntq	1016(%rdx){1to8}, %zmm30	 # AVX512BITALG Disp8
# Same coverage in Intel syntax (destination first); note the memory
# displacements here are independent test instances, not transliterations
# of the AT&T lines above.
	.intel_syntax noprefix
	vpshufbitqmb	k5, zmm29, zmm28	 # AVX512BITALG
	vpshufbitqmb	k5{k7}, zmm29, zmm28	 # AVX512BITALG
	vpshufbitqmb	k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BITALG
	vpshufbitqmb	k5, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BITALG Disp8
	vpopcntb	zmm30, zmm29	 # AVX512BITALG
	vpopcntb	zmm30{k7}, zmm29	 # AVX512BITALG
	vpopcntb	zmm30{k7}{z}, zmm29	 # AVX512BITALG
	vpopcntb	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BITALG
	vpopcntb	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BITALG Disp8
	vpopcntw	zmm30, zmm29	 # AVX512BITALG
	vpopcntw	zmm30{k7}, zmm29	 # AVX512BITALG
	vpopcntw	zmm30{k7}{z}, zmm29	 # AVX512BITALG
	vpopcntw	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BITALG
	vpopcntw	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BITALG Disp8
	vpopcntd	zmm30, zmm29	 # AVX512BITALG
	vpopcntd	zmm30{k7}, zmm29	 # AVX512BITALG
	vpopcntd	zmm30{k7}{z}, zmm29	 # AVX512BITALG
	vpopcntd	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BITALG
	vpopcntd	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BITALG Disp8
	vpopcntd	zmm30, [rdx+508]{1to16}	 # AVX512BITALG Disp8
	vpopcntq	zmm30, zmm29	 # AVX512BITALG
	vpopcntq	zmm30{k7}, zmm29	 # AVX512BITALG
	vpopcntq	zmm30{k7}{z}, zmm29	 # AVX512BITALG
	vpopcntq	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BITALG
	vpopcntq	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BITALG Disp8
	vpopcntq	zmm30, [rdx+1016]{1to8}	 # AVX512BITALG Disp8
# ---- tactcomplabs/xbgas-binutils-gdb : gas/testsuite/gas/i386/x86-64-mpx-inval-2.s (1,777 bytes) ----
# MPX instructions
# NOTE(review): the "-inval" filename and the operand choices indicate these
# are deliberately invalid/questionable MPX forms for 64-bit mode -- 32-bit
# address-size operands (%eax/%ecx/%eip bases), 16-bit registers (%cx/dx)
# where a 64-bit operand is expected, and %rsp used as an index register.
# Presumably the paired test driver expects assembler diagnostics for each
# line -- confirm against the corresponding .l/.s expectations in the suite.
.allow_index_reg
.text
### bndmk
# 32-bit addressing (and %eip-relative) is not valid for bndmk in 64-bit mode.
	bndmk (%eax), %bnd1
	bndmk 0x3(%ecx,%ebx,1), %bnd1
	bndmk (%rip), %bnd3
	bndmk (%eip), %bnd2
### bndmov
	bndmov (%r8d), %bnd1
	bndmov 0x3(%r9d,%edx,1), %bnd1
	bndmov %bnd1, (%eax)
	bndmov %bnd1, 0x3(%ecx,%eax,1)
### bndcl
# Register operands must be 64-bit; %ecx / %cx are the wrong width here.
	bndcl (%ecx), %bnd1
	bndcl 0x3(%ecx,%eax,1), %bnd1
	bndcl %ecx, %bnd1
	bndcl %cx, %bnd1
### bndcu
	bndcu (%ecx), %bnd1
	bndcu 0x3(%ecx,%eax,1), %bnd1
	bndcu %ecx, %bnd1
	bndcu %cx, %bnd1
### bndcn
	bndcn (%ecx), %bnd1
	bndcn 0x3(%ecx,%eax,1), %bnd1
	bndcn %ecx, %bnd1
	bndcn %cx, %bnd1
### bndstx
# bndstx/bndldx require a 64-bit SIB address; RIP/EIP-relative forms and
# 32-bit bases are exercised as error cases.
	bndstx %bnd0, 0x3(%eax,%ebx,1)
	bndstx %bnd2, 3(%ebx,1)
	bndstx %bnd1, (%r15,%rax,2)
	bndstx %bnd3, base(%rip)
	bndstx %bnd1, base(%eip)
### bndldx
	bndldx 0x3(%eax,%ebx,1), %bnd0
	bndldx 3(%ebx,1), %bnd2
	bndldx (%rax,%r15,4), %bnd3
	bndldx base(%rip), %bnd1
	bndldx base(%eip), %bnd3
# Same error cases repeated in Intel syntax; also covers rsp-as-index
# ([rax+rsp]), which cannot be encoded as an index register.
	.intel_syntax noprefix
	bndmk bnd1, [eax]
	bndmk bnd1, [edx+1*eax+0x3]
	bndmk bnd3, [rip]
	bndmk bnd2, [eip]
	bndmk bnd2, [rax+rsp]
### bndmov
	bndmov bnd1, [eax]
	bndmov bnd1, [edx+1*eax+0x3]
	bndmov [eax], bnd1
	bndmov [edx+1*eax+0x3], bnd1
### bndcl
	bndcl bnd1, [eax]
	bndcl bnd1, [edx+1*eax+0x3]
	bndcl bnd1, eax
	bndcl bnd1, dx
### bndcu
	bndcu bnd1, [eax]
	bndcu bnd1, [edx+1*eax+0x3]
	bndcu bnd1, eax
	bndcu bnd1, dx
### bndcn
	bndcn bnd1, [eax]
	bndcn bnd1, [edx+1*eax+0x3]
	bndcn bnd1, eax
	bndcn bnd1, dx
### bndstx
	bndstx [eax+ebx*1+0x3], bnd0
	bndstx [1*ebx+3], bnd2
	bndstx [r8+rdi*4], bnd2
	bndstx [rip+base], bnd1
	bndstx [eip+base], bnd3
	bndstx [rax+rsp], bnd3
### bndldx
	bndldx bnd0, [eax+ebx*1+0x3]
	bndldx bnd2, [1*ebx+3]
	bndldx bnd2, [rdi+r8*8]
	bndldx bnd1, [rip+base]
	bndldx bnd3, [eip+base]
	bndldx bnd3, [rax+rsp]
# Force a good alignment.
	.p2align 4,0
# ---- tactcomplabs/xbgas-binutils-gdb : gas/testsuite/gas/i386/x86-64-avx512pf.s (6,527 bytes) ----
# Check 64bit AVX512PF instructions
.allow_index_reg
.text
_start:
# AT&T syntax.  The AVX512PF prefetch gather/scatter hints take a single
# VSIB memory operand (vector index register) plus a mandatory {k}
# completion mask; there is no register destination.  dpd forms use a
# ymm index (dword indices, 8 double elements); all other forms use zmm.
vgatherpf0dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vgatherpf0dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vgatherpf0dpd 256(%r9,%ymm31){%k1}	 # AVX512PF
vgatherpf0dpd 1024(%rcx,%ymm31,4){%k1}	 # AVX512PF
vgatherpf0dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf0dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf0dps 256(%r9,%zmm31){%k1}	 # AVX512PF
vgatherpf0dps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vgatherpf0qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf0qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf0qpd 256(%r9,%zmm31){%k1}	 # AVX512PF
vgatherpf0qpd 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vgatherpf0qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf0qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf0qps 256(%r9,%zmm31){%k1}	 # AVX512PF
vgatherpf0qps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vgatherpf1dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vgatherpf1dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vgatherpf1dpd 256(%r9,%ymm31){%k1}	 # AVX512PF
vgatherpf1dpd 1024(%rcx,%ymm31,4){%k1}	 # AVX512PF
vgatherpf1dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf1dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf1dps 256(%r9,%zmm31){%k1}	 # AVX512PF
vgatherpf1dps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vgatherpf1qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf1qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf1qpd 256(%r9,%zmm31){%k1}	 # AVX512PF
vgatherpf1qpd 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vgatherpf1qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf1qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vgatherpf1qps 256(%r9,%zmm31){%k1}	 # AVX512PF
vgatherpf1qps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vscatterpf0dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vscatterpf0dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vscatterpf0dpd 256(%r9,%ymm31){%k1}	 # AVX512PF
vscatterpf0dpd 1024(%rcx,%ymm31,4){%k1}	 # AVX512PF
vscatterpf0dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf0dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf0dps 256(%r9,%zmm31){%k1}	 # AVX512PF
vscatterpf0dps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vscatterpf0qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf0qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf0qpd 256(%r9,%zmm31){%k1}	 # AVX512PF
vscatterpf0qpd 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vscatterpf0qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf0qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf0qps 256(%r9,%zmm31){%k1}	 # AVX512PF
vscatterpf0qps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vscatterpf1dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vscatterpf1dpd 123(%r14,%ymm31,8){%k1}	 # AVX512PF
vscatterpf1dpd 256(%r9,%ymm31){%k1}	 # AVX512PF
vscatterpf1dpd 1024(%rcx,%ymm31,4){%k1}	 # AVX512PF
vscatterpf1dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf1dps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf1dps 256(%r9,%zmm31){%k1}	 # AVX512PF
vscatterpf1dps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vscatterpf1qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf1qpd 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf1qpd 256(%r9,%zmm31){%k1}	 # AVX512PF
vscatterpf1qpd 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
vscatterpf1qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf1qps 123(%r14,%zmm31,8){%k1}	 # AVX512PF
vscatterpf1qps 256(%r9,%zmm31){%k1}	 # AVX512PF
vscatterpf1qps 1024(%rcx,%zmm31,4){%k1}	 # AVX512PF
# The same patterns again in Intel syntax; {k1} attaches to the memory
# operand and negative/positive displacements exercise disp8/disp32.
.intel_syntax noprefix
vgatherpf0dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vgatherpf0dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vgatherpf0dpd [r9+ymm31+256]{k1}	 # AVX512PF
vgatherpf0dpd [rcx+ymm31*4+1024]{k1}	 # AVX512PF
vgatherpf0dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf0dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf0dps [r9+zmm31+256]{k1}	 # AVX512PF
vgatherpf0dps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vgatherpf0qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf0qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf0qpd [r9+zmm31+256]{k1}	 # AVX512PF
vgatherpf0qpd [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vgatherpf0qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf0qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf0qps [r9+zmm31+256]{k1}	 # AVX512PF
vgatherpf0qps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vgatherpf1dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vgatherpf1dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vgatherpf1dpd [r9+ymm31+256]{k1}	 # AVX512PF
vgatherpf1dpd [rcx+ymm31*4+1024]{k1}	 # AVX512PF
vgatherpf1dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf1dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf1dps [r9+zmm31+256]{k1}	 # AVX512PF
vgatherpf1dps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vgatherpf1qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf1qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf1qpd [r9+zmm31+256]{k1}	 # AVX512PF
vgatherpf1qpd [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vgatherpf1qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf1qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vgatherpf1qps [r9+zmm31+256]{k1}	 # AVX512PF
vgatherpf1qps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vscatterpf0dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vscatterpf0dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vscatterpf0dpd [r9+ymm31+256]{k1}	 # AVX512PF
vscatterpf0dpd [rcx+ymm31*4+1024]{k1}	 # AVX512PF
vscatterpf0dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf0dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf0dps [r9+zmm31+256]{k1}	 # AVX512PF
vscatterpf0dps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vscatterpf0qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf0qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf0qpd [r9+zmm31+256]{k1}	 # AVX512PF
vscatterpf0qpd [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vscatterpf0qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf0qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf0qps [r9+zmm31+256]{k1}	 # AVX512PF
vscatterpf0qps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vscatterpf1dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vscatterpf1dpd [r14+ymm31*8-123]{k1}	 # AVX512PF
vscatterpf1dpd [r9+ymm31+256]{k1}	 # AVX512PF
vscatterpf1dpd [rcx+ymm31*4+1024]{k1}	 # AVX512PF
vscatterpf1dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf1dps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf1dps [r9+zmm31+256]{k1}	 # AVX512PF
vscatterpf1dps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vscatterpf1qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf1qpd [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf1qpd [r9+zmm31+256]{k1}	 # AVX512PF
vscatterpf1qpd [rcx+zmm31*4+1024]{k1}	 # AVX512PF
vscatterpf1qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf1qps [r14+zmm31*8-123]{k1}	 # AVX512PF
vscatterpf1qps [r9+zmm31+256]{k1}	 # AVX512PF
vscatterpf1qps [rcx+zmm31*4+1024]{k1}	 # AVX512PF
# ======================================================================
# repo: tactcomplabs/xbgas-binutils-gdb  (15,491 bytes)
# file: gas/testsuite/gas/i386/intel.s
# ======================================================================
.text
.intel_syntax noprefix
foo:
# Walk the one-byte opcode map in Intel syntax with 32-bit operands.
# 0x90 / 0x90909090 immediates and displacements make wrong-length
# encodings obvious in the disassembly expectations; the bare
# disp[reg] form (no "byte/dword ptr") checks operand-size inference
# from the register operand.
add byte ptr 0x90909090[eax], dl
add dword ptr 0x90909090[eax], edx
add dl, byte ptr 0x90909090[eax]
add edx, dword ptr 0x90909090[eax]
add al, 0x90
add eax, 0x90909090
push es
pop es
or [eax+0x90909090], dl
or [eax+0x90909090], edx
or dl, [eax+0x90909090]
or edx, [eax+0x90909090]
or al, 0x90
or eax, 0x90909090
push cs
adc byte ptr [eax+0x90909090], dl
adc dword ptr [eax+0x90909090], edx
adc dl, byte ptr [eax+0x90909090]
adc edx, dword ptr [eax+0x90909090]
adc al, 0x90
adc eax, 0x90909090
push ss
pop ss
sbb 0x90909090[eax], dl
sbb 0x90909090[eax], edx
sbb dl, 0x90909090[eax]
sbb edx, 0x90909090[eax]
sbb al, 0x90
sbb eax, 0x90909090
push ds
pop ds
and 0x90909090[eax], dl
and 0x90909090[eax], edx
and dl, 0x90909090[eax]
and edx, 0x90909090[eax]
and al, 0x90
and eax, 0x90909090
daa
sub 0x90909090[eax], dl
sub 0x90909090[eax], edx
sub dl, 0x90909090[eax]
sub edx, 0x90909090[eax]
sub al, 0x90
sub eax, 0x90909090
das
xor 0x90909090[eax], dl
xor 0x90909090[eax], edx
xor dl, 0x90909090[eax]
xor edx, 0x90909090[eax]
xor al, 0x90
xor eax, 0x90909090
aaa
cmp 0x90909090[eax], dl
cmp 0x90909090[eax], edx
cmp dl, 0x90909090[eax]
cmp edx, 0x90909090[eax]
cmp al, 0x90
cmp eax, 0x90909090
aas
# Short-form inc/dec/push/pop (opcodes 0x40-0x5f) for every 32-bit reg.
inc eax
inc ecx
inc edx
inc ebx
inc esp
inc ebp
inc esi
inc edi
dec eax
dec ecx
dec edx
dec ebx
dec esp
dec ebp
dec esi
dec edi
push eax
push ecx
push edx
push ebx
push esp
push ebp
push esi
push edi
pop eax
pop ecx
pop edx
pop ebx
pop esp
pop ebp
pop esi
pop edi
pusha
popa
bound edx, 0x90909090[eax]
arpl 0x90909090[eax], dx
push 0x90909090
imul edx, 0x90909090[eax], 0x90909090
push 0xffffff90
imul edx, 0x90909090[eax], 0xffffff90
ins byte ptr es:[edi], dx
ins dword ptr es:[edi], dx
outs dx, byte ptr ds:[esi]
outs dx, dword ptr ds:[esi]
# All sixteen short (rel8) conditional jumps; ".+2-0x70" yields a -0x70
# displacement relative to the end of the two-byte instruction.
jo .+2-0x70
jno .+2-0x70
jb .+2-0x70
jae .+2-0x70
je .+2-0x70
jne .+2-0x70
jbe .+2-0x70
ja .+2-0x70
js .+2-0x70
jns .+2-0x70
jp .+2-0x70
jnp .+2-0x70
jl .+2-0x70
jge .+2-0x70
jle .+2-0x70
jg .+2-0x70
adc byte ptr 0x90909090[eax], 0x90
adc dword ptr 0x90909090[eax], 0x90909090
adc dword ptr 0x90909090[eax], 0xffffff90
test 0x90909090[eax], dl
test 0x90909090[eax], edx
xchg 0x90909090[eax], dl
xchg 0x90909090[eax], edx
mov 0x90909090[eax], dl
mov 0x90909090[eax], edx
mov dl, 0x90909090[eax]
mov edx, 0x90909090[eax]
mov word ptr 0x90909090[eax], ss
lea edx, 0x90909090[eax]
mov ss, word ptr 0x90909090[eax]
pop dword ptr 0x90909090[eax]
# xchg r32, eax short forms (0x90-0x97); "xchg eax, eax" is nop.
xchg eax, eax
xchg ecx, eax
xchg edx, eax
xchg ebx, eax
xchg esp, eax
xchg ebp, eax
xchg esi, eax
xchg edi, eax
cwde
cdq
call 0x9090:0x90909090
fwait
pushf
popf
sahf
lahf
# FLAT:[disp] forces the moffs (absolute address) mov encodings.
mov al, FLAT:[0x90909090]
mov eax, FLAT:[0x90909090]
mov FLAT:[0x90909090], al
mov FLAT:[0x90909090], eax
movs byte ptr es:[edi], byte ptr ds:[esi]
movs dword ptr es:[edi], dword ptr ds:[esi]
cmps byte ptr ds:[esi], byte ptr es:[edi]
cmps dword ptr ds:[esi], dword ptr es:[edi]
test al, 0x90
test eax, 0x90909090
stos byte ptr es:[edi], al
stos dword ptr es:[edi], eax
lods al, byte ptr ds:[esi]
lods eax, dword ptr ds:[esi]
scas al, byte ptr es:[edi]
scas eax, dword ptr es:[edi]
# mov r8/r32, imm short forms (0xb0-0xbf).
mov al, 0x90
mov cl, 0x90
mov dl, 0x90
mov bl, 0x90
mov ah, 0x90
mov ch, 0x90
mov dh, 0x90
mov bh, 0x90
mov eax, 0x90909090
mov ecx, 0x90909090
mov edx, 0x90909090
mov ebx, 0x90909090
mov esp, 0x90909090
mov ebp, 0x90909090
mov esi, 0x90909090
mov edi, 0x90909090
rcl byte ptr 0x90909090[eax], 0x90
rcl dword ptr 0x90909090[eax], 0x90
ret 0x9090
ret
les edx, 0x90909090[eax]
lds edx, 0x90909090[eax]
mov byte ptr 0x90909090[eax], 0x90
mov dword ptr 0x90909090[eax], 0x90909090
enter 0x9090, 0x90
leave
retf 0x9090
retf
lret 0x9090
lret
int3
int 0x90
into
iret
# rcl with implicit count 1 and with cl count (group-2 /2 forms).
rcl byte ptr 0x90909090[eax]
rcl dword ptr 0x90909090[eax]
rcl byte ptr 0x90909090[eax], cl
rcl dword ptr 0x90909090[eax], cl
aam 0xffffff90
aad 0xffffff90
xlat byte ptr ds:[ebx]
# x87 escape opcodes: the "dword/qword/word ptr" size selects between
# the d8/dc (float) and da/de (integer) encodings.
fcom dword ptr 0x90909090[eax]
fst dword ptr 0x90909090[eax]
ficom dword ptr 0x90909090[eax]
fist dword ptr 0x90909090[eax]
fcom qword ptr 0x90909090[eax]
fst qword ptr 0x90909090[eax]
ficom word ptr 0x90909090[eax]
fist word ptr 0x90909090[eax]
loopne .+2-0x70
loope .+2-0x70
loop .+2-0x70
jecxz .+2-0x70
in al, 0x90
in eax, 0x90
out 0x90, al
out 0x90, eax
# Near rel32 call/jmp: ".+5+..." compensates for the 5-byte encoding.
call .+5+0x90909090
jmp .+5+0x90909090
jmp 0x9090:0x90909090
jmp .+2-0x70
in al, dx
in eax, dx
out dx, al
out dx, eax
hlt
cmc
not byte ptr 0x90909090[eax]
not dword ptr 0x90909090[eax]
clc
stc
cli
sti
cld
std
call dword ptr 0x90909090[eax]
lldt 0x90909090[eax]
lgdt 0x90909090[eax]
lar edx, 0x90909090[eax]
lsl edx, 0x90909090[eax]
clts
invd
wbinvd
ud2a
# Control, debug and (legacy) test register moves.
mov eax, cr2
mov eax, dr2
mov cr2, eax
mov dr2, eax
mov eax, tr2
mov tr2, eax
wrmsr
rdtsc
rdmsr
rdpmc
# All sixteen cmovcc forms with a memory source.
cmovo edx, 0x90909090[eax]
cmovno edx, 0x90909090[eax]
cmovb edx, 0x90909090[eax]
cmovae edx, 0x90909090[eax]
cmove edx, 0x90909090[eax]
cmovne edx, 0x90909090[eax]
cmovbe edx, 0x90909090[eax]
cmova edx, 0x90909090[eax]
cmovs edx, 0x90909090[eax]
cmovns edx, 0x90909090[eax]
cmovp edx, 0x90909090[eax]
cmovnp edx, 0x90909090[eax]
cmovl edx, 0x90909090[eax]
cmovge edx, 0x90909090[eax]
cmovle edx, 0x90909090[eax]
cmovg edx, 0x90909090[eax]
# MMX pack/unpack/compare group with memory operands.
punpcklbw mm2, 0x90909090[eax]
punpcklwd mm2, 0x90909090[eax]
punpckldq mm2, 0x90909090[eax]
packsswb mm2, 0x90909090[eax]
pcmpgtb mm2, 0x90909090[eax]
pcmpgtw mm2, 0x90909090[eax]
pcmpgtd mm2, 0x90909090[eax]
packuswb mm2, 0x90909090[eax]
punpckhbw mm2, 0x90909090[eax]
punpckhwd mm2, 0x90909090[eax]
punpckhdq mm2, 0x90909090[eax]
packssdw mm2, 0x90909090[eax]
movd mm2, 0x90909090[eax]
movq mm2, 0x90909090[eax]
psrlw mm0, 0x90
psrld mm0, 0x90
psrlq mm0, 0x90
pcmpeqb mm2, 0x90909090[eax]
pcmpeqw mm2, 0x90909090[eax]
pcmpeqd mm2, 0x90909090[eax]
emms
movd 0x90909090[eax], mm2
movq 0x90909090[eax], mm2
# All sixteen near (rel32) conditional jumps; 6-byte 0f 8x encoding.
jo .+6+0x90909090
jno .+6+0x90909090
jb .+6+0x90909090
jae .+6+0x90909090
je .+6+0x90909090
jne .+6+0x90909090
jbe .+6+0x90909090
ja .+6+0x90909090
js .+6+0x90909090
jns .+6+0x90909090
jp .+6+0x90909090
jnp .+6+0x90909090
jl .+6+0x90909090
jge .+6+0x90909090
jle .+6+0x90909090
jg .+6+0x90909090
# All sixteen setcc forms with a memory destination.
seto 0x90909090[eax]
setno 0x90909090[eax]
setb 0x90909090[eax]
setae 0x90909090[eax]
sete 0x90909090[eax]
setne 0x90909090[eax]
setbe 0x90909090[eax]
seta 0x90909090[eax]
sets 0x90909090[eax]
setns 0x90909090[eax]
setp 0x90909090[eax]
setnp 0x90909090[eax]
setl 0x90909090[eax]
setge 0x90909090[eax]
setle 0x90909090[eax]
setg 0x90909090[eax]
push fs
pop fs
cpuid
bt 0x90909090[eax], edx
shld 0x90909090[eax], edx, 0x90
shld 0x90909090[eax], edx, cl
push gs
pop gs
rsm
bts 0x90909090[eax], edx
shrd 0x90909090[eax], edx, 0x90
shrd 0x90909090[eax], edx, cl
imul edx, 0x90909090[eax]
cmpxchg 0x90909090[eax], dl
cmpxchg 0x90909090[eax], edx
lss edx, 0x90909090[eax]
btr 0x90909090[eax], edx
lfs edx, 0x90909090[eax]
lgs edx, 0x90909090[eax]
movzx edx, byte ptr 0x90909090[eax]
movzx edx, word ptr 0x90909090[eax]
ud2
btc 0x90909090[eax], edx
bsf edx, 0x90909090[eax]
bsr edx, 0x90909090[eax]
movsx edx, byte ptr 0x90909090[eax]
movsx edx, word ptr 0x90909090[eax]
xadd 0x90909090[eax], dl
xadd 0x90909090[eax], edx
# bswap short forms (0f c8+r) for every 32-bit reg.
bswap eax
bswap ecx
bswap edx
bswap ebx
bswap esp
bswap ebp
bswap esi
bswap edi
# MMX arithmetic/shift/logic group with memory operands.
psrlw mm2, 0x90909090[eax]
psrld mm2, 0x90909090[eax]
psrlq mm2, 0x90909090[eax]
pmullw mm2, 0x90909090[eax]
psubusb mm2, 0x90909090[eax]
psubusw mm2, 0x90909090[eax]
pand mm2, 0x90909090[eax]
paddusb mm2, 0x90909090[eax]
paddusw mm2, 0x90909090[eax]
pandn mm2, 0x90909090[eax]
psraw mm2, 0x90909090[eax]
psrad mm2, 0x90909090[eax]
pmulhw mm2, 0x90909090[eax]
psubsb mm2, 0x90909090[eax]
psubsw mm2, 0x90909090[eax]
por mm2, 0x90909090[eax]
paddsb mm2, 0x90909090[eax]
paddsw mm2, 0x90909090[eax]
pxor mm2, 0x90909090[eax]
psllw mm2, 0x90909090[eax]
pslld mm2, 0x90909090[eax]
psllq mm2, 0x90909090[eax]
pmaddwd mm2, 0x90909090[eax]
psubb mm2, 0x90909090[eax]
psubw mm2, 0x90909090[eax]
psubd mm2, 0x90909090[eax]
paddb mm2, 0x90909090[eax]
paddw mm2, 0x90909090[eax]
paddd mm2, 0x90909090[eax]
# 16-bit operand-size variants of the map above: the 16-bit register
# operands (dx/ax) must make the assembler emit the 0x66 operand-size
# prefix in 32-bit code.
add 0x90909090[eax], dx
add dx, 0x90909090[eax]
add ax, 0x9090
pushw es
popw es
or 0x90909090[eax], dx
or dx, 0x90909090[eax]
or ax, 0x9090
pushw cs
adc 0x90909090[eax], dx
adc dx, 0x90909090[eax]
adc ax, 0x9090
pushw ss
popw ss
sbb 0x90909090[eax], dx
sbb dx, 0x90909090[eax]
sbb ax, 0x9090
pushw ds
popw ds
and 0x90909090[eax], dx
and dx, 0x90909090[eax]
and ax, 0x9090
sub 0x90909090[eax], dx
sub dx, 0x90909090[eax]
sub ax, 0x9090
xor 0x90909090[eax], dx
xor dx, 0x90909090[eax]
xor ax, 0x9090
cmp 0x90909090[eax], dx
cmp dx, 0x90909090[eax]
cmp ax, 0x9090
# Short-form inc/dec/push/pop with 16-bit regs (0x66 prefixed).
inc ax
inc cx
inc dx
inc bx
inc sp
inc bp
inc si
inc di
dec ax
dec cx
dec dx
dec bx
dec sp
dec bp
dec si
dec di
push ax
push cx
push dx
push bx
push sp
push bp
push si
push di
pop ax
pop cx
pop dx
pop bx
pop sp
pop bp
pop si
pop di
pushaw # how should we specify a word push all regs?
popaw # ditto for popa
bound dx, 0x90909090[eax]
pushw 0x9090
imul dx, 0x90909090[eax], 0x9090
pushw 0xffffff90
imul dx, 0x90909090[eax], 0xffffff90
ins word ptr es:[edi], dx
outs dx, word ptr ds:[esi]
adc word ptr 0x90909090[eax], 0x9090
adc word ptr 0x90909090[eax], 0xffffff90
test 0x90909090[eax], dx
xchg 0x90909090[eax], dx
mov 0x90909090[eax], dx
mov dx, 0x90909090[eax]
mov word ptr 0x90909090[eax], ss
lea dx, 0x90909090[eax]
pop word ptr 0x90909090[eax]
xchg cx, ax
xchg dx, ax
xchg bx, ax
xchg sp, ax
xchg bp, ax
xchg si, ax
xchg di, ax
cbw
cwd
callw 0x9090:0x9090
pushfw
popfw
mov ax, FLAT:[0x90909090]
mov FLAT:[0x90909090], ax
movs word ptr es:[edi], word ptr ds:[esi]
cmps word ptr ds:[esi], word ptr es:[edi]
test ax, 0x9090
stos word ptr es:[edi], ax
lods ax, word ptr ds:[esi]
scas ax, word ptr es:[edi]
mov ax, 0x9090
mov cx, 0x9090
mov dx, 0x9090
mov bx, 0x9090
mov sp, 0x9090
mov bp, 0x9090
mov si, 0x9090
mov di, 0x9090
rcl word ptr 0x90909090[eax], 0x90
retw 0x9090
retw
les dx, 0x90909090[eax]
lds dx, 0x90909090[eax]
mov word ptr 0x90909090[eax], 0x9090
enterw 0x9090, 0x90
leavew
retfw 0x9090
retfw
lretw 0x9090
lretw
iretw
rcl word ptr 0x90909090[eax]
rcl word ptr 0x90909090[eax], cl
in ax, 0x90
out 0x90, ax
callw .+3+0x9090
jmpw 0x9090:0x9090
in ax, dx
out dx, ax
not word ptr 0x90909090[eax]
call word ptr 0x90909090[eax]
lar dx, 0x90909090[eax]
lsl dx, 0x90909090[eax]
# 16-bit cmovcc forms.
cmovo dx, 0x90909090[eax]
cmovno dx, 0x90909090[eax]
cmovb dx, 0x90909090[eax]
cmovae dx, 0x90909090[eax]
cmove dx, 0x90909090[eax]
cmovne dx, 0x90909090[eax]
cmovbe dx, 0x90909090[eax]
cmova dx, 0x90909090[eax]
cmovs dx, 0x90909090[eax]
cmovns dx, 0x90909090[eax]
cmovp dx, 0x90909090[eax]
cmovnp dx, 0x90909090[eax]
cmovl dx, 0x90909090[eax]
cmovge dx, 0x90909090[eax]
cmovle dx, 0x90909090[eax]
cmovg dx, 0x90909090[eax]
pushw fs
popw fs
bt 0x90909090[eax], dx
shld 0x90909090[eax], dx, 0x90
shld 0x90909090[eax], dx, cl
pushw gs
popw gs
bts 0x90909090[eax], dx
shrd 0x90909090[eax], dx, 0x90
shrd 0x90909090[eax], dx, cl
imul dx, 0x90909090[eax]
cmpxchg 0x90909090[eax], dx
lss dx, 0x90909090[eax]
btr 0x90909090[eax], dx
lfs dx, 0x90909090[eax]
lgs dx, 0x90909090[eax]
movzx dx, byte ptr 0x90909090[eax]
btc 0x90909090[eax], dx
bsf dx, 0x90909090[eax]
bsr dx, 0x90909090[eax]
movsx dx, byte ptr 0x90909090[eax]
xadd 0x90909090[eax], dx
# Local call targets for checking direct near-call relocations.
gs_foo:
ret
short_foo:
ret
bar:
call gs_foo
call short_foo
# Miscellaneous regression cases: upper/lower-case size keywords,
# OFFSET FLAT:, operand order in [base+index] forms, and expression
# parsing quirks of the Intel-syntax parser.
fstp QWORD PTR [eax+edx*8]
mov ecx, OFFSET FLAT:xyz
mov BYTE PTR [esi+edx], al
mov BYTE PTR [edx+esi], al
mov BYTE PTR [edx*2+esi], al
mov BYTE PTR [esi+edx*2], al
jmp short rot5
ins byte ptr es:[edi], dx
xadd 0x90909090[eax], dx
# Register operands spelled with a % prefix must still parse here.
and %eax, -8
rot5:
mov %eax, DWORD PTR [%esi+4+%ecx*8]
ins BYTE PTR es:[edi], dx
or al, 0x90
or eax, 0x90909090
push cs
mov eax, [ebx*2]
adc BYTE PTR [eax*4+0x90909090], dl
das
jmp 0x9090:0x90909090
movs WORD PTR es:[edi], WORD PTR ds:[esi]
jo .+2-0x70
# Numeric local label and backward reference.
1:
jne 1b
movq mm6, [QWORD PTR .LC5+40]
# Parenthesised constant arithmetic inside displacements.
add edi, dword ptr [ebx+8*eax]
movd mm0, dword ptr [ebx+8*eax+4]
add edi, dword ptr [ebx+8*ecx+((4095+1)*8)]
movd mm1, dword ptr [ebx+8*ecx+((4095+1)*8)+4]
movd mm2, dword ptr [ebx+8*eax+(2*(4095+1)*8)+4]
add edi, dword ptr [ebx+8*eax+(2*(4095+1)*8)]
mov ax, word ptr [ebx+2*eax]
mov cx, word ptr [ebx+2*ecx+((4095+1)*2)]
mov ax, word ptr [ebx+2*eax+(2*(4095+1)*2)]
# Indirect vs direct jumps: register, memory, absolute symbol, near.
jmp eax
jmp [eax]
jmp FLAT:[bar]
jmp bar
# Check arithmetic operators
mov %eax,(( 17 ) + 1)
and %eax,~(1 << ( 18 ))
and %eax,0xFFFBFFFF
mov %al, (( 0x4711 ) & 0xff)
mov %al, 0x11
mov %bl, ((( 0x4711 ) >> 8) & 0xff)
mov %bl, 0x47
shrd eax, edx, cl
shld eax, edx, cl
# x87 arithmetic: implicit, one-operand, two-operand, pop and memory
# forms of each of fadd/fdiv/fdivr/fmul/fsub/fsubr.
fadd
fadd st(3)
fadd st,st(3)
fadd st(3),st
fadd DWORD PTR [ebx]
fadd QWORD PTR [ebx]
faddp
faddp st(3)
faddp st(3),st
fdiv
fdiv st(3)
fdiv st,st(3)
fdiv st(3),st
fdiv DWORD PTR [ebx]
fdiv QWORD PTR [ebx]
fdivp
fdivp st(3)
fdivp st(3),st
fdiv st,st(3)
fdivr
fdivr st(3)
fdivr st,st(3)
fdivr st(3),st
fdivr DWORD PTR [ebx]
fdivr QWORD PTR [ebx]
fdivrp
fdivrp st(3)
fdivrp st(3),st
fdivr st,st(3)
fmul
fmul st(3)
fmul st,st(3)
fmul st(3),st
fmul DWORD PTR [ebx]
fmul QWORD PTR [ebx]
fmulp
fmulp st(3)
fmulp st(3),st
fsub
fsubr
fsub st(3)
fsub st,st(3)
fsub st(3),st
fsub DWORD PTR [ebx]
fsub QWORD PTR [ebx]
fsubp
fsubp st(3)
fsub st,st(3)
fsubp st(3),st
fsubr st(3)
fsubr st,st(3)
fsubr st(3),st
fsubr DWORD PTR [ebx]
fsubr QWORD PTR [ebx]
fsubrp
fsubrp st(3)
fsubrp st(3),st
fsubr st,st(3)
fidivr word ptr [ebx]
fidivr dword ptr [ebx]
cmovpe edx, 0x90909090[eax]
cmovpo edx, 0x90909090[eax]
cmovpe dx, 0x90909090[eax]
cmovpo dx, 0x90909090[eax]
# Check base/index swapping
.allow_index_reg
mov eax, [eax+esp]
mov eax, [eiz+eax]
vgatherdps xmm0, [xmm1+eax], xmm2
# Test that disassembly of a partial instruction shows the partial byte:
# https://www.sourceware.org/ml/binutils/2015-08/msg00226.html
.byte 0x24
.byte 0x2f
.byte 0x0f
barn:
.byte 0x0f
.byte 0xba
.byte 0xe2
.byte 0x03
# ======================================================================
# repo: tactcomplabs/xbgas-binutils-gdb  (3,004 bytes)
# file: gas/testsuite/gas/i386/bundle.s
# ======================================================================
# Exercise .bundle_align_mode: with a 2^5 = 32-byte bundle size the
# assembler must pad so that no instruction crosses a 32-byte boundary.
.bundle_align_mode 5
# We use these macros to test each pattern at every offset from
# bundle alignment, i.e. [0,31].
# offset_insn: align to a fresh 32-byte bundle, emit OFFSET bytes of
# 0xf4 (hlt) filler, then expand the named instruction macro so it
# starts exactly OFFSET bytes into the bundle.
.macro offset_insn insn_name, offset
.p2align 5
\insn_name\()_offset_\offset\():
.if \offset
.space \offset, 0xf4
.endif
\insn_name
.endm
# test_offsets: run offset_insn at every offset 0..31 (all positions
# within one 32-byte bundle).
.macro test_offsets insn_name
offset_insn \insn_name, 0
offset_insn \insn_name, 1
offset_insn \insn_name, 2
offset_insn \insn_name, 3
offset_insn \insn_name, 4
offset_insn \insn_name, 5
offset_insn \insn_name, 6
offset_insn \insn_name, 7
offset_insn \insn_name, 8
offset_insn \insn_name, 9
offset_insn \insn_name, 10
offset_insn \insn_name, 11
offset_insn \insn_name, 12
offset_insn \insn_name, 13
offset_insn \insn_name, 14
offset_insn \insn_name, 15
offset_insn \insn_name, 16
offset_insn \insn_name, 17
offset_insn \insn_name, 18
offset_insn \insn_name, 19
offset_insn \insn_name, 20
offset_insn \insn_name, 21
offset_insn \insn_name, 22
offset_insn \insn_name, 23
offset_insn \insn_name, 24
offset_insn \insn_name, 25
offset_insn \insn_name, 26
offset_insn \insn_name, 27
offset_insn \insn_name, 28
offset_insn \insn_name, 29
offset_insn \insn_name, 30
offset_insn \insn_name, 31
.endm
# These are vanilla (non-relaxed) instructions of each length.
# test_N encodes to exactly N bytes.
.macro test_1
inc %eax
.endm
.macro test_2
add %eax,%eax
.endm
.macro test_3
and $3,%eax
.endm
.macro test_4
lock andl $3,(%eax)
.endm
.macro test_5
mov $0xaabbccdd,%eax
.endm
.macro test_6
movl %eax,0xaabbccdd(%esi)
.endm
.macro test_7
movl $0xaabbccdd,0x7f(%esi)
.endm
.macro test_8
lock addl $0xaabbccdd,0x10(%esi)
.endm
.macro test_9
lock addl $0xaabbccdd,%fs:0x10(%esi)
.endm
.macro test_10
movl $0xaabbccdd,0x7ff(%esi)
.endm
.macro test_11
lock addl $0xaabbccdd,0x7ff(%esi)
.endm
.macro test_12
lock addl $0xaabbccdd,%fs:0x7ff(%esi)
.endm
test_offsets test_1
test_offsets test_2
test_offsets test_3
test_offsets test_4
test_offsets test_5
test_offsets test_6
test_offsets test_7
test_offsets test_8
test_offsets test_9
test_offsets test_10
test_offsets test_11
test_offsets test_12
# The only relaxation cases are the jump instructions.
# For each of the three flavors of jump (unconditional, conditional,
# and conditional with prediction), we test a case that can be relaxed
# to its shortest form, and one that must use the long form.
# \@ is gas's per-invocation macro counter, giving each expansion a
# unique label; the .rept 128 body forces the target out of rel8 range.
.macro jmp_2
jmp jmp_2_\@
movl $0xdeadbeef,%eax
jmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro jmp_5
jmp jmp_5_\@
.rept 128
inc %eax
.endr
jmp_5_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_2
jz cjmp_2_\@
movl $0xdeadbeef,%eax
cjmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_6
jz cjmp_6_\@
.rept 128
inc %eax
.endr
cjmp_6_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_3
jz,pt pjmp_3_\@
movl $0xdeadbeef,%eax
pjmp_3_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_7
jz,pt pjmp_7_\@
.rept 128
inc %eax
.endr
pjmp_7_\@\():
movl $0xb00b,%eax
.endm
test_offsets jmp_2
test_offsets cjmp_2
test_offsets pjmp_3
test_offsets jmp_5
test_offsets cjmp_6
test_offsets pjmp_7
.p2align 5
hlt
# ======================================================================
# repo: tactcomplabs/xbgas-binutils-gdb  (55,377 bytes)
# file: gas/testsuite/gas/i386/x86-64-avx512bw-wig.s
# ======================================================================
# Check 64bit AVX512BW WIG instructions
.allow_index_reg
.text
_start:
vpabsb %zmm29, %zmm30 # AVX512BW
vpabsb %zmm29, %zmm30{%k7} # AVX512BW
vpabsb %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsb (%rcx), %zmm30 # AVX512BW
vpabsb 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsb 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsb 8192(%rdx), %zmm30 # AVX512BW
vpabsb -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsb -8256(%rdx), %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30{%k7} # AVX512BW
vpabsw %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsw (%rcx), %zmm30 # AVX512BW
vpabsw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsw 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsw 8192(%rdx), %zmm30 # AVX512BW
vpabsw -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsw -8256(%rdx), %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpacksswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpacksswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpacksswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpacksswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackuswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpackuswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackuswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackuswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpalignr $123, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $123, (%rcx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgb (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgw (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpextrb $0xab, %xmm29, %rax # AVX512BW
vpextrb $123, %xmm29, %rax # AVX512BW
vpextrb $123, %xmm29, %r8 # AVX512BW
vpextrb $123, %xmm29, (%rcx) # AVX512BW
vpextrb $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrb $123, %xmm29, 127(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, 128(%rdx) # AVX512BW
vpextrb $123, %xmm29, -128(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, -129(%rdx) # AVX512BW
vpextrw $123, %xmm29, (%rcx) # AVX512BW
vpextrw $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrw $123, %xmm29, 254(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, 256(%rdx) # AVX512BW
vpextrw $123, %xmm29, -256(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, -258(%rdx) # AVX512BW
vpextrw $0xab, %xmm30, %rax # AVX512BW
vpextrw $123, %xmm30, %rax # AVX512BW
vpextrw $123, %xmm30, %r8 # AVX512BW
vpinsrb $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %r13d, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 127(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, 128(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, -128(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, -129(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %r13d, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 254(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, 256(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, -256(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, -258(%rdx), %xmm29, %xmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxub (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminub (%rcx), %zmm29, %zmm30 # AVX512BW
vpminub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmovsxbw %ymm29, %zmm30 # AVX512BW
vpmovsxbw %ymm29, %zmm30{%k7} # AVX512BW
vpmovsxbw %ymm29, %zmm30{%k7}{z} # AVX512BW
vpmovsxbw (%rcx), %zmm30 # AVX512BW
vpmovsxbw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpmovsxbw 4064(%rdx), %zmm30 # AVX512BW Disp8
vpmovsxbw 4096(%rdx), %zmm30 # AVX512BW
vpmovsxbw -4096(%rdx), %zmm30 # AVX512BW Disp8
vpmovsxbw -4128(%rdx), %zmm30 # AVX512BW
vpmovzxbw %ymm29, %zmm30 # AVX512BW
vpmovzxbw %ymm29, %zmm30{%k7} # AVX512BW
vpmovzxbw %ymm29, %zmm30{%k7}{z} # AVX512BW
vpmovzxbw (%rcx), %zmm30 # AVX512BW
vpmovzxbw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpmovzxbw 4064(%rdx), %zmm30 # AVX512BW Disp8
vpmovzxbw 4096(%rdx), %zmm30 # AVX512BW
vpmovzxbw -4096(%rdx), %zmm30 # AVX512BW Disp8
vpmovzxbw -4128(%rdx), %zmm30 # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhrsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhrsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhrsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhrsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhrsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhrsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmullw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmullw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmullw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmullw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmullw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmullw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsadbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsadbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsadbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsadbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsadbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsadbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsadbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30 # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshufb (%rcx), %zmm29, %zmm30 # AVX512BW
vpshufb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpshufb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpshufb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpshufb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30 # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshufhw $123, %zmm29, %zmm30 # AVX512BW
vpshufhw $123, (%rcx), %zmm30 # AVX512BW
vpshufhw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpshufhw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpshufhw $123, 8192(%rdx), %zmm30 # AVX512BW
vpshufhw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpshufhw $123, -8256(%rdx), %zmm30 # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30 # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshuflw $123, %zmm29, %zmm30 # AVX512BW
vpshuflw $123, (%rcx), %zmm30 # AVX512BW
vpshuflw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpshuflw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpshuflw $123, 8192(%rdx), %zmm30 # AVX512BW
vpshuflw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpshuflw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsllw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsllw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsllw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsllw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsllw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsllw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsraw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsraw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsraw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsraw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsraw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsraw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsrlw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsrlw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrldq $0xab, %zmm29, %zmm30 # AVX512BW
vpsrldq $123, %zmm29, %zmm30 # AVX512BW
vpsrldq $123, (%rcx), %zmm30 # AVX512BW
vpsrldq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsrldq $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsrldq $123, 8192(%rdx), %zmm30 # AVX512BW
vpsrldq $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsrldq $123, -8256(%rdx), %zmm30 # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30 # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlw $123, %zmm29, %zmm30 # AVX512BW
vpsrlw $123, (%rcx), %zmm30 # AVX512BW
vpsrlw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsrlw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsrlw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsrlw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsrlw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsraw $0xab, %zmm29, %zmm30 # AVX512BW
vpsraw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsraw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsraw $123, %zmm29, %zmm30 # AVX512BW
vpsraw $123, (%rcx), %zmm30 # AVX512BW
vpsraw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsraw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsraw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsraw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsraw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpckhbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpckhbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpckhwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpckhwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpcklbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpcklbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpcklwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpcklwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpslldq $0xab, %zmm29, %zmm30 # AVX512BW
vpslldq $123, %zmm29, %zmm30 # AVX512BW
vpslldq $123, (%rcx), %zmm30 # AVX512BW
vpslldq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpslldq $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpslldq $123, 8192(%rdx), %zmm30 # AVX512BW
vpslldq $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpslldq $123, -8256(%rdx), %zmm30 # AVX512BW
vpsllw $0xab, %zmm29, %zmm30 # AVX512BW
vpsllw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsllw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsllw $123, %zmm29, %zmm30 # AVX512BW
vpsllw $123, (%rcx), %zmm30 # AVX512BW
vpsllw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsllw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsllw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsllw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsllw $123, -8256(%rdx), %zmm30 # AVX512BW
.intel_syntax noprefix
vpabsb zmm30, zmm29 # AVX512BW
vpabsb zmm30{k7}, zmm29 # AVX512BW
vpabsb zmm30{k7}{z}, zmm29 # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpabsb zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpabsb zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpabsb zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpabsw zmm30, zmm29 # AVX512BW
vpabsw zmm30{k7}, zmm29 # AVX512BW
vpabsw zmm30{k7}{z}, zmm29 # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpabsw zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpabsw zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpabsw zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpacksswb zmm30, zmm29, zmm28 # AVX512BW
vpacksswb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpacksswb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpackuswb zmm30, zmm29, zmm28 # AVX512BW
vpackuswb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpackuswb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddb zmm30, zmm29, zmm28 # AVX512BW
vpaddb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddsb zmm30, zmm29, zmm28 # AVX512BW
vpaddsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddsw zmm30, zmm29, zmm28 # AVX512BW
vpaddsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddusb zmm30, zmm29, zmm28 # AVX512BW
vpaddusb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddusb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddusw zmm30, zmm29, zmm28 # AVX512BW
vpaddusw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddusw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpaddw zmm30, zmm29, zmm28 # AVX512BW
vpaddw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpaddw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpaddw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpalignr zmm30, zmm29, zmm28, 0xab # AVX512BW
vpalignr zmm30{k7}, zmm29, zmm28, 0xab # AVX512BW
vpalignr zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512BW
vpalignr zmm30, zmm29, zmm28, 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpalignr zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpavgb zmm30, zmm29, zmm28 # AVX512BW
vpavgb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpavgb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpavgb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpavgw zmm30, zmm29, zmm28 # AVX512BW
vpavgw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpavgw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpavgw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpeqb k5, zmm30, zmm29 # AVX512BW
vpcmpeqb k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpeqw k5, zmm30, zmm29 # AVX512BW
vpcmpeqw k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpgtb k5, zmm30, zmm29 # AVX512BW
vpcmpgtb k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpgtw k5, zmm30, zmm29 # AVX512BW
vpcmpgtw k5{k7}, zmm30, zmm29 # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpextrb rax, xmm29, 0xab # AVX512BW
vpextrb rax, xmm29, 123 # AVX512BW
vpextrb r8, xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rcx], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rdx+127], xmm29, 123 # AVX512BW Disp8
vpextrb BYTE PTR [rdx+128], xmm29, 123 # AVX512BW
vpextrb BYTE PTR [rdx-128], xmm29, 123 # AVX512BW Disp8
vpextrb BYTE PTR [rdx-129], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rcx], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rdx+254], xmm29, 123 # AVX512BW Disp8
vpextrw WORD PTR [rdx+256], xmm29, 123 # AVX512BW
vpextrw WORD PTR [rdx-256], xmm29, 123 # AVX512BW Disp8
vpextrw WORD PTR [rdx-258], xmm29, 123 # AVX512BW
vpextrw rax, xmm30, 0xab # AVX512BW
vpextrw rax, xmm30, 123 # AVX512BW
vpextrw r8, xmm30, 123 # AVX512BW
vpinsrb xmm30, xmm29, eax, 0xab # AVX512BW
vpinsrb xmm30, xmm29, eax, 123 # AVX512BW
vpinsrb xmm30, xmm29, ebp, 123 # AVX512BW
vpinsrb xmm30, xmm29, r13d, 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rcx], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rdx+127], 123 # AVX512BW Disp8
vpinsrb xmm30, xmm29, BYTE PTR [rdx+128], 123 # AVX512BW
vpinsrb xmm30, xmm29, BYTE PTR [rdx-128], 123 # AVX512BW Disp8
vpinsrb xmm30, xmm29, BYTE PTR [rdx-129], 123 # AVX512BW
vpinsrw xmm30, xmm29, eax, 0xab # AVX512BW
vpinsrw xmm30, xmm29, eax, 123 # AVX512BW
vpinsrw xmm30, xmm29, ebp, 123 # AVX512BW
vpinsrw xmm30, xmm29, r13d, 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rcx], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rdx+254], 123 # AVX512BW Disp8
vpinsrw xmm30, xmm29, WORD PTR [rdx+256], 123 # AVX512BW
vpinsrw xmm30, xmm29, WORD PTR [rdx-256], 123 # AVX512BW Disp8
vpinsrw xmm30, xmm29, WORD PTR [rdx-258], 123 # AVX512BW
vpmaddubsw zmm30, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaddwd zmm30, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxsb zmm30, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxsw zmm30, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxub zmm30, zmm29, zmm28 # AVX512BW
vpmaxub zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxub zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxub zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmaxuw zmm30, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmaxuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminsb zmm30, zmm29, zmm28 # AVX512BW
vpminsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminsw zmm30, zmm29, zmm28 # AVX512BW
vpminsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminub zmm30, zmm29, zmm28 # AVX512BW
vpminub zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminub zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminub zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminub zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminub zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpminuw zmm30, zmm29, zmm28 # AVX512BW
vpminuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpminuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpminuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovsxbw zmm30, ymm29 # AVX512BW
vpmovsxbw zmm30{k7}, ymm29 # AVX512BW
vpmovsxbw zmm30{k7}{z}, ymm29 # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rcx] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rdx+4064] # AVX512BW Disp8
vpmovsxbw zmm30, YMMWORD PTR [rdx+4096] # AVX512BW
vpmovsxbw zmm30, YMMWORD PTR [rdx-4096] # AVX512BW Disp8
vpmovsxbw zmm30, YMMWORD PTR [rdx-4128] # AVX512BW
vpmovzxbw zmm30, ymm29 # AVX512BW
vpmovzxbw zmm30{k7}, ymm29 # AVX512BW
vpmovzxbw zmm30{k7}{z}, ymm29 # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rcx] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rdx+4064] # AVX512BW Disp8
vpmovzxbw zmm30, YMMWORD PTR [rdx+4096] # AVX512BW
vpmovzxbw zmm30, YMMWORD PTR [rdx-4096] # AVX512BW Disp8
vpmovzxbw zmm30, YMMWORD PTR [rdx-4128] # AVX512BW
vpmulhrsw zmm30, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmulhuw zmm30, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmulhw zmm30, zmm29, zmm28 # AVX512BW
vpmulhw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmulhw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmulhw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmullw zmm30, zmm29, zmm28 # AVX512BW
vpmullw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpmullw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpmullw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsadbw zmm30, zmm29, zmm28 # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpshufb zmm30, zmm29, zmm28 # AVX512BW
vpshufb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpshufb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpshufb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpshufhw zmm30, zmm29, 0xab # AVX512BW
vpshufhw zmm30{k7}, zmm29, 0xab # AVX512BW
vpshufhw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpshufhw zmm30, zmm29, 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpshufhw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpshufhw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpshufhw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpshuflw zmm30, zmm29, 0xab # AVX512BW
vpshuflw zmm30{k7}, zmm29, 0xab # AVX512BW
vpshuflw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpshuflw zmm30, zmm29, 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpshuflw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpshuflw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpshuflw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, xmm28 # AVX512BW
vpsllw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsllw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsllw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsllw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsraw zmm30, zmm29, xmm28 # AVX512BW
vpsraw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsraw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsraw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsraw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsraw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsrlw zmm30, zmm29, xmm28 # AVX512BW
vpsrlw zmm30{k7}, zmm29, xmm28 # AVX512BW
vpsrlw zmm30{k7}{z}, zmm29, xmm28 # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rcx] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx+2032] # AVX512BW Disp8
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx+2048] # AVX512BW
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx-2048] # AVX512BW Disp8
vpsrlw zmm30, zmm29, XMMWORD PTR [rdx-2064] # AVX512BW
vpsrldq zmm30, zmm29, 0xab # AVX512BW
vpsrldq zmm30, zmm29, 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsrldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsrldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsrldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsrlw zmm30, zmm29, 0xab # AVX512BW
vpsrlw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsrlw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsrlw zmm30, zmm29, 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsrlw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsrlw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsrlw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsraw zmm30, zmm29, 0xab # AVX512BW
vpsraw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsraw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsraw zmm30, zmm29, 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsraw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsraw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsraw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsubb zmm30, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsb zmm30, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsw zmm30, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusb zmm30, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusw zmm30, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubw zmm30, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhbw zmm30, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhwd zmm30, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklbw zmm30, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklwd zmm30, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpslldq zmm30, zmm29, 0xab # AVX512BW
vpslldq zmm30, zmm29, 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsllw zmm30, zmm29, 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
# ---- file boundary: gas/testsuite/gas/i386/x86-64-adx.s (repo tactcomplabs/xbgas-binutils-gdb, 1,179 bytes) ----
# Check 64 bit ADX instructions.
# ADCX/ADOX (Intel ADX extension) perform unsigned add-with-carry using
# only CF (ADCX) or only OF (ADOX) as the carry chain.
.allow_index_reg
.text
_start:
# AT&T syntax, 32-bit operands; %e.. base/index registers exercise the
# 0x67 address-size prefix in 64-bit mode.
adcx 400(%ecx), %eax
adcx %edx, %ecx
adcx -654321(%esp,%esi,8), %edx
adcx (%eax), %eax
# Explicitly suffixed 'l' forms.
adcxl %edx, %ecx
adcxl (%eax), %eax
# 64-bit operand forms, including REX-extended registers.
adcx 400(%rcx), %r11
adcx %r14, %r12
adcx -654321(%esp,%esi,8), %rdx
adcx (%r8), %rax
# Explicitly suffixed 'q' forms.
adcxq %rdx, %rcx
adcxq (%rax), %rax
# ADOX mirrors the ADCX coverage above.
adox 400(%ecx), %eax
adox %edx, %ecx
adox -654321(%esp,%esi,8), %edx
adox (%eax), %eax
adoxl %edx, %ecx
adoxl (%eax), %eax
adox 400(%rcx), %r11
adox %r14, %r12
adox -654321(%esp,%esi,8), %rdx
adox (%r8), %rax
adoxq %rdx, %rcx
adoxq (%rax), %rax
# Same coverage again in Intel syntax.
.intel_syntax noprefix
adcx eax, DWORD PTR [edx+399]
adcx edx, ecx
adcx edx, DWORD PTR [esp+esi*8-123456]
adcx eax, DWORD PTR [eax]
adcx rax, QWORD PTR [r11+399]
adcx rdx, r9
adcx rdx, QWORD PTR [rsp+rsi*8-123456]
adcx rax, [rbx]
adox eax, DWORD PTR [edx+399]
adox edx, ecx
adox edx, DWORD PTR [esp+esi*8-123456]
adox eax, DWORD PTR [eax]
adox rax, QWORD PTR [r11+399]
adox rdx, r9
adox rdx, QWORD PTR [rsp+rsi*8-123456]
adox rax, QWORD PTR [rbx]
# ---- file boundary: gas/testsuite/gas/i386/x86-64-vex-lig-2.s (repo tactcomplabs/xbgas-binutils-gdb, 2,094 bytes) ----
# Check VEX non-LIG instructions with -mavxscalar=256
# These instructions have a fixed vector-length (VEX.L) bit, so the
# -mavxscalar=256 option must NOT widen their encodings.
.allow_index_reg
.text
_start:
# GPR<->XMM moves.  {vex3} forces the 3-byte VEX prefix; {store} selects
# the store-direction opcode for register-to-register forms.
vmovd %eax, %xmm0
vmovd (%rax), %xmm0
{vex3} vmovd %eax, %xmm0
{vex3} vmovd (%rax), %xmm0
vmovd %xmm0, %eax
vmovd %xmm0, (%rax)
{vex3} vmovd %xmm0, %eax
{vex3} vmovd %xmm0, (%rax)
vmovq %xmm0, %xmm0
vmovq (%rax), %xmm0
{vex3} vmovq %xmm0, %xmm0
{vex3} vmovq (%rax), %xmm0
{store} vmovq %xmm0, %xmm0
vmovq %xmm0, (%rax)
{vex3} {store} vmovq %xmm0, %xmm0
{vex3} vmovq %xmm0, (%rax)
# Extract forms (register and memory destinations).
vextractps $0, %xmm0, %eax
vextractps $0, %xmm0, (%rax)
vpextrb $0, %xmm0, %eax
vpextrb $0, %xmm0, (%rax)
vpextrw $0, %xmm0, %eax
{vex3} vpextrw $0, %xmm0, %eax
{store} vpextrw $0, %xmm0, %eax
vpextrw $0, %xmm0, (%rax)
vpextrd $0, %xmm0, %eax
vpextrd $0, %xmm0, (%rax)
vpextrq $0, %xmm0, %rax
vpextrq $0, %xmm0, (%rax)
# Insert forms.
vinsertps $0, %xmm0, %xmm0, %xmm0
vinsertps $0, (%rax), %xmm0, %xmm0
vpinsrb $0, %eax, %xmm0, %xmm0
vpinsrb $0, (%rax), %xmm0, %xmm0
vpinsrw $0, %eax, %xmm0, %xmm0
vpinsrw $0, (%rax), %xmm0, %xmm0
{vex3} vpinsrw $0, %eax, %xmm0, %xmm0
{vex3} vpinsrw $0, (%rax), %xmm0, %xmm0
vpinsrd $0, %eax, %xmm0, %xmm0
vpinsrd $0, (%rax), %xmm0, %xmm0
vpinsrq $0, %rax, %xmm0, %xmm0
vpinsrq $0, (%rax), %xmm0, %xmm0
# MXCSR load/store.
vldmxcsr (%rax)
vstmxcsr (%rax)
{vex3} vldmxcsr (%rax)
{vex3} vstmxcsr (%rax)
# BMI/BMI2 instructions (VEX-encoded GPR ops, also non-LIG).
andn (%rax), %eax, %eax
bextr %eax, (%rax), %eax
blsi (%rax), %eax
blsmsk (%rax), %eax
blsr (%rax), %eax
bzhi %eax, (%rax), %eax
mulx (%rax), %eax, %eax
pdep (%rax), %eax, %eax
pext (%rax), %eax, %eax
rorx $0, (%rax), %eax
sarx %eax, (%rax), %eax
shlx %eax, (%rax), %eax
shrx %eax, (%rax), %eax
# TBM (trailing-bit-manipulation) forms, including immediate bextr.
bextr $0, (%rax), %eax
blcfill (%rax), %eax
blci (%rax), %eax
blcic (%rax), %eax
blcmsk (%rax), %eax
blcs (%rax), %eax
blsfill (%rax), %eax
blsic (%rax), %eax
t1mskc (%rax), %eax
tzmsk (%rax), %eax
# ---- file boundary: gas/testsuite/gas/i386/avx512f-nondef.s (repo tactcomplabs/xbgas-binutils-gdb, 1,369 bytes) ----
# Check that objdump disassembles correctly when some bits in an
# instruction have non-default values.  Each case hand-encodes an EVEX
# instruction via .byte so bits the assembler would never emit can be set.
# vrndscalesd {sae}, $123, %xmm4, %xmm5, %xmm6{%k7} # with null RC
.byte 0x62, 0xf3, 0xd5, 0x1f, 0x0b, 0xf4, 0x7b
# vrndscalesd {sae}, $123, %xmm4, %xmm5, %xmm6{%k7} # with not-null RC
.byte 0x62, 0xf3, 0xd5, 0x5f, 0x0b, 0xf4, 0x7b
# vpminud %zmm4, %zmm5, %zmm6{%k7} # with 11 EVEX.{B,R'}
.byte 0x62, 0xf2, 0x55, 0x4f, 0x3b, 0xf4
# vpminud %zmm4, %zmm5, %zmm6{%k7} # with not-11 EVEX.{B,R'}
.byte 0x62, 0xc2, 0x55, 0x4f, 0x3b, 0xf4
# vpminud %zmm4, %zmm5, %zmm6{%k7} # with set EVEX.b bit
.byte 0x62, 0xf2, 0x55, 0x1f, 0x3b, 0xf4
# vpmovdb %zmm6, 2032(%rdx) # with unset EVEX.b bit
.byte 0x62, 0xf2, 0x7e, 0x48, 0x31, 0x72, 0x7f
# vpmovdb %zmm6, 2032(%rdx) # with set EVEX.b bit - we should get (bad) operand
.byte 0x62, 0xf2, 0x7e, 0x58, 0x31, 0x72, 0x7f
# vaddps xmm0, xmm0, xmm3 # with EVEX.z set
.byte 0x62, 0xf1, 0x7c, 0x88, 0x58, 0xc3
# vgatherdps (%ecx), %zmm0{%k7} # without SIB / index register
.byte 0x62, 0xf2, 0x7d, 0x4f, 0x92, 0x01
# vgatherdps (%bx,%xmm?), %zmm0{%k7} # with 16-bit addressing
.byte 0x67, 0x62, 0xf2, 0x7d, 0x4f, 0x92, 0x01
# vgatherdps (%eax,%zmm1), %zmm0{%k7}{z} # with set EVEX.z
.byte 0x62, 0xf2, 0x7d, 0xcf, 0x92, 0x04, 0x08
# vgatherdps (%eax,%zmm1), %zmm0 # without actual mask register
.byte 0x62, 0xf2, 0x7d, 0x48, 0x92, 0x04, 0x08
# ---- file boundary: gas/testsuite/gas/i386/avx512bw-opts.s (repo tactcomplabs/xbgas-binutils-gdb, 1,996 bytes) ----
# Check 32bit AVX512BW swap instructions
# Each plain mnemonic is paired with its '.s' form, which requests the
# alternative (operand-swapped) encoding of the same register-to-register
# move; both must disassemble identically.
.allow_index_reg
.text
_start:
# AT&T syntax: plain, {%k7}-masked, and {%k7}{z} zero-masked variants.
vmovdqu8 %zmm5, %zmm6 # AVX512BW
vmovdqu8.s %zmm5, %zmm6 # AVX512BW
vmovdqu8 %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu8.s %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu8 %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu8.s %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu8 %zmm5, %zmm6 # AVX512BW
vmovdqu8.s %zmm5, %zmm6 # AVX512BW
vmovdqu8 %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu8.s %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu8 %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu8.s %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu16 %zmm5, %zmm6 # AVX512BW
vmovdqu16.s %zmm5, %zmm6 # AVX512BW
vmovdqu16 %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu16.s %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu16 %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu16.s %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu16 %zmm5, %zmm6 # AVX512BW
vmovdqu16.s %zmm5, %zmm6 # AVX512BW
vmovdqu16 %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu16.s %zmm5, %zmm6{%k7} # AVX512BW
vmovdqu16 %zmm5, %zmm6{%k7}{z} # AVX512BW
vmovdqu16.s %zmm5, %zmm6{%k7}{z} # AVX512BW
# Same coverage again in Intel syntax.
.intel_syntax noprefix
vmovdqu8 zmm6, zmm5 # AVX512BW
vmovdqu8.s zmm6, zmm5 # AVX512BW
vmovdqu8 zmm6{k7}, zmm5 # AVX512BW
vmovdqu8.s zmm6{k7}, zmm5 # AVX512BW
vmovdqu8 zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu8.s zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu8 zmm6, zmm5 # AVX512BW
vmovdqu8.s zmm6, zmm5 # AVX512BW
vmovdqu8 zmm6{k7}, zmm5 # AVX512BW
vmovdqu8.s zmm6{k7}, zmm5 # AVX512BW
vmovdqu8 zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu8.s zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu16 zmm6, zmm5 # AVX512BW
vmovdqu16.s zmm6, zmm5 # AVX512BW
vmovdqu16 zmm6{k7}, zmm5 # AVX512BW
vmovdqu16.s zmm6{k7}, zmm5 # AVX512BW
vmovdqu16 zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu16.s zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu16 zmm6, zmm5 # AVX512BW
vmovdqu16.s zmm6, zmm5 # AVX512BW
vmovdqu16 zmm6{k7}, zmm5 # AVX512BW
vmovdqu16.s zmm6{k7}, zmm5 # AVX512BW
vmovdqu16 zmm6{k7}{z}, zmm5 # AVX512BW
vmovdqu16.s zmm6{k7}{z}, zmm5 # AVX512BW
# ---- file boundary: gas/testsuite/gas/i386/wrap32.s (repo tactcomplabs/xbgas-binutils-gdb, 1,387 bytes) ----
.text
# Test 32-bit wrap-around of immediates, displacements and data directives.
# Each pair of lines uses two expressions that are congruent modulo 2^32
# (e.g. -0x100 == +0xffffff00), so both must assemble identically without
# overflow diagnostics.  'val' is an absolute symbol (.equ, defined at the
# bottom, after use); 'sym' is never defined here — presumably external,
# so those expressions go through relocations (TODO confirm against the
# expected-output file).
wrap:
mov $500 - 0x100, %eax
mov $500 + 0xffffff00, %edx
mov $val - 0x100, %eax
mov $val + 0xffffff00, %edx
mov $sym - 0x100, %eax
mov $sym + 0xffffff00, %edx
mov $sym + 500 - 0x100, %eax
mov $sym + 500 + 0xffffff00, %edx
movl $500 - 0x100, (%eax)
movl $500 + 0xffffff00, (%edx)
movl $val - 0x100, (%eax)
movl $val + 0xffffff00, (%edx)
movl $sym - 0x100, (%eax)
movl $sym + 0xffffff00, (%edx)
movl $sym + 500 - 0x100, (%eax)
movl $sym + 500 + 0xffffff00, (%edx)
add $500 - 0x100, %ecx
add $500 + 0xffffff00, %edx
add $val - 0x100, %ecx
add $val + 0xffffff00, %edx
add $sym - 0x100, %ecx
add $sym + 0xffffff00, %edx
add $sym + 500 - 0x100, %ecx
add $sym + 500 + 0xffffff00, %edx
addl $500 - 0x100, (%eax)
addl $500 + 0xffffff00, (%edx)
addl $val - 0x100, (%eax)
addl $val + 0xffffff00, (%edx)
addl $sym - 0x100, (%eax)
addl $sym + 0xffffff00, (%edx)
addl $sym + 500 - 0x100, (%eax)
addl $sym + 500 + 0xffffff00, (%edx)
ret
.data
# Same expression pairs emitted as data via .long.
.long 500 - 0x100
.long 500 + 0xffffff00
.long val - 0x100
.long val + 0xffffff00
.long sym - 0x100
.long sym + 0xffffff00
.long sym + 500 - 0x100
.long sym + 500 + 0xffffff00
# .slong (signed long) with large-magnitude offsets.  NOTE(review): these
# pairs are NOT congruent mod 2^32; they appear to probe signed-range
# acceptance/truncation — verify against the test's expected output.
.slong 500 - 0x8fffff00
.slong 500 + 0x7fffff00
.slong val - 0x8fffff00
.slong val + 0x7fffff00
.slong sym - 0x8fffff00
.slong sym + 0x7fffff00
.slong sym + 500 - 0x8fffff00
.slong sym + 500 + 0x7fffff00
# Absolute symbol used in the expressions above; deliberately defined last.
.equ val, 400
# ---- file boundary: gas/testsuite/gas/i386/avx512bw_vl-wig.s (repo tactcomplabs/xbgas-binutils-gdb, 100,338 bytes) ----
# Check 32bit AVX512{BW,VL} WIG instructions
.allow_index_reg
.text
_start:
vpabsb %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsb %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsb 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsb 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsw 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsw 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
# AVX-512{BW,VL} assembler test patterns (AT&T syntax, 32-bit addressing).
# Each mnemonic is exercised with register, masked ({%k7}) and zero-masked
# ({%k7}{z}) register forms, plus memory forms whose displacements straddle
# the EVEX compressed-disp8 boundary: lines tagged "Disp8" have disp/N in
# [-128, 127] (N = memory operand size in bytes, e.g. 16 for xmm, 32 for
# ymm, 16 for the m128 shift count of vpsllw/vpsraw/vpsrlw, 8/16 for the
# half-width loads of vpmovsxbw/vpmovzxbw) and so encode with a scaled
# 8-bit displacement; the adjacent untagged lines fall just outside and
# force a full 32-bit displacement.
# NOTE(review): do not alter any operand or immediate — the expected
# disassembly (.d) files depend on these exact encodings byte-for-byte.
vpavgb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpavgb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpavgb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# vpavgw: packed word average; m128 operands scale disp8 by 16, m256 by 32.
vpavgw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpavgw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpavgw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpavgw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpavgw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpavgw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpavgw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpavgw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpavgw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpavgw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpavgw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# vpcmpeq/vpcmpgt byte/word comparisons: destination is a mask register,
# so only write-masking ({%k7}) applies — no {z} form is tested.
vpcmpeqb %xmm5, %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb (%ecx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb %ymm5, %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb (%ecx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqb -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw %xmm5, %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw (%ecx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw %ymm5, %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw (%ecx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpeqw -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb %xmm5, %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb (%ecx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb %ymm5, %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb (%ecx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtb -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw %xmm5, %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw (%ecx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw 2032(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw -2048(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%edx), %xmm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw %ymm5, %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw (%ecx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw 4064(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
vpcmpgtw -4096(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%edx), %ymm6, %k5{%k7}	 # AVX512{BW,VL}
# Multiply-add and min/max families — standard reg/mask/zero-mask/memory
# coverage with disp8*N boundary probing as described in the header.
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaddubsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaddubsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddubsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaddwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaddwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaddwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaddwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxsb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxsb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxub (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxub (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminsb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminsb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminub (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminub (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# Byte-to-word sign/zero extension: memory operand is half the destination
# width, so disp8 scales by 8 for the xmm forms and 16 for the ymm forms.
vpmovsxbw %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovsxbw (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw 1016(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmovsxbw (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw 2032(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovzxbw (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw 1016(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmovzxbw (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw 2032(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
# Word multiply family.
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhrsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhrsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmullw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmullw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# Shuffles: vpshufhw/vpshuflw carry an 8-bit immediate; both $0xab and
# $123 spellings are exercised to check immediate parsing.
vpshufb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshufb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshufb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshufhw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshufhw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshuflw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshuflw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
# Word shifts by xmm/m128 count: even for ymm destinations the count is a
# 128-bit operand, so the disp8 boundary stays at 2032/2048 (scale 16).
vpsllw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# Immediate-count shift forms: full-width memory source, so the ymm forms
# return to the 4064/4096 (scale 32) boundary.
vpsrlw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
# Subtraction family (wrapping, signed-saturating, unsigned-saturating).
vpsubb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# Byte/word interleaves (high and low halves).
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhbw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhbw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklbw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklbw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
# Switch to Intel syntax (bare register names, "PTR" size keywords) and repeat
# the AVX512{BW,VL} encoding coverage: masked/zero-masked register forms plus
# memory operands straddling the EVEX Disp8*N compression boundaries
# (N=16 for XMMWORD, N=32 for YMMWORD); "Disp8" lines must compress.
.intel_syntax noprefix
	vpabsb xmm6{k7}, xmm5	 # AVX512{BW,VL}
	vpabsb xmm6{k7}{z}, xmm5	 # AVX512{BW,VL}
	vpabsb xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpabsb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpabsb xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpabsb xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpabsb xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpabsb xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpabsb ymm6{k7}, ymm5	 # AVX512{BW,VL}
	vpabsb ymm6{k7}{z}, ymm5	 # AVX512{BW,VL}
	vpabsb ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpabsb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpabsb ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpabsb ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpabsb ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpabsb ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpabsw xmm6{k7}, xmm5	 # AVX512{BW,VL}
	vpabsw xmm6{k7}{z}, xmm5	 # AVX512{BW,VL}
	vpabsw xmm6{k7}, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpabsw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpabsw xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpabsw xmm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpabsw xmm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpabsw xmm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpabsw ymm6{k7}, ymm5	 # AVX512{BW,VL}
	vpabsw ymm6{k7}{z}, ymm5	 # AVX512{BW,VL}
	vpabsw ymm6{k7}, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpabsw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpabsw ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpabsw ymm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpabsw ymm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpabsw ymm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpacksswb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpacksswb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpacksswb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpacksswb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpackuswb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpackuswb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpackuswb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpackuswb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
# Intel-syntax AVX512{BW,VL} coverage of the byte/word add family (vpaddb,
# vpaddsb, vpaddsw, vpaddusb, vpaddusw, vpaddw) and vpalignr with immediates;
# memory displacements straddle the EVEX Disp8*N limits (N=16 xmm, N=32 ymm).
	vpaddb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpaddb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpaddsb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddsb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpaddsb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddsb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpaddsw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddsw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpaddsw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddsw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpaddusb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddusb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpaddusb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddusb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpaddusw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddusw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpaddusw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddusw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpaddw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpaddw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpaddw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpaddw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpaddw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
# vpalignr adds a trailing imm8 operand (0xab and 123 forms).
	vpalignr xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512{BW,VL}
	vpalignr xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512{BW,VL}
	vpalignr xmm6{k7}, xmm5, xmm4, 123	 # AVX512{BW,VL}
	vpalignr xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123	 # AVX512{BW,VL}
	vpalignr xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{BW,VL}
	vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123	 # AVX512{BW,VL} Disp8
	vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123	 # AVX512{BW,VL}
	vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123	 # AVX512{BW,VL} Disp8
	vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123	 # AVX512{BW,VL}
	vpalignr ymm6{k7}, ymm5, ymm4, 0xab	 # AVX512{BW,VL}
	vpalignr ymm6{k7}{z}, ymm5, ymm4, 0xab	 # AVX512{BW,VL}
	vpalignr ymm6{k7}, ymm5, ymm4, 123	 # AVX512{BW,VL}
	vpalignr ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123	 # AVX512{BW,VL}
	vpalignr ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512{BW,VL}
	vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123	 # AVX512{BW,VL} Disp8
	vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123	 # AVX512{BW,VL}
	vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123	 # AVX512{BW,VL} Disp8
	vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123	 # AVX512{BW,VL}
# Intel-syntax AVX512{BW,VL} coverage: averages (vpavgb/w), mask-register
# compares (vpcmpeq/gt — destination is a k register, so no {z} forms), and
# multiply-add (vpmaddubsw/vpmaddwd), with Disp8*N boundary memory operands.
	vpavgb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpavgb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpavgb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpavgb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpavgb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpavgb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpavgb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpavgb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpavgw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpavgw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpavgw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpavgw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpavgw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpavgw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpavgw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpavgw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, xmm6, xmm5	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, ymm6, ymm5	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, xmm6, xmm5	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, ymm6, ymm5	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, xmm6, xmm5	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, ymm6, ymm5	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, xmm6, xmm5	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, ymm6, ymm5	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmaddubsw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaddubsw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmaddubsw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaddubsw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmaddwd xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaddwd xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmaddwd ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaddwd ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
# Intel-syntax AVX512{BW,VL} coverage of the signed/unsigned byte/word min/max
# family (vpmaxsb/sw/ub/uw, vpminsb/sw/ub/uw): masked and zero-masked register
# forms plus memory operands at the EVEX Disp8*N compression boundaries.
	vpmaxsb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxsb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmaxsb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxsb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmaxsw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxsw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmaxsw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxsw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmaxub xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxub xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmaxub ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxub ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmaxuw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxuw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmaxuw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxuw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpminsb xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminsb xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminsb xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpminsb ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminsb ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminsb ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpminsw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminsw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminsw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpminsw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminsw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminsw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpminub xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminub xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminub xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpminub ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminub ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminub ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpminuw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminuw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpminuw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpminuw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminuw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpminuw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpminuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
# Intel-syntax AVX512{BW,VL} coverage of widening moves and word multiplies.
# The vpmov*bw widening forms read a half-width memory operand, so Disp8*N
# compression uses N=8 (QWORD source for xmm dest: +/-1016..1032) and N=16
# (XMMWORD source for ymm dest: +/-2032..2064); vpmulh*/vpmullw use the usual
# full-width N=16/N=32 boundaries.
	vpmovsxbw xmm6{k7}, xmm5	 # AVX512{BW,VL}
	vpmovsxbw xmm6{k7}{z}, xmm5	 # AVX512{BW,VL}
	vpmovsxbw xmm6{k7}, QWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmovsxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmovsxbw xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{BW,VL} Disp8
	vpmovsxbw xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{BW,VL}
	vpmovsxbw xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{BW,VL} Disp8
	vpmovsxbw xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{BW,VL}
	vpmovsxbw ymm6{k7}, xmm5	 # AVX512{BW,VL}
	vpmovsxbw ymm6{k7}{z}, xmm5	 # AVX512{BW,VL}
	vpmovsxbw ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmovsxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmovzxbw xmm6{k7}, xmm5	 # AVX512{BW,VL}
	vpmovzxbw xmm6{k7}{z}, xmm5	 # AVX512{BW,VL}
	vpmovzxbw xmm6{k7}, QWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmovzxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmovzxbw xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512{BW,VL} Disp8
	vpmovzxbw xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512{BW,VL}
	vpmovzxbw xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512{BW,VL} Disp8
	vpmovzxbw xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512{BW,VL}
	vpmovzxbw ymm6{k7}, xmm5	 # AVX512{BW,VL}
	vpmovzxbw ymm6{k7}{z}, xmm5	 # AVX512{BW,VL}
	vpmovzxbw ymm6{k7}, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmovzxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmulhrsw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmulhrsw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmulhrsw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmulhrsw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmulhuw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmulhuw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmulhuw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmulhuw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmulhw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmulhw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmulhw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmulhw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
	vpmullw xmm6{k7}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmullw xmm6{k7}{z}, xmm5, xmm4	 # AVX512{BW,VL}
	vpmullw xmm6{k7}, xmm5, XMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmullw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BW,VL} Disp8
	vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048]	 # AVX512{BW,VL}
	vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048]	 # AVX512{BW,VL} Disp8
	vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064]	 # AVX512{BW,VL}
	vpmullw ymm6{k7}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmullw ymm6{k7}{z}, ymm5, ymm4	 # AVX512{BW,VL}
	vpmullw ymm6{k7}, ymm5, YMMWORD PTR [ecx]	 # AVX512{BW,VL}
	vpmullw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BW,VL}
	vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BW,VL} Disp8
	vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096]	 # AVX512{BW,VL}
	vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096]	 # AVX512{BW,VL} Disp8
	vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128]	 # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# NOTE(review): the following lines are metadata left over from a corrupted
# file concatenation (repository id, byte size, and the path of the next
# embedded test file).  They are not assembler statements, so they are
# commented out to keep the file assemblable; original text preserved below.
# |
# tactcomplabs/xbgas-binutils-gdb
# | 33,993
# |
# gas/testsuite/gas/i386/x86-64-opcode.s
# |
# Excerpt of the x86-64 opcode encoding test input (AT&T syntax).  Each
# line's comment lists the expected encoding as four prefix columns
# (O16 A32 OV REX; "--" = prefix absent) followed by the opcode/ModRM
# bytes, then after ";" a note explaining which prefixes are required
# and why.  Do not change instruction text or operand order -- the
# encodings are presumably matched by a companion dump (.d) file.
.text
# Prefixes
# O16 A32 OV REX OPCODE ; NOTES
# CALL
CALLq *(%r8) # -- -- -- 41 FF 10 ; REX to access upper reg.
CALLq *(%rax) # -- -- -- -- FF 10
CALLq *(%r8) # -- -- -- 41 FF 10 ; REX to access upper reg.
CALLq *(%rax) # -- -- -- -- FF 10
# RET
lretl # -- -- -- -- CB
lretq # -- -- -- 48 CB
retq # -- -- -- -- C3
# IRET
IRETL # -- -- -- -- CF ; 32-bit operand size
IRETW # 66 -- -- -- CF ; O16 for 16-bit operand size
IRETQ # -- -- -- 48 CF ; REX for 64-bit operand size
# CMP
# MOV
MOVw %cs,(%r8) # -- -- -- 41 8C 08 ; REX to access upper reg.
MOVw %cs,(%rax) # -- -- -- -- 8C 08
MOVw %ss,(%r8) # -- -- -- 41 8C 10 ; REX to access upper reg.
MOVw %ss,(%rax) # -- -- -- -- 8C 10
MOVw %fs,(%r8) # -- -- -- 41 8C 20 ; REX to access upper reg.
MOVw %fs,(%rax) # -- -- -- -- 8C 20
MOVw (%r8),%ss # -- -- -- 41 8E 10 ; REX to access upper reg.
MOVw (%rax),%ss # -- -- -- -- 8E 10
MOVw (%r8),%fs # -- -- -- 41 8E 20 ; REX to access upper reg.
MOVw (%rax),%fs # -- -- -- -- 8E 20
MOVb $0,(%r8) # -- -- -- 41 C6 00 00 ; REX to access upper reg.
MOVb $0,(%rax) # -- -- -- -- C6 00 00
MOVw $0x7000,(%r8) # 66 -- -- 41 C7 00 00 70 ; REX to access upper reg. O16 for 16-bit operand size
MOVw $0x7000,(%rax) # 66 -- -- -- C7 00 00 70 ; O16 for 16-bit operand size
MOVl $0x70000000,(%r8) # -- -- -- 41 C7 00 00 00 00 70 ; REX to access upper reg.
MOVl $0x70000000,(%rax) # -- -- -- -- C7 00 00 00 00 70
MOVb $0,(%r8) # -- -- -- 41 C6 00 00 ; REX to access upper reg.
MOVb $0,(%rax) # -- -- -- -- C6 00 00
MOVw $0x7000,(%r8) # 66 -- -- 41 C7 00 00 70 ; REX to access upper reg. O16 for 16-bit operand size
MOVw $0x7000,(%rax) # 66 -- -- -- C7 00 00 70 ; O16 for 16-bit operand size
MOVl $0x70000000,(%rax) # -- -- -- -- C7 00 00 00 00 70
MOVb $0,(%r8) # -- -- -- 41 C6 00 00 ; REX to access upper reg.
MOVb $0,(%rax) # -- -- -- -- C6 00 00
MOVw $0x7000,(%r8) # 66 -- -- 41 C7 00 00 70 ; REX to access upper reg. O16 for 16-bit operand size
MOVw $0x7000,(%rax) # 66 -- -- -- C7 00 00 70 ; O16 for 16-bit operand size
MOVl $0x70000000,(%r8) # -- -- -- 41 C7 00 00 00 00 70 ; REX to access upper reg.
MOVl $0x70000000,(%rax) # -- -- -- -- C7 00 00 00 00 70
MOVq $0x70000000,(%r8) # -- -- -- 49 C7 00 00 00 00 70 ; REX for 64-bit operand size. REX to access upper reg.
MOVq $0x70000000,(%rax) # -- -- -- 48 C7 00 00 00 00 70 ; REX for 64-bit operand size
# LFS etc
LFS (%rax), %ecx # -- -- -- -- 0F B4 ..
LFSl (%rcx), %eax # -- -- -- -- 0F B4 ..
LFS (%rax), %cx # 66 -- -- -- 0F B4 ..
LFSw (%rcx), %ax # 66 -- -- -- 0F B4 ..
LGS (%rcx), %edx # -- -- -- -- 0F B5 ..
LGSl (%rdx), %ecx # -- -- -- -- 0F B5 ..
LGS (%rcx), %dx # 66 -- -- -- 0F B5 ..
LGSw (%rdx), %cx # 66 -- -- -- 0F B5 ..
LSS (%rdx), %ebx # -- -- -- -- 0F B2 ..
LSSl (%rbx), %edx # -- -- -- -- 0F B2 ..
LSS (%rdx), %bx # 66 -- -- -- 0F B2 ..
LSSw (%rbx), %dx # 66 -- -- -- 0F B2 ..
# MOVNTI
MOVNTI %eax,(%r8) # -- -- -- 41 0f c3 00 ; REX to access upper reg.
MOVNTI %eax,(%rax) # -- -- -- -- 0f c3 00
MOVNTI %rax,(%r8) # -- -- -- 49 0F C3 00 ; REX to access upper reg. REX for 64-bit operand size
MOVNTI %rax,(%rax) # -- -- -- 48 0F C3 00 ; REX for 64-bit operand size. REX to access upper reg.
MOVNTI %r8,(%r8) # -- -- -- 4D 0F C3 00 ; REX to access upper reg. REX for 64-bit operand size
MOVNTI %r8,(%rax) # -- -- -- 4C 0F C3 00 ; REX to access upper reg. REX for 64-bit operand size
# Conditionals
# LOOP
LOOP . # -- -- -- -- E2 FE ; RCX used as counter.
LOOPq . # -- -- -- -- E2 FE ; RCX used as counter.
LOOPl . # -- 67 -- -- E2 FD ; ECX used as counter.
# Jcc
# 66 -- -- -- 77 FD ; O16 override: (Addr64) = ZEXT(Addr16)
# 66 -- -- -- 0F 87 F9 FF FF FF ; O16 override: (Addr64) = ZEXT(Addr16)
# J*CXZ
JRCXZ . # -- -- -- -- E3 FE ; RCX used as counter.
JECXZ . # -- 67 -- -- E3 FD ; ECX used as counter.
# Integer
# IDIV
IDIVb (%r8) # -- -- -- 41 F6 38 ; Sign extended result. REX to access upper reg.
IDIVb (%rax) # -- -- -- -- F6 38 ; Sign extended result
IDIVw (%r8) # 66 -- -- 41 F7 38 ; Sign extended result. REX to access upper reg. O16 for 16-bit
IDIVw (%rax) # 66 -- -- -- F7 38 ; Sign extended result. O16 for 16-bit operand size
IDIVl (%r8) # -- -- -- 41 F7 38 ; Sign extended result. REX to access upper reg
IDIVl (%rax) # -- -- -- -- F7 38 ; Sign extended result
IDIVq (%r8) # -- -- -- 49 F7 38 ; Sign extended result. REX for 64-bit operand size. REX to access u
IDIVq (%rax) # -- -- -- 48 F7 38 ; Sign extended result. REX for 64-bit operand size
# IMUL
IMULb (%r8) # -- -- -- 41 F6 28 ; Sign extended result. REX to access upper reg
IMULb (%rax) # -- -- -- -- F6 28 ; Sign extended result
IMULw (%r8) # 66 -- -- 41 F7 28 ; Sign extended result. O16 for 16-bit operand size. REX to access
IMULw (%rax) # 66 -- -- -- F7 28 ; Sign extended result. O16 for 16-bit operand size
IMULl (%r8) # -- -- -- 41 F7 28 ; Sign extended result. REX to access upper reg
IMULl (%rax) # -- -- -- -- F7 28 ; Sign extended result
IMULq (%r8) # -- -- -- 49 F7 28 ; Sign extended result. REX for 64-bit operand size. REX to access u
IMULq (%rax) # -- -- -- 48 F7 28 ; Sign extended result. REX for 64-bit operand size
# SIMD/SSE
# ADDPD
ADDPD (%r8),%xmm0 # -- -- 66 41 0F 58 00 ; REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm0 # -- -- 66 -- 0F 58 00 ; OVR 128bit MMinstr.
ADDPD (%r8),%xmm15 # -- -- 66 45 0F 58 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm15 # -- -- 66 44 0F 58 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
ADDPD (%r8),%xmm8 # -- -- 66 45 0F 58 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm8 # -- -- 66 44 0F 58 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
ADDPD (%r8),%xmm7 # -- -- 66 41 0F 58 38 ; REX to access upper reg. OVR 128bit MMinstr.
ADDPD (%rax),%xmm7 # -- -- 66 -- 0F 58 38 ; OVR 128bit MMinstr.
ADDPD %xmm0,%xmm0 # -- -- 66 -- 0F 58 C0 ; OVR 128bit MMinstr.
ADDPD %xmm15,%xmm15 # -- -- 66 45 0F 58 FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
ADDPD %xmm15,%xmm8 # -- -- 66 45 0F 58 C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
# CMPPD
# CVTSD2SI
CVTSD2SIq (%r8),%rax # -- -- F2 49 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq (%rax),%rax # -- -- F2 48 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTSD2SIq (%r8),%r8 # -- -- F2 4D 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq (%rax),%r8 # -- -- F2 4C 0f 2d 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq %xmm0,%rax # -- -- F2 48 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTSD2SIq %xmm15,%r8 # -- -- F2 4D 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSD2SIq %xmm15,%rax # -- -- F2 49 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTSD2SIq %xmm8,%r8 # -- -- F2 4D 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSD2SIq %xmm8,%rax # -- -- F2 49 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTSD2SIq %xmm7,%r8 # -- -- F2 4C 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTSD2SIq %xmm7,%rax # -- -- F2 48 0f 2d c7 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTSD2SIq %xmm0,%r8 # -- -- F2 4C 0f 2d c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
# CVTTSD2SI
CVTTSD2SIq (%r8),%rax # -- -- F2 49 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq (%rax),%rax # -- -- F2 48 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTTSD2SIq (%r8),%r8 # -- -- F2 4D 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq (%rax),%r8 # -- -- F2 4C 0f 2c 00 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq %xmm0,%rax # -- -- F2 48 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTTSD2SIq %xmm15,%r8 # -- -- F2 4D 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSD2SIq %xmm15,%rax # -- -- F2 49 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTTSD2SIq %xmm8,%r8 # -- -- F2 4D 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSD2SIq %xmm8,%rax # -- -- F2 49 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper XMM reg
CVTTSD2SIq %xmm7,%r8 # -- -- F2 4C 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
CVTTSD2SIq %xmm7,%rax # -- -- F2 48 0f 2c c7 ; OVR 128-bit media instruction override REX for 64-bit operand size
CVTTSD2SIq %xmm0,%r8 # -- -- F2 4C 0f 2c c0 ; OVR 128-bit media instruction override REX for 64-bit operand size REX to access upper reg.
# CVTSS2SI
CVTSS2SIq (%r8),%rax # -- -- F3 49 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq (%rax),%rax # -- -- F3 48 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTSS2SIq (%r8),%r8 # -- -- F3 4D 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq (%rax),%r8 # -- -- F3 4C 0f 2d 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq %xmm0,%rax # -- -- F3 48 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTSS2SIq %xmm15,%r8 # -- -- F3 4D 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSS2SIq %xmm15,%rax # -- -- F3 49 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg
CVTSS2SIq %xmm8,%r8 # -- -- F3 4D 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTSS2SIq %xmm8,%rax # -- -- F3 49 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg
CVTSS2SIq %xmm7,%r8 # -- -- F3 4C 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTSS2SIq %xmm7,%rax # -- -- F3 48 0f 2d c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTSS2SIq %xmm0,%r8 # -- -- F3 4C 0f 2d c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
# CVTTSS2SI
CVTTSS2SIq (%r8),%rax # -- -- F3 49 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq (%rax),%rax # -- -- F3 48 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq (%r8),%r8 # -- -- F3 4D 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq (%rax),%r8 # -- -- F3 4C 0f 2c 00 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq %xmm0,%rax # -- -- F3 48 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq %xmm15,%r8 # -- -- F3 4D 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSS2SIq %xmm15,%rax # -- -- F3 49 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg
CVTTSS2SIq %xmm8,%r8 # -- -- F3 4D 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper XMM reg REX to access upper reg.
CVTTSS2SIq %xmm8,%rax # -- -- F3 49 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq %xmm7,%r8 # -- -- F3 4C 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
CVTTSS2SIq %xmm7,%rax # -- -- F3 48 0f 2c c7 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size
CVTTSS2SIq %xmm0,%r8 # -- -- F3 4C 0f 2c c0 ; OVR 128-bit media instruction override Result is sign extended REX for 64-bit operand size REX to access upper reg.
# CVTSI2SS
CVTSI2SSl (%r8),%xmm0 # -- -- F3 41 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm0 # -- -- F3 -- 0f 2a 00 ; OVR 128-bit media instruction override
CVTSI2SSl (%r8),%xmm15 # -- -- F3 45 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm15 # -- -- F3 44 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm8 # -- -- F3 45 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm8 # -- -- F3 44 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm7 # -- -- F3 41 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm7 # -- -- F3 -- 0f 2a 38 ; OVR 128-bit media instruction override
CVTSI2SS %eax,%xmm0 # -- -- F3 -- 0f 2a c0 ; OVR 128-bit media instruction override
CVTSI2SS %eax,%xmm15 # -- -- F3 44 0f 2a f8 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SS %eax,%xmm8 # -- -- F3 44 0f 2a c0 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SS %eax,%xmm7 # -- -- F3 -- 0f 2a f8 ; OVR 128-bit media instruction override
CVTSI2SSl (%r8),%xmm0 # -- -- F3 41 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm0 # -- -- F3 -- 0f 2a 00 ; OVR 128-bit media instruction override
CVTSI2SSl (%r8),%xmm15 # -- -- F3 45 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm15 # -- -- F3 44 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm8 # -- -- F3 45 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg REX to access upper reg.
CVTSI2SSl (%rax),%xmm8 # -- -- F3 44 0f 2a 00 ; OVR 128-bit media instruction override REX to access upper XMM reg
CVTSI2SSl (%r8),%xmm7 # -- -- F3 41 0f 2a 38 ; OVR 128-bit media instruction override REX to access upper reg.
CVTSI2SSl (%rax),%xmm7 # -- -- F3 -- 0f 2a 38 ; OVR 128-bit media instruction override
# CVTSI2SD
CVTSI2SDl (%r8),%xmm0 # -- -- F2 41 0F 2A 00 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm0 # -- -- F2 -- 0F 2A 00 ; OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm15 # -- -- F2 45 0F 2A 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm15 # -- -- F2 44 0F 2A 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm8 # -- -- F2 45 0F 2A 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm8 # -- -- F2 44 0F 2A 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm7 # -- -- F2 41 0F 2A 38 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm7 # -- -- F2 -- 0F 2A 38 ; OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm0 # -- -- F2 -- 0F 2A C0 ; OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm15 # -- -- F2 44 0F 2A F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm8 # -- -- F2 44 0F 2A C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SD %eax,%xmm7 # -- -- F2 -- 0F 2A F8 ; OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm0 # -- -- F2 41 0F 2A 00 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm0 # -- -- F2 -- 0F 2A 00 ; OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm15 # -- -- F2 45 0F 2A 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm15 # -- -- F2 44 0F 2A 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm8 # -- -- F2 45 0F 2A 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm8 # -- -- F2 44 0F 2A 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
CVTSI2SDl (%r8),%xmm7 # -- -- F2 41 0F 2A 38 ; REX to access upper reg. OVR 128bit MMinstr.
CVTSI2SDl (%rax),%xmm7 # -- -- F2 -- 0F 2A 38 ; OVR 128bit MMinstr.
# MOVD
MOVD (%r8),%xmm0 # -- -- 66 41 0F 6E 00 ; REX to access upper reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%rax),%xmm0 # -- -- 66 -- 0F 6E 00 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%r8),%xmm15 # -- -- 66 45 0F 6E 38 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data32)
MOVD (%rax),%xmm15 # -- -- 66 44 0F 6E 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD (%r8),%xmm8 # -- -- 66 45 0F 6E 00 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data32)
MOVD (%rax),%xmm8 # -- -- 66 44 0F 6E 00 ; REX to access upper XMM reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%r8),%xmm7 # -- -- 66 41 0F 6E 38 ; REX to access upper reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD (%rax),%xmm7 # -- -- 66 -- 0F 6E 38 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm0 # -- -- 66 -- 0F 6E C0 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm15 # -- -- 66 44 0F 6E F8 ; REX to access upper XMM reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm8 # -- -- 66 44 0F 6E C0 ; REX to access upper XMM reg. Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %eax,%xmm7 # -- -- 66 -- 0F 6E F8 ; Data128 = ZEXT(Data32). OVR 128bit MMinstr.
MOVD %xmm0,(%r8) # -- -- 66 41 0F 7E 00 ; REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm0,(%rax) # -- -- 66 -- 0F 7E 00 ; OVR 128bit MMinstr.
MOVD %xmm15,(%r8) # -- -- 66 45 0F 7E 38 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm15,(%rax) # -- -- 66 44 0F 7E 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm8,(%r8) # -- -- 66 45 0F 7E 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm8,(%rax) # -- -- 66 44 0F 7E 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm7,(%r8) # -- -- 66 41 0F 7E 38 ; REX to access upper reg. OVR 128bit MMinstr.
MOVD %xmm7,(%rax) # -- -- 66 -- 0F 7E 38 ; OVR 128bit MMinstr.
MOVD %xmm0,%eax # -- -- 66 -- 0F 7E C0 ; OVR 128bit MMinstr.
MOVD %xmm15,%eax # -- -- 66 44 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm8,%eax # -- -- 66 44 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVD %xmm7,%eax # -- -- 66 -- 0F 7E F8 ; OVR 128bit MMinstr.
MOVD %rax,%xmm0 # -- -- 66 48 0F 6E C0 ; Data128 = ZEXT(Data64). OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %r8,%xmm0 # -- -- 66 49 0F 6E C0 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %r8,%xmm15 # -- -- 66 4D 0F 6E F8 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %xmm0,%rax # -- -- 66 48 0F 7E C0 ; OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %xmm0,%r8 # -- -- 66 49 0F 7E C0 ; OVR 128bit MMinstr. REX for 64-bit operand size.
MOVD %xmm7,%r8 # -- -- 66 49 0F 7E F8 ; OVR 128bit MMinstr. REX for 64-bit operand size.
# MOVQ
MOVQ (%r8),%xmm0 # -- -- F3 41 0F 7E 00 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%rax),%xmm0 # -- -- F3 -- 0F 7E 00 ; Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%r8),%xmm15 # -- -- F3 45 0F 7E 38 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data64)
MOVQ (%rax),%xmm15 # -- -- F3 44 0F 7E 38 ; REX to access upper XMM reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%r8),%xmm8 # -- -- F3 45 0F 7E 00 ; REX to access upper XMM reg. REX to access upper reg. Data128 = ZEXT(Data64)
MOVQ (%rax),%xmm8 # -- -- F3 44 0F 7E 00 ; REX to access upper XMM reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%r8),%xmm7 # -- -- F3 41 0F 7E 38 ; REX to access upper reg. Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ (%rax),%xmm7 # -- -- F3 -- 0F 7E 38 ; Data128 = ZEXT(Data64). OVR 128bit MMinstr.
MOVQ %xmm0,%xmm0 # -- -- F3 -- 0F 7E C0 ; OVR 128bit MMinstr.
MOVQ %xmm15,%xmm15 # -- -- F3 45 0F 7E FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm15,%xmm8 # -- -- F3 45 0F 7E C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm15,%xmm7 # -- -- F3 41 0F 7E FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm15,%xmm0 # -- -- F3 41 0F 7E C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm15 # -- -- F3 45 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm8 # -- -- F3 45 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm7 # -- -- F3 41 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,%xmm0 # -- -- F3 41 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,%xmm15 # -- -- F3 44 0F 7E FF ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,%xmm8 # -- -- F3 44 0F 7E C7 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,%xmm7 # -- -- F3 -- 0F 7E FF ; OVR 128bit MMinstr.
MOVQ %xmm7,%xmm0 # -- -- F3 -- 0F 7E C7 ; OVR 128bit MMinstr.
MOVQ %xmm0,%xmm15 # -- -- F3 44 0F 7E F8 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm0,%xmm8 # -- -- F3 44 0F 7E C0 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm0,%xmm7 # -- -- F3 -- 0F 7E F8 ; OVR 128bit MMinstr.
MOVQ %xmm0,(%r8) # -- -- 66 41 0F D6 00 ; REX to access upper reg. OVR 128bit MMinstr.
MOVQ %xmm0,(%rax) # -- -- 66 -- 0F D6 00 ; OVR 128bit MMinstr.
MOVQ %xmm15,(%r8) # -- -- 66 45 0F D6 38 ; REX to access upper reg. OVR 128bit MMinstr.
MOVQ %xmm15,(%rax) # -- -- 66 44 0F D6 38 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm8,(%r8) # -- -- 66 45 0F D6 00 ; REX to access upper XMM reg. REX to access upper reg. OVR 128bit MMinstr.
MOVQ %xmm8,(%rax) # -- -- 66 44 0F D6 00 ; REX to access upper XMM reg. OVR 128bit MMinstr.
MOVQ %xmm7,(%r8) # -- -- 66 41 0F D6 38 ; REX to access upper reg. OVR 128bit MMinstr.
# 64-bit MMX
# CVTPD2PI
# MOVD
MOVD (%r8),%mm0 # -- -- -- 41 0F 6E 00 ; REX to access upper reg. Data64 = ZEXT(Data32)
MOVD (%rax),%mm0 # -- -- -- -- 0F 6E 00 ; Data64 = ZEXT(Data32)
MOVD (%r8),%mm7 # -- -- -- 41 0F 6E 38 ; REX to access upper reg. Data64 = ZEXT(Data32)
MOVD (%rax),%mm7 # -- -- -- -- 0F 6E 38 ; Data64 = ZEXT(Data32)
MOVD %eax,%mm0 # -- -- -- -- 0F 6E C0 ; Data64 = ZEXT(Data32)
MOVD %eax,%mm7 # -- -- -- -- 0F 6E F8 ; Data64 = ZEXT(Data32)
MOVD %mm0,(%r8) # -- -- -- 41 0F 7E 00 ; REX to access upper reg.
MOVD %mm0,(%rax) # -- -- -- -- 0F 7E 00
MOVD %mm7,(%r8) # -- -- -- 41 0F 7E 38 ; REX to access upper reg.
MOVD %mm7,(%rax) # -- -- -- -- 0F 7E 38
MOVD %mm0,%eax # -- -- -- -- 0F 7E C0
MOVD %mm7,%eax # -- -- -- -- 0F 7E F8
# MOVQ
MOVQ (%r8),%mm0 # -- -- -- 41 0F 6F 00 ; REX to access upper reg.
MOVQ (%rax),%mm0 # -- -- -- -- 0F 6F 00
MOVQ (%r8),%mm7 # -- -- -- 41 0F 6F 38 ; REX to access upper reg.
MOVQ (%rax),%mm7 # -- -- -- -- 0F 6F 38
MOVQ %mm0,(%r8) # -- -- -- 41 0F 7F 00 ; REX to access upper reg.
MOVQ %mm0,(%rax) # -- -- -- -- 0F 7F 00
MOVQ %mm7,(%r8) # -- -- -- 41 0F 7F 38 ; REX to access upper reg.
MOVQ %mm7,(%rax) # -- -- -- -- 0F 7F 38
# X87
# FADDP
# FDIV
# Stack Operations
# POP
POPq (%r8) # -- -- -- 41 8F 00 ; REX to access upper reg.
POPq (%rax) # -- -- -- -- 8F 00
POP %fs # -- -- -- -- 0F A1
POPq %fs # -- -- -- -- 0F A1
POP %gs # -- -- -- -- 0F A9
POPq %gs # -- -- -- -- 0F A9
POPF # -- -- -- -- 9D
POPFq # -- -- -- -- 9D
# PUSH
PUSHq (%r8) # -- -- -- 41 FF 30 ; REX to access upper reg.
PUSHq (%rax) # -- -- -- -- FF 30
PUSH %fs # -- -- -- -- 0F A0
PUSHq %fs # -- -- -- -- 0F A0
PUSH %gs # -- -- -- -- 0F A8
PUSHq %gs # -- -- -- -- 0F A8
PUSHF # -- -- -- -- 9C
PUSHFq # -- -- -- -- 9C
# MMX/XMM/x87 State
# FNSAVE
# FRSTOR
# FSAVE
# FXRSTOR
# FXSAVE
# EMMS
EMMS # -- -- -- -- 0F 77
# FEMMS
FEMMS # -- -- -- -- 0F 0E
# LEA calculation
# MISC System Instructions
# CLFLUSH
# INVD
INVD # -- -- -- -- 0F 08
# INVLPG
INVLPG (%r8) # -- -- -- 41 0F 01 38 ; REX to access upper reg.
INVLPG (%rax) # -- -- -- -- 0F 01 38
INVLPG (%r8) # -- -- -- 41 0F 01 38 ; REX to access upper reg.
INVLPG (%rax) # -- -- -- -- 0F 01 38
INVLPG (%r8) # -- -- -- 41 0F 01 38 ; REX to access upper reg.
INVLPG (%rax) # -- -- -- -- 0F 01 38
# LAR
# LGDT
# LIDT
# LLDT
# SGDT
# SIDT
# SLDT
# SLDT (%eax) # -- 67 -- -- 0F 00 00 ; A32 override: (Addr64) = ZEXT(Addr32 )
SLDT %eax # -- -- -- -- 0F 00 C0
SLDT %rax # -- -- -- 48 0F 00 C0
SLDT %ax # 66 -- -- -- 0F 00 C0
SLDT (%rax) # -- -- -- -- 0F 00 00
# SWAPGS
# IO
# OUT
OUT %al,$0 # -- -- -- -- E6 00
OUT %ax,$0 # 66 -- -- -- E7 00 ; O16 for 16-bit operand size
OUT %eax,$0 # -- -- -- -- E7 00
# IN
xchg %ax,%ax # 66 -- -- -- 90
xchg %eax,%eax # -- -- -- -- 87 C0
xchg %rax,%rax # -- -- -- -- 90
rex64 xchg %rax,%rax # -- -- -- 48 90
xchg %rax,%r8 # -- -- -- 49 90
xchg %eax,%r8d # -- -- -- 41 90
xchg %r8d,%eax # -- -- -- 41 90
xchg %eax,%r9d # -- -- -- 41 91
xchg %r9d,%eax # -- -- -- 41 91
xchg %ebx,%eax # -- -- -- 93
xchg %eax,%ebx # -- -- -- 93
xchg %ax,%r8w # -- -- -- 66 41 90
xchg %r8w,%ax # -- -- -- 66 41 90
xchg %ax,%r9w # -- -- -- 66 41 91
xchg %r9w,%ax # -- -- -- 66 41 91
smsw %rax # -- -- -- 48 0F 01 e0
smsw %eax # -- -- -- -- 0F 01 e0
smsw %ax # 66 -- -- -- 0F 01 e0
smsw (%rax) # -- -- -- -- 0F 01 20
str %rax # -- -- -- 48 0F 00 c8
str %eax # -- -- -- -- 0F 00 c8
str %ax # 66 -- -- -- 0F 00 c8
str (%rax) # -- -- -- -- 0F 00 08
syscall # -- -- -- -- 0F 05
sysretl # -- -- -- -- 0F 07
sysretq # -- -- -- 48 0F 07
swapgs # -- -- -- -- 0F 01 f8
pushw $0x2222
int1
int3
int $0x90
.byte 0xf6, 0xc9, 0x01
.byte 0x66, 0xf7, 0xc9, 0x02, 0x00
.byte 0xf7, 0xc9, 0x04, 0x00, 0x00, 0x00
.byte 0x48, 0xf7, 0xc9, 0x08, 0x00, 0x00, 0x00
.byte 0xc0, 0xf0, 0x02
.byte 0xc1, 0xf0, 0x01
.byte 0x48, 0xc1, 0xf0, 0x01
.byte 0xd0, 0xf0
.byte 0xd1, 0xf0
.byte 0x48, 0xd1, 0xf0
.byte 0xd2, 0xf0
.byte 0xd3, 0xf0
.byte 0x48, 0xd3, 0xf0
|
tactcomplabs/xbgas-binutils-gdb
| 4,166
|
gas/testsuite/gas/i386/avx-gather.s
|
# Check 32bit AVX gather instructions
.text
_start:
vgatherdpd %xmm2, (%ebp, %xmm7, 2),%xmm1
vgatherqpd %xmm2, (%ebp, %xmm7, 2),%xmm1
vgatherdpd %ymm2, (%ebp, %xmm7, 2),%ymm1
vgatherqpd %ymm2, (%ebp, %ymm7, 2),%ymm1
vgatherdpd %ymm5,0x8(,%xmm4,1),%ymm6
vgatherdpd %ymm5,-0x8(,%xmm4,1),%ymm6
vgatherdpd %ymm5,(,%xmm4,1),%ymm6
vgatherdpd %ymm5,0x298(,%xmm4,1),%ymm6
vgatherdpd %ymm5,0x8(,%xmm4,8),%ymm6
vgatherdpd %ymm5,-0x8(,%xmm4,8),%ymm6
vgatherdpd %ymm5,(,%xmm4,8),%ymm6
vgatherdpd %ymm5,0x298(,%xmm4,8),%ymm6
vgatherdps %xmm2, (%ebp, %xmm7, 2),%xmm1
vgatherqps %xmm2, (%ebp, %xmm7, 2),%xmm1
vgatherdps %ymm2, (%ebp, %ymm7, 2),%ymm1
vgatherqps %xmm2, (%ebp, %ymm7, 2),%xmm1
vgatherdps %xmm5,0x8(,%xmm4,1),%xmm6
vgatherdps %xmm5,-0x8(,%xmm4,1),%xmm6
vgatherdps %xmm5,(,%xmm4,1),%xmm6
vgatherdps %xmm5,0x298(,%xmm4,1),%xmm6
vgatherdps %xmm5,0x8(,%xmm4,8),%xmm6
vgatherdps %xmm5,-0x8(,%xmm4,8),%xmm6
vgatherdps %xmm5,(,%xmm4,8),%xmm6
vgatherdps %xmm5,0x298(,%xmm4,8),%xmm6
vpgatherdd %xmm2, (%ebp, %xmm7, 2),%xmm1
vpgatherqd %xmm2, (%ebp, %xmm7, 2),%xmm1
vpgatherdd %ymm2, (%ebp, %ymm7, 2),%ymm1
vpgatherqd %xmm2, (%ebp, %ymm7, 2),%xmm1
vpgatherdd %xmm5,0x8(,%xmm4,1),%xmm6
vpgatherdd %xmm5,-0x8(,%xmm4,1),%xmm6
vpgatherdd %xmm5,(,%xmm4,1),%xmm6
vpgatherdd %xmm5,0x298(,%xmm4,1),%xmm6
vpgatherdd %xmm5,0x8(,%xmm4,8),%xmm6
vpgatherdd %xmm5,-0x8(,%xmm4,8),%xmm6
vpgatherdd %xmm5,(,%xmm4,8),%xmm6
vpgatherdd %xmm5,0x298(,%xmm4,8),%xmm6
vpgatherdq %xmm2, (%ebp, %xmm7, 2),%xmm1
vpgatherqq %xmm2, (%ebp, %xmm7, 2),%xmm1
vpgatherdq %ymm2, (%ebp, %xmm7, 2),%ymm1
vpgatherqq %ymm2, (%ebp, %ymm7, 2),%ymm1
vpgatherdq %ymm5,0x8(,%xmm4,1),%ymm6
vpgatherdq %ymm5,-0x8(,%xmm4,1),%ymm6
vpgatherdq %ymm5,(,%xmm4,1),%ymm6
vpgatherdq %ymm5,0x298(,%xmm4,1),%ymm6
vpgatherdq %ymm5,0x8(,%xmm4,8),%ymm6
vpgatherdq %ymm5,-0x8(,%xmm4,8),%ymm6
vpgatherdq %ymm5,(,%xmm4,8),%ymm6
vpgatherdq %ymm5,0x298(,%xmm4,8),%ymm6
.intel_syntax noprefix
vgatherdpd xmm1,QWORD PTR [ebp+xmm7*2+0x0],xmm2
vgatherqpd xmm1,QWORD PTR [ebp+xmm7*2+0x0],xmm2
vgatherdpd ymm1,QWORD PTR [ebp+xmm7*2+0x0],ymm2
vgatherqpd ymm1,QWORD PTR [ebp+ymm7*2+0x0],ymm2
vgatherdpd ymm6,QWORD PTR [xmm4*1+0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*1-0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*1+0x0],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*1+0x298],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8+0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8-0x8],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8+0x0],ymm5
vgatherdpd ymm6,QWORD PTR [xmm4*8+0x298],ymm5
vgatherdps xmm1,DWORD PTR [ebp+xmm7*2+0x0],xmm2
vgatherqps xmm1,DWORD PTR [ebp+xmm7*2+0x0],xmm2
vgatherdps ymm1,DWORD PTR [ebp+ymm7*2+0x0],ymm2
vgatherqps xmm1,DWORD PTR [ebp+ymm7*2+0x0],xmm2
vgatherdps xmm6,DWORD PTR [xmm4*1+0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*1-0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*1+0x0],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*1+0x298],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8+0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8-0x8],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8+0x0],xmm5
vgatherdps xmm6,DWORD PTR [xmm4*8+0x298],xmm5
vpgatherdd xmm1,DWORD PTR [ebp+xmm7*2+0x0],xmm2
vpgatherqd xmm1,DWORD PTR [ebp+xmm7*2+0x0],xmm2
vpgatherdd ymm1,DWORD PTR [ebp+ymm7*2+0x0],ymm2
vpgatherqd xmm1,DWORD PTR [ebp+ymm7*2+0x0],xmm2
vpgatherdd xmm6,DWORD PTR [xmm4*1+0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*1-0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*1+0x0],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*1+0x298],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8+0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8-0x8],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8+0x0],xmm5
vpgatherdd xmm6,DWORD PTR [xmm4*8+0x298],xmm5
vpgatherdq xmm1,QWORD PTR [ebp+xmm7*2+0x0],xmm2
vpgatherqq xmm1,QWORD PTR [ebp+xmm7*2+0x0],xmm2
vpgatherdq ymm1,QWORD PTR [ebp+xmm7*2+0x0],ymm2
vpgatherqq ymm1,QWORD PTR [ebp+ymm7*2+0x0],ymm2
vpgatherdq ymm6,QWORD PTR [xmm4*1+0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*1-0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*1+0x0],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*1+0x298],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8+0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8-0x8],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8+0x0],ymm5
vpgatherdq ymm6,QWORD PTR [xmm4*8+0x298],ymm5
|
tactcomplabs/xbgas-binutils-gdb
| 74,845
|
gas/testsuite/gas/i386/xop.s
|
# Check XOP instructions (maxcombos=16, maxops=3, archbits=32, seed=1)
.allow_index_reg
.text
_start:
# Tests for op VFRCZPD xmm2/mem128, xmm1 (at&t syntax)
VFRCZPD %xmm7,%xmm7
VFRCZPD %xmm0,%xmm6
VFRCZPD (%ebx),%xmm0
VFRCZPD (%esi),%xmm7
VFRCZPD %xmm0,%xmm0
VFRCZPD (%eax),%xmm7
VFRCZPD %xmm7,%xmm0
VFRCZPD %xmm1,%xmm6
VFRCZPD %xmm1,%xmm0
VFRCZPD %xmm0,%xmm7
VFRCZPD (%eax),%xmm6
VFRCZPD %xmm1,%xmm7
VFRCZPD (%esi),%xmm0
VFRCZPD (%ebx),%xmm7
VFRCZPD (%esi),%xmm6
VFRCZPD (%eax),%xmm0
# Tests for op VFRCZPD ymm2/mem256, ymm1 (at&t syntax)
VFRCZPD %ymm7,%ymm7
VFRCZPD %ymm0,%ymm6
VFRCZPD (%ebx),%ymm0
VFRCZPD (%esi),%ymm7
VFRCZPD %ymm0,%ymm0
VFRCZPD (%eax),%ymm7
VFRCZPD %ymm7,%ymm0
VFRCZPD %ymm1,%ymm6
VFRCZPD %ymm1,%ymm0
VFRCZPD %ymm0,%ymm7
VFRCZPD (%eax),%ymm6
VFRCZPD %ymm1,%ymm7
VFRCZPD (%esi),%ymm0
VFRCZPD (%ebx),%ymm7
VFRCZPD (%esi),%ymm6
VFRCZPD (%eax),%ymm0
# Tests for op VFRCZPS xmm2/mem128, xmm1 (at&t syntax)
VFRCZPS %xmm7,%xmm7
VFRCZPS %xmm0,%xmm6
VFRCZPS (%ebx),%xmm0
VFRCZPS (%esi),%xmm7
VFRCZPS %xmm0,%xmm0
VFRCZPS (%eax),%xmm7
VFRCZPS %xmm7,%xmm0
VFRCZPS %xmm1,%xmm6
VFRCZPS %xmm1,%xmm0
VFRCZPS %xmm0,%xmm7
VFRCZPS (%eax),%xmm6
VFRCZPS %xmm1,%xmm7
VFRCZPS (%esi),%xmm0
VFRCZPS (%ebx),%xmm7
VFRCZPS (%esi),%xmm6
VFRCZPS (%eax),%xmm0
# Tests for op VFRCZPS ymm2/mem256, ymm1 (at&t syntax)
VFRCZPS %ymm7,%ymm7
VFRCZPS %ymm0,%ymm6
VFRCZPS (%ebx),%ymm0
VFRCZPS (%esi),%ymm7
VFRCZPS %ymm0,%ymm0
VFRCZPS (%eax),%ymm7
VFRCZPS %ymm7,%ymm0
VFRCZPS %ymm1,%ymm6
VFRCZPS %ymm1,%ymm0
VFRCZPS %ymm0,%ymm7
VFRCZPS (%eax),%ymm6
VFRCZPS %ymm1,%ymm7
VFRCZPS (%esi),%ymm0
VFRCZPS (%ebx),%ymm7
VFRCZPS (%esi),%ymm6
VFRCZPS (%eax),%ymm0
# Tests for op VFRCZSD xmm2/mem64, xmm1 (at&t syntax)
VFRCZSD %xmm7,%xmm7
VFRCZSD %xmm0,%xmm6
VFRCZSD (%ebx),%xmm0
VFRCZSD (%esi),%xmm7
VFRCZSD %xmm0,%xmm0
VFRCZSD (%eax),%xmm7
VFRCZSD %xmm7,%xmm0
VFRCZSD %xmm1,%xmm6
VFRCZSD %xmm1,%xmm0
VFRCZSD %xmm0,%xmm7
VFRCZSD (%eax),%xmm6
VFRCZSD %xmm1,%xmm7
VFRCZSD (%esi),%xmm0
VFRCZSD (%ebx),%xmm7
VFRCZSD (%esi),%xmm6
VFRCZSD (%eax),%xmm0
# Tests for op VFRCZSS xmm2/mem32, xmm1 (at&t syntax)
VFRCZSS %xmm7,%xmm7
VFRCZSS %xmm0,%xmm6
VFRCZSS (%ebx),%xmm0
VFRCZSS (%esi),%xmm7
VFRCZSS %xmm0,%xmm0
VFRCZSS (%eax),%xmm7
VFRCZSS %xmm7,%xmm0
VFRCZSS %xmm1,%xmm6
VFRCZSS %xmm1,%xmm0
VFRCZSS %xmm0,%xmm7
VFRCZSS (%eax),%xmm6
VFRCZSS %xmm1,%xmm7
VFRCZSS (%esi),%xmm0
VFRCZSS (%ebx),%xmm7
VFRCZSS (%esi),%xmm6
VFRCZSS (%eax),%xmm0
# Tests for op VPCMOV xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm7,%xmm7,%xmm0
VPCMOV %xmm7,(%esi),%xmm0,%xmm0
VPCMOV %xmm1,(%esi),%xmm3,%xmm0
VPCMOV %xmm1,%xmm0,%xmm0,%xmm5
VPCMOV %xmm1,%xmm6,%xmm0,%xmm0
VPCMOV %xmm1,%xmm6,%xmm0,%xmm7
VPCMOV %xmm1,(%edx),%xmm0,%xmm7
VPCMOV %xmm7,%xmm0,%xmm0,%xmm7
VPCMOV %xmm7,(%esi),%xmm3,%xmm7
VPCMOV %xmm7,%xmm6,%xmm3,%xmm7
VPCMOV %xmm7,%xmm7,%xmm3,%xmm0
VPCMOV %xmm0,(%edx),%xmm3,%xmm0
VPCMOV %xmm1,(%edx),%xmm7,%xmm5
VPCMOV %xmm1,%xmm7,%xmm7,%xmm5
VPCMOV %xmm1,%xmm7,%xmm0,%xmm0
VPCMOV %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPCMOV ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm7,%ymm7,%ymm0
VPCMOV %ymm7,(%esi),%ymm0,%ymm0
VPCMOV %ymm1,(%esi),%ymm3,%ymm0
VPCMOV %ymm1,%ymm0,%ymm0,%ymm5
VPCMOV %ymm1,%ymm6,%ymm0,%ymm0
VPCMOV %ymm1,%ymm6,%ymm0,%ymm7
VPCMOV %ymm1,(%edx),%ymm0,%ymm7
VPCMOV %ymm7,%ymm0,%ymm0,%ymm7
VPCMOV %ymm7,(%esi),%ymm3,%ymm7
VPCMOV %ymm7,%ymm6,%ymm3,%ymm7
VPCMOV %ymm7,%ymm7,%ymm3,%ymm0
VPCMOV %ymm0,(%edx),%ymm3,%ymm0
VPCMOV %ymm1,(%edx),%ymm7,%ymm5
VPCMOV %ymm1,%ymm7,%ymm7,%ymm5
VPCMOV %ymm1,%ymm7,%ymm0,%ymm0
VPCMOV %ymm7,(%esi),%ymm3,%ymm5
# Tests for op VPCMOV xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm6,%xmm7,%xmm0
VPCMOV (%esi),%xmm7,%xmm0,%xmm0
VPCMOV (%eax),%xmm7,%xmm3,%xmm0
VPCMOV %xmm7,%xmm0,%xmm0,%xmm5
VPCMOV %xmm7,%xmm0,%xmm0,%xmm0
VPCMOV %xmm7,%xmm0,%xmm0,%xmm7
VPCMOV (%eax),%xmm6,%xmm0,%xmm7
VPCMOV (%esi),%xmm0,%xmm0,%xmm7
VPCMOV (%ebx),%xmm7,%xmm3,%xmm7
VPCMOV (%ebx),%xmm0,%xmm3,%xmm7
VPCMOV (%esi),%xmm6,%xmm3,%xmm0
VPCMOV %xmm1,%xmm7,%xmm3,%xmm0
VPCMOV (%eax),%xmm7,%xmm7,%xmm5
VPCMOV %xmm7,%xmm6,%xmm7,%xmm5
VPCMOV %xmm7,%xmm6,%xmm0,%xmm0
VPCMOV (%ebx),%xmm7,%xmm3,%xmm5
# Tests for op VPCMOV ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm6,%ymm7,%ymm0
VPCMOV (%esi),%ymm7,%ymm0,%ymm0
VPCMOV (%eax),%ymm7,%ymm3,%ymm0
VPCMOV %ymm7,%ymm0,%ymm0,%ymm5
VPCMOV %ymm7,%ymm0,%ymm0,%ymm0
VPCMOV %ymm7,%ymm0,%ymm0,%ymm7
VPCMOV (%eax),%ymm6,%ymm0,%ymm7
VPCMOV (%esi),%ymm0,%ymm0,%ymm7
VPCMOV (%ebx),%ymm7,%ymm3,%ymm7
VPCMOV (%ebx),%ymm0,%ymm3,%ymm7
VPCMOV (%esi),%ymm6,%ymm3,%ymm0
VPCMOV %ymm1,%ymm7,%ymm3,%ymm0
VPCMOV (%eax),%ymm7,%ymm7,%ymm5
VPCMOV %ymm7,%ymm6,%ymm7,%ymm5
VPCMOV %ymm7,%ymm6,%ymm0,%ymm0
VPCMOV (%ebx),%ymm7,%ymm3,%ymm5
# Tests for op VPCOMB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMB $0x3,(%eax),%xmm0,%xmm7
VPCOMB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMB $0xFF,%xmm5,%xmm0,%xmm1
VPCOMB $0x0,%xmm5,%xmm5,%xmm1
VPCOMB $0x0,%xmm5,%xmm0,%xmm1
VPCOMB $0x0,%xmm0,%xmm7,%xmm1
VPCOMB $0x3,%xmm0,%xmm7,%xmm7
VPCOMB $0x0,%xmm5,%xmm7,%xmm7
VPCOMB $0xFF,%xmm7,%xmm7,%xmm7
VPCOMB $0x0,%xmm7,%xmm7,%xmm7
VPCOMB $0x3,%xmm7,%xmm0,%xmm0
VPCOMB $0xFF,%xmm7,%xmm0,%xmm1
VPCOMB $0xFF,(%eax),%xmm5,%xmm1
VPCOMB $0x3,(%eax),%xmm5,%xmm1
VPCOMB $0x3,%xmm0,%xmm0,%xmm7
VPCOMB $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMD $0x3,(%eax),%xmm0,%xmm7
VPCOMD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMD $0xFF,%xmm5,%xmm0,%xmm1
VPCOMD $0x0,%xmm5,%xmm5,%xmm1
VPCOMD $0x0,%xmm5,%xmm0,%xmm1
VPCOMD $0x0,%xmm0,%xmm7,%xmm1
VPCOMD $0x3,%xmm0,%xmm7,%xmm7
VPCOMD $0x0,%xmm5,%xmm7,%xmm7
VPCOMD $0xFF,%xmm7,%xmm7,%xmm7
VPCOMD $0x0,%xmm7,%xmm7,%xmm7
VPCOMD $0x3,%xmm7,%xmm0,%xmm0
VPCOMD $0xFF,%xmm7,%xmm0,%xmm1
VPCOMD $0xFF,(%eax),%xmm5,%xmm1
VPCOMD $0x3,(%eax),%xmm5,%xmm1
VPCOMD $0x3,%xmm0,%xmm0,%xmm7
VPCOMD $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMQ $0x3,(%eax),%xmm0,%xmm7
VPCOMQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMQ $0xFF,%xmm5,%xmm0,%xmm1
VPCOMQ $0x0,%xmm5,%xmm5,%xmm1
VPCOMQ $0x0,%xmm5,%xmm0,%xmm1
VPCOMQ $0x0,%xmm0,%xmm7,%xmm1
VPCOMQ $0x3,%xmm0,%xmm7,%xmm7
VPCOMQ $0x0,%xmm5,%xmm7,%xmm7
VPCOMQ $0xFF,%xmm7,%xmm7,%xmm7
VPCOMQ $0x0,%xmm7,%xmm7,%xmm7
VPCOMQ $0x3,%xmm7,%xmm0,%xmm0
VPCOMQ $0xFF,%xmm7,%xmm0,%xmm1
VPCOMQ $0xFF,(%eax),%xmm5,%xmm1
VPCOMQ $0x3,(%eax),%xmm5,%xmm1
VPCOMQ $0x3,%xmm0,%xmm0,%xmm7
VPCOMQ $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUB $0x3,(%eax),%xmm0,%xmm7
VPCOMUB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUB $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUB $0x0,%xmm5,%xmm5,%xmm1
VPCOMUB $0x0,%xmm5,%xmm0,%xmm1
VPCOMUB $0x0,%xmm0,%xmm7,%xmm1
VPCOMUB $0x3,%xmm0,%xmm7,%xmm7
VPCOMUB $0x0,%xmm5,%xmm7,%xmm7
VPCOMUB $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUB $0x0,%xmm7,%xmm7,%xmm7
VPCOMUB $0x3,%xmm7,%xmm0,%xmm0
VPCOMUB $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUB $0xFF,(%eax),%xmm5,%xmm1
VPCOMUB $0x3,(%eax),%xmm5,%xmm1
VPCOMUB $0x3,%xmm0,%xmm0,%xmm7
VPCOMUB $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUD $0x3,(%eax),%xmm0,%xmm7
VPCOMUD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUD $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUD $0x0,%xmm5,%xmm5,%xmm1
VPCOMUD $0x0,%xmm5,%xmm0,%xmm1
VPCOMUD $0x0,%xmm0,%xmm7,%xmm1
VPCOMUD $0x3,%xmm0,%xmm7,%xmm7
VPCOMUD $0x0,%xmm5,%xmm7,%xmm7
VPCOMUD $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUD $0x0,%xmm7,%xmm7,%xmm7
VPCOMUD $0x3,%xmm7,%xmm0,%xmm0
VPCOMUD $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUD $0xFF,(%eax),%xmm5,%xmm1
VPCOMUD $0x3,(%eax),%xmm5,%xmm1
VPCOMUD $0x3,%xmm0,%xmm0,%xmm7
VPCOMUD $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUQ $0x3,(%eax),%xmm0,%xmm7
VPCOMUQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUQ $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm5,%xmm5,%xmm1
VPCOMUQ $0x0,%xmm5,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm0,%xmm7,%xmm1
VPCOMUQ $0x3,%xmm0,%xmm7,%xmm7
VPCOMUQ $0x0,%xmm5,%xmm7,%xmm7
VPCOMUQ $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUQ $0x0,%xmm7,%xmm7,%xmm7
VPCOMUQ $0x3,%xmm7,%xmm0,%xmm0
VPCOMUQ $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUQ $0xFF,(%eax),%xmm5,%xmm1
VPCOMUQ $0x3,(%eax),%xmm5,%xmm1
VPCOMUQ $0x3,%xmm0,%xmm0,%xmm7
VPCOMUQ $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMUW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUW $0x3,(%eax),%xmm0,%xmm7
VPCOMUW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUW $0xFF,%xmm5,%xmm0,%xmm1
VPCOMUW $0x0,%xmm5,%xmm5,%xmm1
VPCOMUW $0x0,%xmm5,%xmm0,%xmm1
VPCOMUW $0x0,%xmm0,%xmm7,%xmm1
VPCOMUW $0x3,%xmm0,%xmm7,%xmm7
VPCOMUW $0x0,%xmm5,%xmm7,%xmm7
VPCOMUW $0xFF,%xmm7,%xmm7,%xmm7
VPCOMUW $0x0,%xmm7,%xmm7,%xmm7
VPCOMUW $0x3,%xmm7,%xmm0,%xmm0
VPCOMUW $0xFF,%xmm7,%xmm0,%xmm1
VPCOMUW $0xFF,(%eax),%xmm5,%xmm1
VPCOMUW $0x3,(%eax),%xmm5,%xmm1
VPCOMUW $0x3,%xmm0,%xmm0,%xmm7
VPCOMUW $0xFF,%xmm7,%xmm5,%xmm0
# Tests for op VPCOMW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMW $0x3,(%eax),%xmm0,%xmm7
VPCOMW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMW $0xFF,%xmm5,%xmm0,%xmm1
VPCOMW $0x0,%xmm5,%xmm5,%xmm1
VPCOMW $0x0,%xmm5,%xmm0,%xmm1
VPCOMW $0x0,%xmm0,%xmm7,%xmm1
VPCOMW $0x3,%xmm0,%xmm7,%xmm7
VPCOMW $0x0,%xmm5,%xmm7,%xmm7
VPCOMW $0xFF,%xmm7,%xmm7,%xmm7
VPCOMW $0x0,%xmm7,%xmm7,%xmm7
VPCOMW $0x3,%xmm7,%xmm0,%xmm0
VPCOMW $0xFF,%xmm7,%xmm0,%xmm1
VPCOMW $0xFF,(%eax),%xmm5,%xmm1
VPCOMW $0x3,(%eax),%xmm5,%xmm1
VPCOMW $0x3,%xmm0,%xmm0,%xmm7
VPCOMW $0xFF,%xmm7,%xmm5,%xmm0
# Testing VPERMIL2PD imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x0,%xmm5,(%eax),%xmm7,%xmm0
VPERMIL2PD $0x1,%xmm1,%xmm2,%xmm4,%xmm0
VPERMIL2PD $0x2,%xmm4,(%eax),%xmm7,%xmm2
VPERMIL2PD $0x3,%xmm3,(%ebx,%eax,4),%xmm4,%xmm7
VPERMIL2PD $0x0,%xmm3,%xmm7,%xmm0,%xmm6
VPERMIL2PD $0x1,%xmm7,(%esi,%edx),%xmm0,%xmm2
VPERMIL2PD $0x2,%xmm3,%xmm5,%xmm4,%xmm7
VPERMIL2PD $0x3,%xmm3,%xmm0,%xmm1,%xmm2
# Testing VPERMIL2PD imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x2,%xmm7,%xmm1,%xmm2,%xmm6
VPERMIL2PD $0x1,0x788(%ecx,%ebx,1),%xmm0,%xmm2,%xmm7
VPERMIL2PD $0x0,%xmm4,%xmm1,%xmm0,%xmm7
VPERMIL2PD $0x3,%xmm3,%xmm7,%xmm4,%xmm0
VPERMIL2PD $0x3,0x788(%ecx,%ebx,2),%xmm7,%xmm0,%xmm6
VPERMIL2PD $0x1,%xmm3,%xmm7,%xmm5,%xmm0
VPERMIL2PD $0x2,%xmm2,%xmm1,%xmm4,%xmm6
VPERMIL2PD $0x3,%xmm0,%xmm3,%xmm2,%xmm7
# Testing VPERMIL2PD imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x3,%ymm6,%ymm7,%ymm1,%ymm2
VPERMIL2PD $0x1,%ymm6,%ymm7,%ymm1,%ymm4
VPERMIL2PD $0x2,%ymm0,0x5(%edi,%eax,4),%ymm5,%ymm7
VPERMIL2PD $0x0,%ymm5,%ymm6,%ymm0,%ymm2
VPERMIL2PD $0x3,%ymm4,%ymm7,%ymm3,%ymm0
VPERMIL2PD $0x0,%ymm7,%ymm6,%ymm2,%ymm0
VPERMIL2PD $0x2,%ymm4,(%esi),%ymm1,%ymm7
VPERMIL2PD $0x1,%ymm6,%ymm0,%ymm1,%ymm7
# Testing VPERMIL2PD imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x1,(%ecx),%ymm5,%ymm7,%ymm0
VPERMIL2PD $0x3,(%esi,%eax,2),%ymm4,%ymm7,%ymm0
VPERMIL2PD $0x0,(%ecx),%ymm0,%ymm3,%ymm7
VPERMIL2PD $0x2,(%esi,%eax,1),%ymm2,%ymm6,%ymm7
VPERMIL2PD $0x0,(%ecx),%ymm0,%ymm6,%ymm1
VPERMIL2PD $0x2,%ymm2,%ymm3,%ymm7,%ymm0
VPERMIL2PD $0x3,%ymm0,%ymm2,%ymm7,%ymm1
VPERMIL2PD $0x1,%ymm5,%ymm0,%ymm4,%ymm7
# Testing VPERMIL2PS imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x3,%xmm3,%xmm4,%xmm0,%xmm7
VPERMIL2PS $0x1,%xmm0,(%eax),%xmm4,%xmm7
VPERMIL2PS $0x2,%xmm3,(%eax),%xmm7,%xmm7
VPERMIL2PS $0x3,%xmm7,(%ebx,%eax,8),%xmm7,%xmm2
VPERMIL2PS $0x2,%xmm7,%xmm0,%xmm7,%xmm7
VPERMIL2PS $0x3,%xmm7,(%esi,%edx),%xmm0,%xmm7
VPERMIL2PS $0x1,%xmm7,%xmm4,%xmm7,%xmm7
VPERMIL2PS $0x0,%xmm3,(%eax),%xmm7,%xmm2
# Testing VPERMIL2PS imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x2,(%ebx),%xmm7,%xmm7,%xmm6
VPERMIL2PS $0x3,(%ebx,%ebx),%xmm7,%xmm5,%xmm0
VPERMIL2PS $0x0,(%ebx,%ebx),%xmm1,%xmm7,%xmm6
VPERMIL2PS $0x2,%xmm0,%xmm1,%xmm2,%xmm7
VPERMIL2PS $0x2,(%ebx,%ebx),%xmm7,%xmm2,%xmm6
VPERMIL2PS $0x3,(%ebx,%ebx),%xmm1,%xmm7,%xmm6
VPERMIL2PS $0x0,(%ebx,%ebx),%xmm7,%xmm2,%xmm7
VPERMIL2PS $0x1,%xmm7,%xmm1,%xmm7,%xmm7
# Testing VPERMIL2PS imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x1,%ymm6,%ymm7,%ymm1,%ymm2
VPERMIL2PS $0x3,%ymm7,%ymm6,%ymm7,%ymm0
VPERMIL2PS $0x2,%ymm5,%ymm6,%ymm7,%ymm2
VPERMIL2PS $0x0,%ymm2,%ymm0,%ymm7,%ymm7
VPERMIL2PS $0x3,%ymm6,(%edi,%ecx,8),%ymm7,%ymm0
VPERMIL2PS $0x2,%ymm6,%ymm7,%ymm7,%ymm0
VPERMIL2PS $0x0,%ymm7,%ymm6,%ymm1,%ymm2
VPERMIL2PS $0x1,%ymm6,(%esi),%ymm1,%ymm0
# Testing VPERMIL2PS imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x2,0xC(%ebx,%eax,2),%ymm4,%ymm0,%ymm7
VPERMIL2PS $0x1,%ymm5,%ymm6,%ymm2,%ymm0
VPERMIL2PS $0x3,(%esi,%eax,1),%ymm4,%ymm6,%ymm7
VPERMIL2PS $0x1,(%esi,%ebx,8),%ymm3,%ymm6,%ymm0
VPERMIL2PS $0x0,(%eax,%ecx,2),%ymm7,%ymm0,%ymm1
VPERMIL2PS $0x2,%ymm6,%ymm7,%ymm7,%ymm7
VPERMIL2PS $0x3,%ymm4,%ymm3,%ymm2,%ymm0
VPERMIL2PS $0x0,%ymm0,%ymm6,%ymm7,%ymm7
# Tests for op VPHADDBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDBD %xmm7,%xmm7
VPHADDBD %xmm0,%xmm6
VPHADDBD (%ebx),%xmm0
VPHADDBD (%esi),%xmm7
VPHADDBD %xmm0,%xmm0
VPHADDBD (%eax),%xmm7
VPHADDBD %xmm7,%xmm0
VPHADDBD %xmm1,%xmm6
VPHADDBD %xmm1,%xmm0
VPHADDBD %xmm0,%xmm7
VPHADDBD (%eax),%xmm6
VPHADDBD %xmm1,%xmm7
VPHADDBD (%esi),%xmm0
VPHADDBD (%ebx),%xmm7
VPHADDBD (%esi),%xmm6
VPHADDBD (%eax),%xmm0
# Tests for op VPHADDBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDBQ %xmm7,%xmm7
VPHADDBQ %xmm0,%xmm6
VPHADDBQ (%ebx),%xmm0
VPHADDBQ (%esi),%xmm7
VPHADDBQ %xmm0,%xmm0
VPHADDBQ (%eax),%xmm7
VPHADDBQ %xmm7,%xmm0
VPHADDBQ %xmm1,%xmm6
VPHADDBQ %xmm1,%xmm0
VPHADDBQ %xmm0,%xmm7
VPHADDBQ (%eax),%xmm6
VPHADDBQ %xmm1,%xmm7
VPHADDBQ (%esi),%xmm0
VPHADDBQ (%ebx),%xmm7
VPHADDBQ (%esi),%xmm6
VPHADDBQ (%eax),%xmm0
# Tests for op VPHADDBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDBW %xmm7,%xmm7
VPHADDBW %xmm0,%xmm6
VPHADDBW (%ebx),%xmm0
VPHADDBW (%esi),%xmm7
VPHADDBW %xmm0,%xmm0
VPHADDBW (%eax),%xmm7
VPHADDBW %xmm7,%xmm0
VPHADDBW %xmm1,%xmm6
VPHADDBW %xmm1,%xmm0
VPHADDBW %xmm0,%xmm7
VPHADDBW (%eax),%xmm6
VPHADDBW %xmm1,%xmm7
VPHADDBW (%esi),%xmm0
VPHADDBW (%ebx),%xmm7
VPHADDBW (%esi),%xmm6
VPHADDBW (%eax),%xmm0
# Tests for op VPHADDDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDDQ %xmm7,%xmm7
VPHADDDQ %xmm0,%xmm6
VPHADDDQ (%ebx),%xmm0
VPHADDDQ (%esi),%xmm7
VPHADDDQ %xmm0,%xmm0
VPHADDDQ (%eax),%xmm7
VPHADDDQ %xmm7,%xmm0
VPHADDDQ %xmm1,%xmm6
VPHADDDQ %xmm1,%xmm0
VPHADDDQ %xmm0,%xmm7
VPHADDDQ (%eax),%xmm6
VPHADDDQ %xmm1,%xmm7
VPHADDDQ (%esi),%xmm0
VPHADDDQ (%ebx),%xmm7
VPHADDDQ (%esi),%xmm6
VPHADDDQ (%eax),%xmm0
# Tests for op VPHADDUBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBD %xmm7,%xmm7
VPHADDUBD %xmm0,%xmm6
VPHADDUBD (%ebx),%xmm0
VPHADDUBD (%esi),%xmm7
VPHADDUBD %xmm0,%xmm0
VPHADDUBD (%eax),%xmm7
VPHADDUBD %xmm7,%xmm0
VPHADDUBD %xmm1,%xmm6
VPHADDUBD %xmm1,%xmm0
VPHADDUBD %xmm0,%xmm7
VPHADDUBD (%eax),%xmm6
VPHADDUBD %xmm1,%xmm7
VPHADDUBD (%esi),%xmm0
VPHADDUBD (%ebx),%xmm7
VPHADDUBD (%esi),%xmm6
VPHADDUBD (%eax),%xmm0
# Tests for op VPHADDUBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBQ %xmm7,%xmm7
VPHADDUBQ %xmm0,%xmm6
VPHADDUBQ (%ebx),%xmm0
VPHADDUBQ (%esi),%xmm7
VPHADDUBQ %xmm0,%xmm0
VPHADDUBQ (%eax),%xmm7
VPHADDUBQ %xmm7,%xmm0
VPHADDUBQ %xmm1,%xmm6
VPHADDUBQ %xmm1,%xmm0
VPHADDUBQ %xmm0,%xmm7
VPHADDUBQ (%eax),%xmm6
VPHADDUBQ %xmm1,%xmm7
VPHADDUBQ (%esi),%xmm0
VPHADDUBQ (%ebx),%xmm7
VPHADDUBQ (%esi),%xmm6
VPHADDUBQ (%eax),%xmm0
# Tests for op VPHADDUBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBW %xmm7,%xmm7
VPHADDUBW %xmm0,%xmm6
VPHADDUBW (%ebx),%xmm0
VPHADDUBW (%esi),%xmm7
VPHADDUBW %xmm0,%xmm0
VPHADDUBW (%eax),%xmm7
VPHADDUBW %xmm7,%xmm0
VPHADDUBW %xmm1,%xmm6
VPHADDUBW %xmm1,%xmm0
VPHADDUBW %xmm0,%xmm7
VPHADDUBW (%eax),%xmm6
VPHADDUBW %xmm1,%xmm7
VPHADDUBW (%esi),%xmm0
VPHADDUBW (%ebx),%xmm7
VPHADDUBW (%esi),%xmm6
VPHADDUBW (%eax),%xmm0
# Tests for op VPHADDUDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUDQ %xmm7,%xmm7
VPHADDUDQ %xmm0,%xmm6
VPHADDUDQ (%ebx),%xmm0
VPHADDUDQ (%esi),%xmm7
VPHADDUDQ %xmm0,%xmm0
VPHADDUDQ (%eax),%xmm7
VPHADDUDQ %xmm7,%xmm0
VPHADDUDQ %xmm1,%xmm6
VPHADDUDQ %xmm1,%xmm0
VPHADDUDQ %xmm0,%xmm7
VPHADDUDQ (%eax),%xmm6
VPHADDUDQ %xmm1,%xmm7
VPHADDUDQ (%esi),%xmm0
VPHADDUDQ (%ebx),%xmm7
VPHADDUDQ (%esi),%xmm6
VPHADDUDQ (%eax),%xmm0
# Tests for op VPHADDUWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWD %xmm7,%xmm7
VPHADDUWD %xmm0,%xmm6
VPHADDUWD (%ebx),%xmm0
VPHADDUWD (%esi),%xmm7
VPHADDUWD %xmm0,%xmm0
VPHADDUWD (%eax),%xmm7
VPHADDUWD %xmm7,%xmm0
VPHADDUWD %xmm1,%xmm6
VPHADDUWD %xmm1,%xmm0
VPHADDUWD %xmm0,%xmm7
VPHADDUWD (%eax),%xmm6
VPHADDUWD %xmm1,%xmm7
VPHADDUWD (%esi),%xmm0
VPHADDUWD (%ebx),%xmm7
VPHADDUWD (%esi),%xmm6
VPHADDUWD (%eax),%xmm0
# Tests for op VPHADDUWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWQ %xmm7,%xmm7
VPHADDUWQ %xmm0,%xmm6
VPHADDUWQ (%ebx),%xmm0
VPHADDUWQ (%esi),%xmm7
VPHADDUWQ %xmm0,%xmm0
VPHADDUWQ (%eax),%xmm7
VPHADDUWQ %xmm7,%xmm0
VPHADDUWQ %xmm1,%xmm6
VPHADDUWQ %xmm1,%xmm0
VPHADDUWQ %xmm0,%xmm7
VPHADDUWQ (%eax),%xmm6
VPHADDUWQ %xmm1,%xmm7
VPHADDUWQ (%esi),%xmm0
VPHADDUWQ (%ebx),%xmm7
VPHADDUWQ (%esi),%xmm6
VPHADDUWQ (%eax),%xmm0
# Tests for op VPHADDWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDWD %xmm7,%xmm7
VPHADDWD %xmm0,%xmm6
VPHADDWD (%ebx),%xmm0
VPHADDWD (%esi),%xmm7
VPHADDWD %xmm0,%xmm0
VPHADDWD (%eax),%xmm7
VPHADDWD %xmm7,%xmm0
VPHADDWD %xmm1,%xmm6
VPHADDWD %xmm1,%xmm0
VPHADDWD %xmm0,%xmm7
VPHADDWD (%eax),%xmm6
VPHADDWD %xmm1,%xmm7
VPHADDWD (%esi),%xmm0
VPHADDWD (%ebx),%xmm7
VPHADDWD (%esi),%xmm6
VPHADDWD (%eax),%xmm0
# Tests for op VPHADDWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDWQ %xmm7,%xmm7
VPHADDWQ %xmm0,%xmm6
VPHADDWQ (%ebx),%xmm0
VPHADDWQ (%esi),%xmm7
VPHADDWQ %xmm0,%xmm0
VPHADDWQ (%eax),%xmm7
VPHADDWQ %xmm7,%xmm0
VPHADDWQ %xmm1,%xmm6
VPHADDWQ %xmm1,%xmm0
VPHADDWQ %xmm0,%xmm7
VPHADDWQ (%eax),%xmm6
VPHADDWQ %xmm1,%xmm7
VPHADDWQ (%esi),%xmm0
VPHADDWQ (%ebx),%xmm7
VPHADDWQ (%esi),%xmm6
VPHADDWQ (%eax),%xmm0
# Tests for op VPHSUBBW xmm2/mem128, xmm1 (at&t syntax)
VPHSUBBW %xmm7,%xmm7
VPHSUBBW %xmm0,%xmm6
VPHSUBBW (%ebx),%xmm0
VPHSUBBW (%esi),%xmm7
VPHSUBBW %xmm0,%xmm0
VPHSUBBW (%eax),%xmm7
VPHSUBBW %xmm7,%xmm0
VPHSUBBW %xmm1,%xmm6
VPHSUBBW %xmm1,%xmm0
VPHSUBBW %xmm0,%xmm7
VPHSUBBW (%eax),%xmm6
VPHSUBBW %xmm1,%xmm7
VPHSUBBW (%esi),%xmm0
VPHSUBBW (%ebx),%xmm7
VPHSUBBW (%esi),%xmm6
VPHSUBBW (%eax),%xmm0
# Tests for op VPHSUBDQ xmm2/mem128, xmm1 (at&t syntax)
VPHSUBDQ %xmm7,%xmm7
VPHSUBDQ %xmm0,%xmm6
VPHSUBDQ (%ebx),%xmm0
VPHSUBDQ (%esi),%xmm7
VPHSUBDQ %xmm0,%xmm0
VPHSUBDQ (%eax),%xmm7
VPHSUBDQ %xmm7,%xmm0
VPHSUBDQ %xmm1,%xmm6
VPHSUBDQ %xmm1,%xmm0
VPHSUBDQ %xmm0,%xmm7
VPHSUBDQ (%eax),%xmm6
VPHSUBDQ %xmm1,%xmm7
VPHSUBDQ (%esi),%xmm0
VPHSUBDQ (%ebx),%xmm7
VPHSUBDQ (%esi),%xmm6
VPHSUBDQ (%eax),%xmm0
# Tests for op VPHSUBWD xmm2/mem128, xmm1 (at&t syntax)
VPHSUBWD %xmm7,%xmm7
VPHSUBWD %xmm0,%xmm6
VPHSUBWD (%ebx),%xmm0
VPHSUBWD (%esi),%xmm7
VPHSUBWD %xmm0,%xmm0
VPHSUBWD (%eax),%xmm7
VPHSUBWD %xmm7,%xmm0
VPHSUBWD %xmm1,%xmm6
VPHSUBWD %xmm1,%xmm0
VPHSUBWD %xmm0,%xmm7
VPHSUBWD (%eax),%xmm6
VPHSUBWD %xmm1,%xmm7
VPHSUBWD (%esi),%xmm0
VPHSUBWD (%ebx),%xmm7
VPHSUBWD (%esi),%xmm6
VPHSUBWD (%eax),%xmm0
# Tests for op VPMACSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSDD %xmm7,(%esi),%xmm0,%xmm0
VPMACSDD %xmm1,(%esi),%xmm3,%xmm0
VPMACSDD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSDD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSDD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSDD %xmm1,(%edx),%xmm0,%xmm7
VPMACSDD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSDD %xmm7,(%esi),%xmm3,%xmm7
VPMACSDD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSDD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSDD %xmm0,(%edx),%xmm3,%xmm0
VPMACSDD %xmm1,(%edx),%xmm7,%xmm5
VPMACSDD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSDD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSDD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQH %xmm0,%xmm7,%xmm7,%xmm0
VPMACSDQH %xmm7,(%esi),%xmm0,%xmm0
VPMACSDQH %xmm1,(%esi),%xmm3,%xmm0
VPMACSDQH %xmm1,%xmm0,%xmm0,%xmm5
VPMACSDQH %xmm1,%xmm6,%xmm0,%xmm0
VPMACSDQH %xmm1,%xmm6,%xmm0,%xmm7
VPMACSDQH %xmm1,(%edx),%xmm0,%xmm7
VPMACSDQH %xmm7,%xmm0,%xmm0,%xmm7
VPMACSDQH %xmm7,(%esi),%xmm3,%xmm7
VPMACSDQH %xmm7,%xmm6,%xmm3,%xmm7
VPMACSDQH %xmm7,%xmm7,%xmm3,%xmm0
VPMACSDQH %xmm0,(%edx),%xmm3,%xmm0
VPMACSDQH %xmm1,(%edx),%xmm7,%xmm5
VPMACSDQH %xmm1,%xmm7,%xmm7,%xmm5
VPMACSDQH %xmm1,%xmm7,%xmm0,%xmm0
VPMACSDQH %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQL %xmm0,%xmm7,%xmm7,%xmm0
VPMACSDQL %xmm7,(%esi),%xmm0,%xmm0
VPMACSDQL %xmm1,(%esi),%xmm3,%xmm0
VPMACSDQL %xmm1,%xmm0,%xmm0,%xmm5
VPMACSDQL %xmm1,%xmm6,%xmm0,%xmm0
VPMACSDQL %xmm1,%xmm6,%xmm0,%xmm7
VPMACSDQL %xmm1,(%edx),%xmm0,%xmm7
VPMACSDQL %xmm7,%xmm0,%xmm0,%xmm7
VPMACSDQL %xmm7,(%esi),%xmm3,%xmm7
VPMACSDQL %xmm7,%xmm6,%xmm3,%xmm7
VPMACSDQL %xmm7,%xmm7,%xmm3,%xmm0
VPMACSDQL %xmm0,(%edx),%xmm3,%xmm0
VPMACSDQL %xmm1,(%edx),%xmm7,%xmm5
VPMACSDQL %xmm1,%xmm7,%xmm7,%xmm5
VPMACSDQL %xmm1,%xmm7,%xmm0,%xmm0
VPMACSDQL %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSDD %xmm7,(%esi),%xmm0,%xmm0
VPMACSSDD %xmm1,(%esi),%xmm3,%xmm0
VPMACSSDD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSDD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSDD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSDD %xmm1,(%edx),%xmm0,%xmm7
VPMACSSDD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSDD %xmm7,(%esi),%xmm3,%xmm7
VPMACSSDD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSDD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSDD %xmm0,(%edx),%xmm3,%xmm0
VPMACSSDD %xmm1,(%edx),%xmm7,%xmm5
VPMACSSDD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSDD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSDD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQH %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSDQH %xmm7,(%esi),%xmm0,%xmm0
VPMACSSDQH %xmm1,(%esi),%xmm3,%xmm0
VPMACSSDQH %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSDQH %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSDQH %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSDQH %xmm1,(%edx),%xmm0,%xmm7
VPMACSSDQH %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSDQH %xmm7,(%esi),%xmm3,%xmm7
VPMACSSDQH %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSDQH %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSDQH %xmm0,(%edx),%xmm3,%xmm0
VPMACSSDQH %xmm1,(%edx),%xmm7,%xmm5
VPMACSSDQH %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSDQH %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSDQH %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQL %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSDQL %xmm7,(%esi),%xmm0,%xmm0
VPMACSSDQL %xmm1,(%esi),%xmm3,%xmm0
VPMACSSDQL %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSDQL %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSDQL %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSDQL %xmm1,(%edx),%xmm0,%xmm7
VPMACSSDQL %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSDQL %xmm7,(%esi),%xmm3,%xmm7
VPMACSSDQL %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSDQL %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSDQL %xmm0,(%edx),%xmm3,%xmm0
VPMACSSDQL %xmm1,(%edx),%xmm7,%xmm5
VPMACSSDQL %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSDQL %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSDQL %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSWD %xmm7,(%esi),%xmm0,%xmm0
VPMACSSWD %xmm1,(%esi),%xmm3,%xmm0
VPMACSSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSWD %xmm1,(%edx),%xmm0,%xmm7
VPMACSSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSWD %xmm7,(%esi),%xmm3,%xmm7
VPMACSSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSWD %xmm0,(%edx),%xmm3,%xmm0
VPMACSSWD %xmm1,(%edx),%xmm7,%xmm5
VPMACSSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWW %xmm0,%xmm7,%xmm7,%xmm0
VPMACSSWW %xmm7,(%esi),%xmm0,%xmm0
VPMACSSWW %xmm1,(%esi),%xmm3,%xmm0
VPMACSSWW %xmm1,%xmm0,%xmm0,%xmm5
VPMACSSWW %xmm1,%xmm6,%xmm0,%xmm0
VPMACSSWW %xmm1,%xmm6,%xmm0,%xmm7
VPMACSSWW %xmm1,(%edx),%xmm0,%xmm7
VPMACSSWW %xmm7,%xmm0,%xmm0,%xmm7
VPMACSSWW %xmm7,(%esi),%xmm3,%xmm7
VPMACSSWW %xmm7,%xmm6,%xmm3,%xmm7
VPMACSSWW %xmm7,%xmm7,%xmm3,%xmm0
VPMACSSWW %xmm0,(%edx),%xmm3,%xmm0
VPMACSSWW %xmm1,(%edx),%xmm7,%xmm5
VPMACSSWW %xmm1,%xmm7,%xmm7,%xmm5
VPMACSSWW %xmm1,%xmm7,%xmm0,%xmm0
VPMACSSWW %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMACSWD %xmm7,(%esi),%xmm0,%xmm0
VPMACSWD %xmm1,(%esi),%xmm3,%xmm0
VPMACSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMACSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMACSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMACSWD %xmm1,(%edx),%xmm0,%xmm7
VPMACSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMACSWD %xmm7,(%esi),%xmm3,%xmm7
VPMACSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMACSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMACSWD %xmm0,(%edx),%xmm3,%xmm0
VPMACSWD %xmm1,(%edx),%xmm7,%xmm5
VPMACSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMACSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMACSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMACSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWW %xmm0,%xmm7,%xmm7,%xmm0
VPMACSWW %xmm7,(%esi),%xmm0,%xmm0
VPMACSWW %xmm1,(%esi),%xmm3,%xmm0
VPMACSWW %xmm1,%xmm0,%xmm0,%xmm5
VPMACSWW %xmm1,%xmm6,%xmm0,%xmm0
VPMACSWW %xmm1,%xmm6,%xmm0,%xmm7
VPMACSWW %xmm1,(%edx),%xmm0,%xmm7
VPMACSWW %xmm7,%xmm0,%xmm0,%xmm7
VPMACSWW %xmm7,(%esi),%xmm3,%xmm7
VPMACSWW %xmm7,%xmm6,%xmm3,%xmm7
VPMACSWW %xmm7,%xmm7,%xmm3,%xmm0
VPMACSWW %xmm0,(%edx),%xmm3,%xmm0
VPMACSWW %xmm1,(%edx),%xmm7,%xmm5
VPMACSWW %xmm1,%xmm7,%xmm7,%xmm5
VPMACSWW %xmm1,%xmm7,%xmm0,%xmm0
VPMACSWW %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMADCSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMADCSSWD %xmm7,(%esi),%xmm0,%xmm0
VPMADCSSWD %xmm1,(%esi),%xmm3,%xmm0
VPMADCSSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMADCSSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMADCSSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMADCSSWD %xmm1,(%edx),%xmm0,%xmm7
VPMADCSSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMADCSSWD %xmm7,(%esi),%xmm3,%xmm7
VPMADCSSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMADCSSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMADCSSWD %xmm0,(%edx),%xmm3,%xmm0
VPMADCSSWD %xmm1,(%edx),%xmm7,%xmm5
VPMADCSSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMADCSSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMADCSSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPMADCSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSWD %xmm0,%xmm7,%xmm7,%xmm0
VPMADCSWD %xmm7,(%esi),%xmm0,%xmm0
VPMADCSWD %xmm1,(%esi),%xmm3,%xmm0
VPMADCSWD %xmm1,%xmm0,%xmm0,%xmm5
VPMADCSWD %xmm1,%xmm6,%xmm0,%xmm0
VPMADCSWD %xmm1,%xmm6,%xmm0,%xmm7
VPMADCSWD %xmm1,(%edx),%xmm0,%xmm7
VPMADCSWD %xmm7,%xmm0,%xmm0,%xmm7
VPMADCSWD %xmm7,(%esi),%xmm3,%xmm7
VPMADCSWD %xmm7,%xmm6,%xmm3,%xmm7
VPMADCSWD %xmm7,%xmm7,%xmm3,%xmm0
VPMADCSWD %xmm0,(%edx),%xmm3,%xmm0
VPMADCSWD %xmm1,(%edx),%xmm7,%xmm5
VPMADCSWD %xmm1,%xmm7,%xmm7,%xmm5
VPMADCSWD %xmm1,%xmm7,%xmm0,%xmm0
VPMADCSWD %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPPERM xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm6,%xmm7,%xmm0
VPPERM (%esi),%xmm7,%xmm0,%xmm0
VPPERM (%eax),%xmm7,%xmm3,%xmm0
VPPERM %xmm7,%xmm0,%xmm0,%xmm5
VPPERM %xmm7,%xmm0,%xmm0,%xmm0
VPPERM %xmm7,%xmm0,%xmm0,%xmm7
VPPERM (%eax),%xmm6,%xmm0,%xmm7
VPPERM (%esi),%xmm0,%xmm0,%xmm7
VPPERM (%ebx),%xmm7,%xmm3,%xmm7
VPPERM (%ebx),%xmm0,%xmm3,%xmm7
VPPERM (%esi),%xmm6,%xmm3,%xmm0
VPPERM %xmm1,%xmm7,%xmm3,%xmm0
VPPERM (%eax),%xmm7,%xmm7,%xmm5
VPPERM %xmm7,%xmm6,%xmm7,%xmm5
VPPERM %xmm7,%xmm6,%xmm0,%xmm0
VPPERM (%ebx),%xmm7,%xmm3,%xmm5
# Tests for op VPPERM xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm7,%xmm7,%xmm0
VPPERM %xmm7,(%esi),%xmm0,%xmm0
VPPERM %xmm1,(%esi),%xmm3,%xmm0
VPPERM %xmm1,%xmm0,%xmm0,%xmm5
VPPERM %xmm1,%xmm6,%xmm0,%xmm0
VPPERM %xmm1,%xmm6,%xmm0,%xmm7
VPPERM %xmm1,(%edx),%xmm0,%xmm7
VPPERM %xmm7,%xmm0,%xmm0,%xmm7
VPPERM %xmm7,(%esi),%xmm3,%xmm7
VPPERM %xmm7,%xmm6,%xmm3,%xmm7
VPPERM %xmm7,%xmm7,%xmm3,%xmm0
VPPERM %xmm0,(%edx),%xmm3,%xmm0
VPPERM %xmm1,(%edx),%xmm7,%xmm5
VPPERM %xmm1,%xmm7,%xmm7,%xmm5
VPPERM %xmm1,%xmm7,%xmm0,%xmm0
VPPERM %xmm7,(%esi),%xmm3,%xmm5
# Tests for op VPROTB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTB %xmm7,%xmm0,%xmm3
VPROTB %xmm7,%xmm6,%xmm7
VPROTB %xmm7,%xmm0,%xmm0
VPROTB %xmm1,(%esi),%xmm3
VPROTB %xmm0,%xmm7,%xmm0
VPROTB %xmm0,%xmm7,%xmm3
VPROTB %xmm0,%xmm6,%xmm0
VPROTB %xmm1,%xmm6,%xmm0
VPROTB %xmm7,%xmm7,%xmm3
VPROTB %xmm0,(%esi),%xmm7
VPROTB %xmm0,%xmm6,%xmm7
VPROTB %xmm7,(%esi),%xmm3
VPROTB %xmm7,(%edx),%xmm0
VPROTB %xmm1,(%esi),%xmm7
VPROTB %xmm1,%xmm7,%xmm0
VPROTB %xmm0,(%edx),%xmm3
# Tests for op VPROTB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTB (%ebx),%xmm0,%xmm3
VPROTB (%ebx),%xmm0,%xmm7
VPROTB (%esi),%xmm0,%xmm0
VPROTB (%eax),%xmm7,%xmm3
VPROTB %xmm1,%xmm6,%xmm0
VPROTB %xmm1,%xmm6,%xmm3
VPROTB %xmm1,%xmm0,%xmm0
VPROTB %xmm7,%xmm0,%xmm0
VPROTB (%esi),%xmm6,%xmm3
VPROTB %xmm1,%xmm7,%xmm7
VPROTB %xmm0,%xmm0,%xmm7
VPROTB (%ebx),%xmm7,%xmm3
VPROTB (%ebx),%xmm7,%xmm0
VPROTB (%eax),%xmm7,%xmm7
VPROTB (%eax),%xmm6,%xmm0
VPROTB %xmm1,%xmm7,%xmm3
# Tests for op VPROTB imm8, xmm2, xmm1 (at&t syntax)
VPROTB $0x3,%xmm5,%xmm2
VPROTB $0xFF,%xmm0,%xmm0
VPROTB $0xFF,%xmm5,%xmm7
VPROTB $0x0,%xmm5,%xmm7
VPROTB $0x0,%xmm7,%xmm7
VPROTB $0x0,%xmm0,%xmm2
VPROTB $0xFF,%xmm5,%xmm0
VPROTB $0x3,%xmm0,%xmm0
VPROTB $0x3,%xmm5,%xmm0
VPROTB $0x0,%xmm0,%xmm7
VPROTB $0xFF,%xmm7,%xmm0
VPROTB $0xFF,%xmm0,%xmm2
VPROTB $0xFF,%xmm7,%xmm2
VPROTB $0x3,%xmm7,%xmm7
VPROTB $0xFF,%xmm5,%xmm2
VPROTB $0x3,%xmm0,%xmm2
# Tests for op VPROTD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTD %xmm7,%xmm0,%xmm3
VPROTD %xmm7,%xmm6,%xmm7
VPROTD %xmm7,%xmm0,%xmm0
VPROTD %xmm1,(%esi),%xmm3
VPROTD %xmm0,%xmm7,%xmm0
VPROTD %xmm0,%xmm7,%xmm3
VPROTD %xmm0,%xmm6,%xmm0
VPROTD %xmm1,%xmm6,%xmm0
VPROTD %xmm7,%xmm7,%xmm3
VPROTD %xmm0,(%esi),%xmm7
VPROTD %xmm0,%xmm6,%xmm7
VPROTD %xmm7,(%esi),%xmm3
VPROTD %xmm7,(%edx),%xmm0
VPROTD %xmm1,(%esi),%xmm7
VPROTD %xmm1,%xmm7,%xmm0
VPROTD %xmm0,(%edx),%xmm3
# Tests for op VPROTD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTD (%ebx),%xmm0,%xmm3
VPROTD (%ebx),%xmm0,%xmm7
VPROTD (%esi),%xmm0,%xmm0
VPROTD (%eax),%xmm7,%xmm3
VPROTD %xmm1,%xmm6,%xmm0
VPROTD %xmm1,%xmm6,%xmm3
VPROTD %xmm1,%xmm0,%xmm0
VPROTD %xmm7,%xmm0,%xmm0
VPROTD (%esi),%xmm6,%xmm3
VPROTD %xmm1,%xmm7,%xmm7
VPROTD %xmm0,%xmm0,%xmm7
VPROTD (%ebx),%xmm7,%xmm3
VPROTD (%ebx),%xmm7,%xmm0
VPROTD (%eax),%xmm7,%xmm7
VPROTD (%eax),%xmm6,%xmm0
VPROTD %xmm1,%xmm7,%xmm3
# Tests for op VPROTD imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTD $0x0,%xmm7,%xmm7
VPROTD $0x0,(%ebx),%xmm7
VPROTD $0x0,%xmm0,%xmm5
VPROTD $0xFF,%xmm5,%xmm0
VPROTD $0x3,%xmm0,%xmm0
VPROTD $0x3,%xmm7,%xmm0
VPROTD $0x0,%xmm5,%xmm5
VPROTD $0x0,%xmm0,%xmm7
VPROTD $0x3,(%eax),%xmm0
VPROTD $0xFF,(%ebx),%xmm0
VPROTD $0x0,(%eax),%xmm7
VPROTD $0xFF,%xmm7,%xmm7
VPROTD $0xFF,%xmm5,%xmm5
VPROTD $0xFF,(%ebx),%xmm5
VPROTD $0xFF,%xmm7,%xmm0
VPROTD $0x3,(%eax),%xmm7
# Tests for op VPROTQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ %xmm7,%xmm0,%xmm3
VPROTQ %xmm7,%xmm6,%xmm7
VPROTQ %xmm7,%xmm0,%xmm0
VPROTQ %xmm1,(%esi),%xmm3
VPROTQ %xmm0,%xmm7,%xmm0
VPROTQ %xmm0,%xmm7,%xmm3
VPROTQ %xmm0,%xmm6,%xmm0
VPROTQ %xmm1,%xmm6,%xmm0
VPROTQ %xmm7,%xmm7,%xmm3
VPROTQ %xmm0,(%esi),%xmm7
VPROTQ %xmm0,%xmm6,%xmm7
VPROTQ %xmm7,(%esi),%xmm3
VPROTQ %xmm7,(%edx),%xmm0
VPROTQ %xmm1,(%esi),%xmm7
VPROTQ %xmm1,%xmm7,%xmm0
VPROTQ %xmm0,(%edx),%xmm3
# Tests for op VPROTQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTQ (%ebx),%xmm0,%xmm3
VPROTQ (%ebx),%xmm0,%xmm7
VPROTQ (%esi),%xmm0,%xmm0
VPROTQ (%eax),%xmm7,%xmm3
VPROTQ %xmm1,%xmm6,%xmm0
VPROTQ %xmm1,%xmm6,%xmm3
VPROTQ %xmm1,%xmm0,%xmm0
VPROTQ %xmm7,%xmm0,%xmm0
VPROTQ (%esi),%xmm6,%xmm3
VPROTQ %xmm1,%xmm7,%xmm7
VPROTQ %xmm0,%xmm0,%xmm7
VPROTQ (%ebx),%xmm7,%xmm3
VPROTQ (%ebx),%xmm7,%xmm0
VPROTQ (%eax),%xmm7,%xmm7
VPROTQ (%eax),%xmm6,%xmm0
VPROTQ %xmm1,%xmm7,%xmm3
# Tests for op VPROTQ imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ $0x0,%xmm7,%xmm7
VPROTQ $0x0,(%ebx),%xmm7
VPROTQ $0x0,%xmm0,%xmm5
VPROTQ $0xFF,%xmm5,%xmm0
VPROTQ $0x3,%xmm0,%xmm0
VPROTQ $0x3,%xmm7,%xmm0
VPROTQ $0x0,%xmm5,%xmm5
VPROTQ $0x0,%xmm0,%xmm7
VPROTQ $0x3,(%eax),%xmm0
VPROTQ $0xFF,(%ebx),%xmm0
VPROTQ $0x0,(%eax),%xmm7
VPROTQ $0xFF,%xmm7,%xmm7
VPROTQ $0xFF,%xmm5,%xmm5
VPROTQ $0xFF,(%ebx),%xmm5
VPROTQ $0xFF,%xmm7,%xmm0
VPROTQ $0x3,(%eax),%xmm7
# Tests for op VPROTW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTW %xmm7,%xmm0,%xmm3
VPROTW %xmm7,%xmm6,%xmm7
VPROTW %xmm7,%xmm0,%xmm0
VPROTW %xmm1,(%esi),%xmm3
VPROTW %xmm0,%xmm7,%xmm0
VPROTW %xmm0,%xmm7,%xmm3
VPROTW %xmm0,%xmm6,%xmm0
VPROTW %xmm1,%xmm6,%xmm0
VPROTW %xmm7,%xmm7,%xmm3
VPROTW %xmm0,(%esi),%xmm7
VPROTW %xmm0,%xmm6,%xmm7
VPROTW %xmm7,(%esi),%xmm3
VPROTW %xmm7,(%edx),%xmm0
VPROTW %xmm1,(%esi),%xmm7
VPROTW %xmm1,%xmm7,%xmm0
VPROTW %xmm0,(%edx),%xmm3
# Tests for op VPROTW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTW (%ebx),%xmm0,%xmm3
VPROTW (%ebx),%xmm0,%xmm7
VPROTW (%esi),%xmm0,%xmm0
VPROTW (%eax),%xmm7,%xmm3
VPROTW %xmm1,%xmm6,%xmm0
VPROTW %xmm1,%xmm6,%xmm3
VPROTW %xmm1,%xmm0,%xmm0
VPROTW %xmm7,%xmm0,%xmm0
VPROTW (%esi),%xmm6,%xmm3
VPROTW %xmm1,%xmm7,%xmm7
VPROTW %xmm0,%xmm0,%xmm7
VPROTW (%ebx),%xmm7,%xmm3
VPROTW (%ebx),%xmm7,%xmm0
VPROTW (%eax),%xmm7,%xmm7
VPROTW (%eax),%xmm6,%xmm0
VPROTW %xmm1,%xmm7,%xmm3
# Tests for op VPROTW imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTW $0x0,%xmm7,%xmm7
VPROTW $0x0,(%ebx),%xmm7
VPROTW $0x0,%xmm0,%xmm5
VPROTW $0xFF,%xmm5,%xmm0
VPROTW $0x3,%xmm0,%xmm0
VPROTW $0x3,%xmm7,%xmm0
VPROTW $0x0,%xmm5,%xmm5
VPROTW $0x0,%xmm0,%xmm7
VPROTW $0x3,(%eax),%xmm0
VPROTW $0xFF,(%ebx),%xmm0
VPROTW $0x0,(%eax),%xmm7
VPROTW $0xFF,%xmm7,%xmm7
VPROTW $0xFF,%xmm5,%xmm5
VPROTW $0xFF,(%ebx),%xmm5
VPROTW $0xFF,%xmm7,%xmm0
VPROTW $0x3,(%eax),%xmm7
# Tests for op VPSHAB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAB %xmm7,%xmm0,%xmm3
VPSHAB %xmm7,%xmm6,%xmm7
VPSHAB %xmm7,%xmm0,%xmm0
VPSHAB %xmm1,(%esi),%xmm3
VPSHAB %xmm0,%xmm7,%xmm0
VPSHAB %xmm0,%xmm7,%xmm3
VPSHAB %xmm0,%xmm6,%xmm0
VPSHAB %xmm1,%xmm6,%xmm0
VPSHAB %xmm7,%xmm7,%xmm3
VPSHAB %xmm0,(%esi),%xmm7
VPSHAB %xmm0,%xmm6,%xmm7
VPSHAB %xmm7,(%esi),%xmm3
VPSHAB %xmm7,(%edx),%xmm0
VPSHAB %xmm1,(%esi),%xmm7
VPSHAB %xmm1,%xmm7,%xmm0
VPSHAB %xmm0,(%edx),%xmm3
# Tests for op VPSHAB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAB (%ebx),%xmm0,%xmm3
VPSHAB (%ebx),%xmm0,%xmm7
VPSHAB (%esi),%xmm0,%xmm0
VPSHAB (%eax),%xmm7,%xmm3
VPSHAB %xmm1,%xmm6,%xmm0
VPSHAB %xmm1,%xmm6,%xmm3
VPSHAB %xmm1,%xmm0,%xmm0
VPSHAB %xmm7,%xmm0,%xmm0
VPSHAB (%esi),%xmm6,%xmm3
VPSHAB %xmm1,%xmm7,%xmm7
VPSHAB %xmm0,%xmm0,%xmm7
VPSHAB (%ebx),%xmm7,%xmm3
VPSHAB (%ebx),%xmm7,%xmm0
VPSHAB (%eax),%xmm7,%xmm7
VPSHAB (%eax),%xmm6,%xmm0
VPSHAB %xmm1,%xmm7,%xmm3
# Tests for op VPSHAD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAD %xmm7,%xmm0,%xmm3
VPSHAD %xmm7,%xmm6,%xmm7
VPSHAD %xmm7,%xmm0,%xmm0
VPSHAD %xmm1,(%esi),%xmm3
VPSHAD %xmm0,%xmm7,%xmm0
VPSHAD %xmm0,%xmm7,%xmm3
VPSHAD %xmm0,%xmm6,%xmm0
VPSHAD %xmm1,%xmm6,%xmm0
VPSHAD %xmm7,%xmm7,%xmm3
VPSHAD %xmm0,(%esi),%xmm7
VPSHAD %xmm0,%xmm6,%xmm7
VPSHAD %xmm7,(%esi),%xmm3
VPSHAD %xmm7,(%edx),%xmm0
VPSHAD %xmm1,(%esi),%xmm7
VPSHAD %xmm1,%xmm7,%xmm0
VPSHAD %xmm0,(%edx),%xmm3
# Tests for op VPSHAD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAD (%ebx),%xmm0,%xmm3
VPSHAD (%ebx),%xmm0,%xmm7
VPSHAD (%esi),%xmm0,%xmm0
VPSHAD (%eax),%xmm7,%xmm3
VPSHAD %xmm1,%xmm6,%xmm0
VPSHAD %xmm1,%xmm6,%xmm3
VPSHAD %xmm1,%xmm0,%xmm0
VPSHAD %xmm7,%xmm0,%xmm0
VPSHAD (%esi),%xmm6,%xmm3
VPSHAD %xmm1,%xmm7,%xmm7
VPSHAD %xmm0,%xmm0,%xmm7
VPSHAD (%ebx),%xmm7,%xmm3
VPSHAD (%ebx),%xmm7,%xmm0
VPSHAD (%eax),%xmm7,%xmm7
VPSHAD (%eax),%xmm6,%xmm0
VPSHAD %xmm1,%xmm7,%xmm3
# Tests for op VPSHAQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAQ %xmm7,%xmm0,%xmm3
VPSHAQ %xmm7,%xmm6,%xmm7
VPSHAQ %xmm7,%xmm0,%xmm0
VPSHAQ %xmm1,(%esi),%xmm3
VPSHAQ %xmm0,%xmm7,%xmm0
VPSHAQ %xmm0,%xmm7,%xmm3
VPSHAQ %xmm0,%xmm6,%xmm0
VPSHAQ %xmm1,%xmm6,%xmm0
VPSHAQ %xmm7,%xmm7,%xmm3
VPSHAQ %xmm0,(%esi),%xmm7
VPSHAQ %xmm0,%xmm6,%xmm7
VPSHAQ %xmm7,(%esi),%xmm3
VPSHAQ %xmm7,(%edx),%xmm0
VPSHAQ %xmm1,(%esi),%xmm7
VPSHAQ %xmm1,%xmm7,%xmm0
VPSHAQ %xmm0,(%edx),%xmm3
# Tests for op VPSHAQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAQ (%ebx),%xmm0,%xmm3
VPSHAQ (%ebx),%xmm0,%xmm7
VPSHAQ (%esi),%xmm0,%xmm0
VPSHAQ (%eax),%xmm7,%xmm3
VPSHAQ %xmm1,%xmm6,%xmm0
VPSHAQ %xmm1,%xmm6,%xmm3
VPSHAQ %xmm1,%xmm0,%xmm0
VPSHAQ %xmm7,%xmm0,%xmm0
VPSHAQ (%esi),%xmm6,%xmm3
VPSHAQ %xmm1,%xmm7,%xmm7
VPSHAQ %xmm0,%xmm0,%xmm7
VPSHAQ (%ebx),%xmm7,%xmm3
VPSHAQ (%ebx),%xmm7,%xmm0
VPSHAQ (%eax),%xmm7,%xmm7
VPSHAQ (%eax),%xmm6,%xmm0
VPSHAQ %xmm1,%xmm7,%xmm3
# Tests for op VPSHAW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAW %xmm7,%xmm0,%xmm3
VPSHAW %xmm7,%xmm6,%xmm7
VPSHAW %xmm7,%xmm0,%xmm0
VPSHAW %xmm1,(%esi),%xmm3
VPSHAW %xmm0,%xmm7,%xmm0
VPSHAW %xmm0,%xmm7,%xmm3
VPSHAW %xmm0,%xmm6,%xmm0
VPSHAW %xmm1,%xmm6,%xmm0
VPSHAW %xmm7,%xmm7,%xmm3
VPSHAW %xmm0,(%esi),%xmm7
VPSHAW %xmm0,%xmm6,%xmm7
VPSHAW %xmm7,(%esi),%xmm3
VPSHAW %xmm7,(%edx),%xmm0
VPSHAW %xmm1,(%esi),%xmm7
VPSHAW %xmm1,%xmm7,%xmm0
VPSHAW %xmm0,(%edx),%xmm3
# Tests for op VPSHAW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAW (%ebx),%xmm0,%xmm3
VPSHAW (%ebx),%xmm0,%xmm7
VPSHAW (%esi),%xmm0,%xmm0
VPSHAW (%eax),%xmm7,%xmm3
VPSHAW %xmm1,%xmm6,%xmm0
VPSHAW %xmm1,%xmm6,%xmm3
VPSHAW %xmm1,%xmm0,%xmm0
VPSHAW %xmm7,%xmm0,%xmm0
VPSHAW (%esi),%xmm6,%xmm3
VPSHAW %xmm1,%xmm7,%xmm7
VPSHAW %xmm0,%xmm0,%xmm7
VPSHAW (%ebx),%xmm7,%xmm3
VPSHAW (%ebx),%xmm7,%xmm0
VPSHAW (%eax),%xmm7,%xmm7
VPSHAW (%eax),%xmm6,%xmm0
VPSHAW %xmm1,%xmm7,%xmm3
# Tests for op VPSHLB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLB %xmm7,%xmm0,%xmm3
VPSHLB %xmm7,%xmm6,%xmm7
VPSHLB %xmm7,%xmm0,%xmm0
VPSHLB %xmm1,(%esi),%xmm3
VPSHLB %xmm0,%xmm7,%xmm0
VPSHLB %xmm0,%xmm7,%xmm3
VPSHLB %xmm0,%xmm6,%xmm0
VPSHLB %xmm1,%xmm6,%xmm0
VPSHLB %xmm7,%xmm7,%xmm3
VPSHLB %xmm0,(%esi),%xmm7
VPSHLB %xmm0,%xmm6,%xmm7
VPSHLB %xmm7,(%esi),%xmm3
VPSHLB %xmm7,(%edx),%xmm0
VPSHLB %xmm1,(%esi),%xmm7
VPSHLB %xmm1,%xmm7,%xmm0
VPSHLB %xmm0,(%edx),%xmm3
# Tests for op VPSHLB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLB (%ebx),%xmm0,%xmm3
VPSHLB (%ebx),%xmm0,%xmm7
VPSHLB (%esi),%xmm0,%xmm0
VPSHLB (%eax),%xmm7,%xmm3
VPSHLB %xmm1,%xmm6,%xmm0
VPSHLB %xmm1,%xmm6,%xmm3
VPSHLB %xmm1,%xmm0,%xmm0
VPSHLB %xmm7,%xmm0,%xmm0
VPSHLB (%esi),%xmm6,%xmm3
VPSHLB %xmm1,%xmm7,%xmm7
VPSHLB %xmm0,%xmm0,%xmm7
VPSHLB (%ebx),%xmm7,%xmm3
VPSHLB (%ebx),%xmm7,%xmm0
VPSHLB (%eax),%xmm7,%xmm7
VPSHLB (%eax),%xmm6,%xmm0
VPSHLB %xmm1,%xmm7,%xmm3
# Tests for op VPSHLD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLD %xmm7,%xmm0,%xmm3
VPSHLD %xmm7,%xmm6,%xmm7
VPSHLD %xmm7,%xmm0,%xmm0
VPSHLD %xmm1,(%esi),%xmm3
VPSHLD %xmm0,%xmm7,%xmm0
VPSHLD %xmm0,%xmm7,%xmm3
VPSHLD %xmm0,%xmm6,%xmm0
VPSHLD %xmm1,%xmm6,%xmm0
VPSHLD %xmm7,%xmm7,%xmm3
VPSHLD %xmm0,(%esi),%xmm7
VPSHLD %xmm0,%xmm6,%xmm7
VPSHLD %xmm7,(%esi),%xmm3
VPSHLD %xmm7,(%edx),%xmm0
VPSHLD %xmm1,(%esi),%xmm7
VPSHLD %xmm1,%xmm7,%xmm0
VPSHLD %xmm0,(%edx),%xmm3
# Tests for op VPSHLD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLD (%ebx),%xmm0,%xmm3
VPSHLD (%ebx),%xmm0,%xmm7
VPSHLD (%esi),%xmm0,%xmm0
VPSHLD (%eax),%xmm7,%xmm3
VPSHLD %xmm1,%xmm6,%xmm0
VPSHLD %xmm1,%xmm6,%xmm3
VPSHLD %xmm1,%xmm0,%xmm0
VPSHLD %xmm7,%xmm0,%xmm0
VPSHLD (%esi),%xmm6,%xmm3
VPSHLD %xmm1,%xmm7,%xmm7
VPSHLD %xmm0,%xmm0,%xmm7
VPSHLD (%ebx),%xmm7,%xmm3
VPSHLD (%ebx),%xmm7,%xmm0
VPSHLD (%eax),%xmm7,%xmm7
VPSHLD (%eax),%xmm6,%xmm0
VPSHLD %xmm1,%xmm7,%xmm3
# Tests for op VPSHLQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLQ %xmm7,%xmm0,%xmm3
VPSHLQ %xmm7,%xmm6,%xmm7
VPSHLQ %xmm7,%xmm0,%xmm0
VPSHLQ %xmm1,(%esi),%xmm3
VPSHLQ %xmm0,%xmm7,%xmm0
VPSHLQ %xmm0,%xmm7,%xmm3
VPSHLQ %xmm0,%xmm6,%xmm0
VPSHLQ %xmm1,%xmm6,%xmm0
VPSHLQ %xmm7,%xmm7,%xmm3
VPSHLQ %xmm0,(%esi),%xmm7
VPSHLQ %xmm0,%xmm6,%xmm7
VPSHLQ %xmm7,(%esi),%xmm3
VPSHLQ %xmm7,(%edx),%xmm0
VPSHLQ %xmm1,(%esi),%xmm7
VPSHLQ %xmm1,%xmm7,%xmm0
VPSHLQ %xmm0,(%edx),%xmm3
# Tests for op VPSHLQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLQ (%ebx),%xmm0,%xmm3
VPSHLQ (%ebx),%xmm0,%xmm7
VPSHLQ (%esi),%xmm0,%xmm0
VPSHLQ (%eax),%xmm7,%xmm3
VPSHLQ %xmm1,%xmm6,%xmm0
VPSHLQ %xmm1,%xmm6,%xmm3
VPSHLQ %xmm1,%xmm0,%xmm0
VPSHLQ %xmm7,%xmm0,%xmm0
VPSHLQ (%esi),%xmm6,%xmm3
VPSHLQ %xmm1,%xmm7,%xmm7
VPSHLQ %xmm0,%xmm0,%xmm7
VPSHLQ (%ebx),%xmm7,%xmm3
VPSHLQ (%ebx),%xmm7,%xmm0
VPSHLQ (%eax),%xmm7,%xmm7
VPSHLQ (%eax),%xmm6,%xmm0
VPSHLQ %xmm1,%xmm7,%xmm3
# Tests for op VPSHLW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLW %xmm7,%xmm0,%xmm3
VPSHLW %xmm7,%xmm6,%xmm7
VPSHLW %xmm7,%xmm0,%xmm0
VPSHLW %xmm1,(%esi),%xmm3
VPSHLW %xmm0,%xmm7,%xmm0
VPSHLW %xmm0,%xmm7,%xmm3
VPSHLW %xmm0,%xmm6,%xmm0
VPSHLW %xmm1,%xmm6,%xmm0
VPSHLW %xmm7,%xmm7,%xmm3
VPSHLW %xmm0,(%esi),%xmm7
VPSHLW %xmm0,%xmm6,%xmm7
VPSHLW %xmm7,(%esi),%xmm3
VPSHLW %xmm7,(%edx),%xmm0
VPSHLW %xmm1,(%esi),%xmm7
VPSHLW %xmm1,%xmm7,%xmm0
VPSHLW %xmm0,(%edx),%xmm3
# Tests for op VPSHLW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLW (%ebx),%xmm0,%xmm3
VPSHLW (%ebx),%xmm0,%xmm7
VPSHLW (%esi),%xmm0,%xmm0
VPSHLW (%eax),%xmm7,%xmm3
VPSHLW %xmm1,%xmm6,%xmm0
VPSHLW %xmm1,%xmm6,%xmm3
VPSHLW %xmm1,%xmm0,%xmm0
VPSHLW %xmm7,%xmm0,%xmm0
VPSHLW (%esi),%xmm6,%xmm3
VPSHLW %xmm1,%xmm7,%xmm7
VPSHLW %xmm0,%xmm0,%xmm7
VPSHLW (%ebx),%xmm7,%xmm3
VPSHLW (%ebx),%xmm7,%xmm0
VPSHLW (%eax),%xmm7,%xmm7
VPSHLW (%eax),%xmm6,%xmm0
VPSHLW %xmm1,%xmm7,%xmm3
# All variants of VPCOM* aliases
VPCOMLTB %xmm6,%xmm0,%xmm0
VPCOMLTB %xmm6,%xmm0,%xmm7
VPCOMLTB (%edi,%eax,2),%xmm0,%xmm0
VPCOMLTB %xmm6,%xmm7,%xmm0
VPCOMLTB %xmm7,%xmm0,%xmm7
VPCOMLTB (%eax),%xmm7,%xmm7
VPCOMLTB %xmm6,%xmm4,%xmm0
VPCOMLTB (%edx,%ecx),%xmm0,%xmm7
VPCOMLTD (%eax),%xmm7,%xmm0
VPCOMLTD (%edi,%eax,2),%xmm4,%xmm7
VPCOMLTD (%eax),%xmm0,%xmm0
VPCOMLTD (%edi,%eax,2),%xmm4,%xmm3
VPCOMLTD (%edi,%eax,2),%xmm4,%xmm0
VPCOMLTD 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMLTD 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLTD (%edi,%eax,2),%xmm0,%xmm3
VPCOMLTQ (%edi,%eax,2),%xmm4,%xmm7
VPCOMLTQ 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMLTQ (%edx,%ecx),%xmm0,%xmm7
VPCOMLTQ %xmm0,%xmm7,%xmm3
VPCOMLTQ 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMLTQ (%edi,%eax,2),%xmm0,%xmm3
VPCOMLTQ (%eax),%xmm7,%xmm7
VPCOMLTQ %xmm6,%xmm0,%xmm7
VPCOMLTUB (%edx,%ecx),%xmm4,%xmm3
VPCOMLTUB (%eax),%xmm4,%xmm0
VPCOMLTUB %xmm7,%xmm7,%xmm3
VPCOMLTUB %xmm0,%xmm7,%xmm3
VPCOMLTUB %xmm7,%xmm7,%xmm0
VPCOMLTUB %xmm6,%xmm0,%xmm7
VPCOMLTUB %xmm7,%xmm0,%xmm3
VPCOMLTUB (%edx,%ecx),%xmm7,%xmm0
VPCOMLTUD (%edx,%ecx),%xmm7,%xmm0
VPCOMLTUD (%edx,%ecx),%xmm0,%xmm7
VPCOMLTUD (%edx,%ecx),%xmm4,%xmm7
VPCOMLTUD (%edx,%ecx),%xmm7,%xmm3
VPCOMLTUD %xmm7,%xmm4,%xmm0
VPCOMLTUD %xmm0,%xmm7,%xmm3
VPCOMLTUD 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMLTUD %xmm7,%xmm4,%xmm7
VPCOMLTUQ (%edi,%eax,2),%xmm7,%xmm0
VPCOMLTUQ (%eax),%xmm4,%xmm7
VPCOMLTUQ %xmm6,%xmm7,%xmm3
VPCOMLTUQ %xmm7,%xmm4,%xmm0
VPCOMLTUQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMLTUQ (%edi,%eax,2),%xmm4,%xmm3
VPCOMLTUQ (%edx,%ecx),%xmm4,%xmm0
VPCOMLTUQ %xmm0,%xmm7,%xmm7
VPCOMLTUW %xmm7,%xmm0,%xmm7
VPCOMLTUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLTUW (%edi,%eax,2),%xmm4,%xmm0
VPCOMLTUW (%edx,%ecx),%xmm7,%xmm0
VPCOMLTUW (%edi,%eax,2),%xmm0,%xmm7
VPCOMLTUW (%edi,%eax,2),%xmm4,%xmm7
VPCOMLTUW (%edx,%ecx),%xmm4,%xmm0
VPCOMLTUW %xmm7,%xmm7,%xmm3
VPCOMLTW %xmm6,%xmm0,%xmm7
VPCOMLTW %xmm6,%xmm4,%xmm3
VPCOMLTW (%eax),%xmm4,%xmm3
VPCOMLTW (%edi,%eax,2),%xmm0,%xmm3
VPCOMLTW %xmm7,%xmm4,%xmm7
VPCOMLTW 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMLTW 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMLTW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLEB (%edx,%ecx),%xmm7,%xmm0
VPCOMLEB %xmm6,%xmm4,%xmm0
VPCOMLEB (%edx,%ecx),%xmm4,%xmm0
VPCOMLEB 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMLEB 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMLEB (%eax),%xmm7,%xmm7
VPCOMLEB (%edi,%eax,2),%xmm7,%xmm0
VPCOMLEB %xmm0,%xmm4,%xmm3
VPCOMLED (%eax),%xmm7,%xmm0
VPCOMLED (%edx,%ecx),%xmm4,%xmm7
VPCOMLED (%edi,%eax,2),%xmm0,%xmm3
VPCOMLED (%edx,%ecx),%xmm7,%xmm0
VPCOMLED %xmm7,%xmm7,%xmm3
VPCOMLED %xmm0,%xmm0,%xmm3
VPCOMLED 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMLED (%edi,%eax,2),%xmm4,%xmm3
VPCOMLEQ 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMLEQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMLEQ %xmm7,%xmm0,%xmm7
VPCOMLEQ (%edi,%eax,2),%xmm7,%xmm7
VPCOMLEQ (%edx,%ecx),%xmm0,%xmm3
VPCOMLEQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLEQ %xmm0,%xmm0,%xmm3
VPCOMLEQ %xmm6,%xmm7,%xmm3
VPCOMLEUB %xmm0,%xmm7,%xmm0
VPCOMLEUB %xmm0,%xmm4,%xmm7
VPCOMLEUB %xmm0,%xmm7,%xmm7
VPCOMLEUB (%eax),%xmm0,%xmm7
VPCOMLEUB %xmm0,%xmm7,%xmm3
VPCOMLEUB %xmm7,%xmm0,%xmm7
VPCOMLEUB %xmm7,%xmm0,%xmm0
VPCOMLEUB %xmm7,%xmm7,%xmm3
VPCOMLEUD (%eax),%xmm7,%xmm7
VPCOMLEUD 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMLEUD (%edi,%eax,2),%xmm4,%xmm0
VPCOMLEUD (%edx,%ecx),%xmm7,%xmm3
VPCOMLEUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMLEUD %xmm6,%xmm7,%xmm3
VPCOMLEUD %xmm7,%xmm0,%xmm3
VPCOMLEUD %xmm0,%xmm4,%xmm3
VPCOMLEUQ %xmm6,%xmm4,%xmm7
VPCOMLEUQ %xmm6,%xmm4,%xmm3
VPCOMLEUQ 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMLEUQ (%edi,%eax,2),%xmm0,%xmm0
VPCOMLEUQ %xmm6,%xmm0,%xmm3
VPCOMLEUQ (%edx,%ecx),%xmm7,%xmm0
VPCOMLEUQ %xmm0,%xmm4,%xmm0
VPCOMLEUQ (%edx,%ecx),%xmm0,%xmm7
VPCOMLEUW (%edx,%ecx),%xmm4,%xmm7
VPCOMLEUW (%edi,%eax,2),%xmm7,%xmm3
VPCOMLEUW %xmm6,%xmm4,%xmm0
VPCOMLEUW %xmm7,%xmm0,%xmm0
VPCOMLEUW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMLEUW 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMLEUW %xmm6,%xmm7,%xmm7
VPCOMLEUW %xmm0,%xmm0,%xmm3
VPCOMLEW 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMLEW %xmm7,%xmm4,%xmm3
VPCOMLEW %xmm7,%xmm0,%xmm7
VPCOMLEW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMLEW (%eax),%xmm7,%xmm0
VPCOMLEW (%edi,%eax,2),%xmm7,%xmm3
VPCOMLEW (%edx,%ecx),%xmm0,%xmm7
VPCOMLEW (%edx,%ecx),%xmm4,%xmm7
VPCOMGTB (%eax),%xmm0,%xmm0
VPCOMGTB (%eax),%xmm7,%xmm3
VPCOMGTB (%eax),%xmm0,%xmm7
VPCOMGTB (%edi,%eax,2),%xmm4,%xmm0
VPCOMGTB %xmm7,%xmm0,%xmm3
VPCOMGTB %xmm0,%xmm7,%xmm7
VPCOMGTB (%edx,%ecx),%xmm4,%xmm7
VPCOMGTB (%edx,%ecx),%xmm7,%xmm7
VPCOMGTD (%edi,%eax,2),%xmm4,%xmm0
VPCOMGTD (%edx,%ecx),%xmm0,%xmm3
VPCOMGTD (%eax),%xmm4,%xmm0
VPCOMGTD 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMGTD %xmm0,%xmm0,%xmm7
VPCOMGTD (%edi,%eax,2),%xmm7,%xmm3
VPCOMGTD %xmm0,%xmm7,%xmm0
VPCOMGTD %xmm6,%xmm0,%xmm7
VPCOMGTQ (%edi,%eax,2),%xmm4,%xmm7
VPCOMGTQ (%edx,%ecx),%xmm4,%xmm0
VPCOMGTQ %xmm0,%xmm0,%xmm3
VPCOMGTQ %xmm0,%xmm7,%xmm7
VPCOMGTQ %xmm7,%xmm4,%xmm3
VPCOMGTQ (%edx,%ecx),%xmm7,%xmm7
VPCOMGTQ %xmm6,%xmm7,%xmm7
VPCOMGTQ %xmm6,%xmm7,%xmm3
VPCOMGTUB (%eax),%xmm0,%xmm0
VPCOMGTUB (%edx,%ecx),%xmm0,%xmm0
VPCOMGTUB (%edx,%ecx),%xmm7,%xmm7
VPCOMGTUB %xmm7,%xmm7,%xmm0
VPCOMGTUB %xmm6,%xmm4,%xmm7
VPCOMGTUB (%edi,%eax,2),%xmm0,%xmm7
VPCOMGTUB (%edx,%ecx),%xmm4,%xmm7
VPCOMGTUB (%edx,%ecx),%xmm7,%xmm0
VPCOMGTUD %xmm7,%xmm7,%xmm0
VPCOMGTUD (%edi,%eax,2),%xmm0,%xmm3
VPCOMGTUD %xmm6,%xmm0,%xmm0
VPCOMGTUD (%edi,%eax,2),%xmm0,%xmm0
VPCOMGTUD %xmm6,%xmm0,%xmm7
VPCOMGTUD 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGTUD %xmm0,%xmm4,%xmm3
VPCOMGTUD (%edx,%ecx),%xmm4,%xmm3
VPCOMGTUQ (%eax),%xmm0,%xmm0
VPCOMGTUQ (%eax),%xmm0,%xmm3
VPCOMGTUQ (%edx,%ecx),%xmm4,%xmm3
VPCOMGTUQ %xmm7,%xmm7,%xmm3
VPCOMGTUQ 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMGTUQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGTUQ 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMGTUQ %xmm7,%xmm7,%xmm0
VPCOMGTUW (%edx,%ecx),%xmm0,%xmm0
VPCOMGTUW %xmm6,%xmm0,%xmm3
VPCOMGTUW %xmm0,%xmm0,%xmm7
VPCOMGTUW %xmm6,%xmm4,%xmm7
VPCOMGTUW (%eax),%xmm7,%xmm7
VPCOMGTUW %xmm0,%xmm4,%xmm3
VPCOMGTUW 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMGTUW (%edx,%ecx),%xmm4,%xmm3
VPCOMGTW 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMGTW (%edx,%ecx),%xmm0,%xmm3
VPCOMGTW (%edx,%ecx),%xmm7,%xmm7
VPCOMGTW (%eax),%xmm4,%xmm7
VPCOMGTW %xmm7,%xmm0,%xmm0
VPCOMGTW %xmm7,%xmm7,%xmm3
VPCOMGTW %xmm6,%xmm7,%xmm0
VPCOMGTW %xmm6,%xmm0,%xmm7
VPCOMGEB %xmm6,%xmm0,%xmm0
VPCOMGEB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMGEB (%eax),%xmm0,%xmm3
VPCOMGEB (%edx,%ecx),%xmm0,%xmm0
VPCOMGEB %xmm0,%xmm7,%xmm7
VPCOMGEB (%eax),%xmm4,%xmm7
VPCOMGEB (%edi,%eax,2),%xmm0,%xmm7
VPCOMGEB %xmm0,%xmm4,%xmm7
VPCOMGED (%eax),%xmm0,%xmm3
VPCOMGED (%edx,%ecx),%xmm0,%xmm7
VPCOMGED (%edi,%eax,2),%xmm4,%xmm7
VPCOMGED %xmm6,%xmm4,%xmm3
VPCOMGED %xmm0,%xmm0,%xmm3
VPCOMGED %xmm6,%xmm0,%xmm7
VPCOMGED %xmm0,%xmm0,%xmm7
VPCOMGED (%eax),%xmm7,%xmm0
VPCOMGEQ %xmm6,%xmm4,%xmm7
VPCOMGEQ %xmm7,%xmm0,%xmm0
VPCOMGEQ 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMGEQ (%edx,%ecx),%xmm4,%xmm3
VPCOMGEQ %xmm0,%xmm4,%xmm3
VPCOMGEQ 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMGEQ (%edi,%eax,2),%xmm4,%xmm0
VPCOMGEQ (%eax),%xmm7,%xmm7
VPCOMGEUB (%edi,%eax,2),%xmm0,%xmm0
VPCOMGEUB (%eax),%xmm7,%xmm3
VPCOMGEUB 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGEUB 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMGEUB (%eax),%xmm4,%xmm3
VPCOMGEUB %xmm6,%xmm7,%xmm3
VPCOMGEUB %xmm6,%xmm7,%xmm0
VPCOMGEUB 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMGEUD 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGEUD 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMGEUD (%edx,%ecx),%xmm0,%xmm0
VPCOMGEUD %xmm7,%xmm4,%xmm3
VPCOMGEUD %xmm0,%xmm0,%xmm3
VPCOMGEUD (%edx,%ecx),%xmm7,%xmm0
VPCOMGEUD %xmm7,%xmm4,%xmm7
VPCOMGEUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMGEUQ 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGEUQ (%edi,%eax,2),%xmm7,%xmm7
VPCOMGEUQ %xmm7,%xmm4,%xmm7
VPCOMGEUQ (%edi,%eax,2),%xmm4,%xmm3
VPCOMGEUQ (%edx,%ecx),%xmm4,%xmm3
VPCOMGEUQ %xmm0,%xmm7,%xmm3
VPCOMGEUQ %xmm7,%xmm7,%xmm7
VPCOMGEUQ (%edx,%ecx),%xmm7,%xmm3
VPCOMGEUW %xmm7,%xmm7,%xmm0
VPCOMGEUW (%edi,%eax,2),%xmm0,%xmm3
VPCOMGEUW (%edx,%ecx),%xmm7,%xmm3
VPCOMGEUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMGEUW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGEUW (%eax),%xmm4,%xmm0
VPCOMGEUW %xmm0,%xmm0,%xmm0
VPCOMGEUW %xmm7,%xmm7,%xmm7
VPCOMGEW %xmm0,%xmm0,%xmm0
VPCOMGEW (%eax),%xmm7,%xmm7
VPCOMGEW %xmm6,%xmm4,%xmm0
VPCOMGEW (%eax),%xmm4,%xmm0
VPCOMGEW %xmm0,%xmm4,%xmm3
VPCOMGEW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMGEW (%edi,%eax,2),%xmm7,%xmm7
VPCOMGEW %xmm6,%xmm4,%xmm3
VPCOMEQB (%eax),%xmm0,%xmm7
VPCOMEQB (%eax),%xmm4,%xmm0
VPCOMEQB (%edx,%ecx),%xmm0,%xmm3
VPCOMEQB %xmm7,%xmm7,%xmm7
VPCOMEQB (%edi,%eax,2),%xmm7,%xmm0
VPCOMEQB (%edx,%ecx),%xmm0,%xmm7
VPCOMEQB %xmm6,%xmm0,%xmm7
VPCOMEQB %xmm0,%xmm0,%xmm7
VPCOMEQD (%edi,%eax,2),%xmm0,%xmm7
VPCOMEQD (%edx,%ecx),%xmm4,%xmm7
VPCOMEQD 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMEQD %xmm7,%xmm0,%xmm7
VPCOMEQD (%edx,%ecx),%xmm4,%xmm0
VPCOMEQD %xmm0,%xmm7,%xmm7
VPCOMEQD %xmm0,%xmm4,%xmm3
VPCOMEQD (%edx,%ecx),%xmm7,%xmm3
VPCOMEQQ (%eax),%xmm7,%xmm0
VPCOMEQQ %xmm6,%xmm4,%xmm0
VPCOMEQQ (%edi,%eax,2),%xmm4,%xmm0
VPCOMEQQ %xmm6,%xmm0,%xmm0
VPCOMEQQ (%edx,%ecx),%xmm4,%xmm0
VPCOMEQQ (%edi,%eax,2),%xmm0,%xmm7
VPCOMEQQ %xmm0,%xmm0,%xmm0
VPCOMEQQ 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMEQUB %xmm7,%xmm4,%xmm0
VPCOMEQUB (%eax),%xmm0,%xmm3
VPCOMEQUB (%edx,%ecx),%xmm4,%xmm7
VPCOMEQUB (%edx,%ecx),%xmm7,%xmm3
VPCOMEQUB (%edx,%ecx),%xmm4,%xmm3
VPCOMEQUB %xmm6,%xmm4,%xmm3
VPCOMEQUB 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMEQUB %xmm6,%xmm7,%xmm0
VPCOMEQUD (%eax),%xmm4,%xmm0
VPCOMEQUD (%edi,%eax,2),%xmm4,%xmm7
VPCOMEQUD (%eax),%xmm4,%xmm7
VPCOMEQUD %xmm6,%xmm0,%xmm3
VPCOMEQUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMEQUD 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMEQUD %xmm7,%xmm7,%xmm7
VPCOMEQUD %xmm6,%xmm4,%xmm7
VPCOMEQUQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMEQUQ %xmm6,%xmm7,%xmm7
VPCOMEQUQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMEQUQ %xmm7,%xmm0,%xmm7
VPCOMEQUQ %xmm7,%xmm4,%xmm0
VPCOMEQUQ %xmm6,%xmm7,%xmm3
VPCOMEQUQ %xmm6,%xmm0,%xmm3
VPCOMEQUQ 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMEQUW (%edi,%eax,2),%xmm0,%xmm0
VPCOMEQUW (%eax),%xmm7,%xmm0
VPCOMEQUW (%eax),%xmm4,%xmm3
VPCOMEQUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMEQUW %xmm7,%xmm4,%xmm7
VPCOMEQUW (%eax),%xmm0,%xmm7
VPCOMEQUW (%eax),%xmm7,%xmm3
VPCOMEQUW %xmm0,%xmm4,%xmm3
VPCOMEQW (%edx,%ecx),%xmm0,%xmm3
VPCOMEQW (%edx,%ecx),%xmm4,%xmm0
VPCOMEQW (%eax),%xmm4,%xmm7
VPCOMEQW (%eax),%xmm7,%xmm7
VPCOMEQW (%edi,%eax,2),%xmm4,%xmm3
VPCOMEQW %xmm0,%xmm4,%xmm3
VPCOMEQW %xmm0,%xmm7,%xmm3
VPCOMEQW %xmm7,%xmm7,%xmm0
VPCOMNEQB 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMNEQB (%eax),%xmm0,%xmm3
VPCOMNEQB (%eax),%xmm4,%xmm0
VPCOMNEQB 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMNEQB %xmm6,%xmm7,%xmm7
VPCOMNEQB %xmm0,%xmm7,%xmm7
VPCOMNEQB (%edi,%eax,2),%xmm0,%xmm3
VPCOMNEQB %xmm6,%xmm0,%xmm3
VPCOMNEQD %xmm0,%xmm7,%xmm3
VPCOMNEQD (%edx,%ecx),%xmm4,%xmm0
VPCOMNEQD (%edi,%eax,2),%xmm4,%xmm7
VPCOMNEQD (%eax),%xmm4,%xmm7
VPCOMNEQD %xmm0,%xmm4,%xmm7
VPCOMNEQD (%edx,%ecx),%xmm7,%xmm3
VPCOMNEQD %xmm7,%xmm0,%xmm3
VPCOMNEQD (%eax),%xmm7,%xmm3
VPCOMNEQQ %xmm6,%xmm7,%xmm3
VPCOMNEQQ %xmm0,%xmm4,%xmm0
VPCOMNEQQ 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMNEQQ %xmm7,%xmm7,%xmm0
VPCOMNEQQ (%eax),%xmm7,%xmm0
VPCOMNEQQ %xmm7,%xmm4,%xmm7
VPCOMNEQQ (%edx,%ecx),%xmm7,%xmm7
VPCOMNEQQ (%edi,%eax,2),%xmm0,%xmm7
VPCOMNEQUB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMNEQUB (%edx,%ecx),%xmm0,%xmm0
VPCOMNEQUB (%edx,%ecx),%xmm0,%xmm3
VPCOMNEQUB %xmm6,%xmm4,%xmm3
VPCOMNEQUB %xmm0,%xmm0,%xmm7
VPCOMNEQUB %xmm7,%xmm0,%xmm3
VPCOMNEQUB %xmm7,%xmm7,%xmm0
VPCOMNEQUB %xmm0,%xmm7,%xmm3
VPCOMNEQUD %xmm0,%xmm0,%xmm0
VPCOMNEQUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMNEQUD (%edx,%ecx),%xmm4,%xmm7
VPCOMNEQUD (%edx,%ecx),%xmm0,%xmm3
VPCOMNEQUD (%eax),%xmm0,%xmm0
VPCOMNEQUD (%eax),%xmm4,%xmm7
VPCOMNEQUD %xmm6,%xmm0,%xmm3
VPCOMNEQUD (%eax),%xmm0,%xmm7
VPCOMNEQUQ %xmm7,%xmm0,%xmm3
VPCOMNEQUQ (%edx,%ecx),%xmm0,%xmm0
VPCOMNEQUQ %xmm7,%xmm4,%xmm0
VPCOMNEQUQ %xmm0,%xmm4,%xmm3
VPCOMNEQUQ (%edx,%ecx),%xmm0,%xmm7
VPCOMNEQUQ (%edi,%eax,2),%xmm4,%xmm0
VPCOMNEQUQ (%eax),%xmm7,%xmm3
VPCOMNEQUQ 0x1(%eax,%edx,1),%xmm4,%xmm3
VPCOMNEQUW (%eax),%xmm4,%xmm0
VPCOMNEQUW 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMNEQUW %xmm0,%xmm4,%xmm0
VPCOMNEQUW %xmm6,%xmm7,%xmm7
VPCOMNEQUW (%edx,%ecx),%xmm7,%xmm0
VPCOMNEQUW %xmm7,%xmm0,%xmm0
VPCOMNEQUW 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMNEQUW %xmm6,%xmm0,%xmm3
VPCOMNEQW %xmm6,%xmm4,%xmm7
VPCOMNEQW (%eax),%xmm0,%xmm7
VPCOMNEQW %xmm7,%xmm4,%xmm7
VPCOMNEQW %xmm0,%xmm0,%xmm3
VPCOMNEQW (%eax),%xmm7,%xmm0
VPCOMNEQW %xmm7,%xmm7,%xmm3
VPCOMNEQW (%eax),%xmm0,%xmm3
VPCOMNEQW (%edi,%eax,2),%xmm4,%xmm7
VPCOMFALSEB (%edx,%ecx),%xmm7,%xmm3
VPCOMFALSEB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMFALSEB %xmm7,%xmm0,%xmm0
VPCOMFALSEB (%eax),%xmm7,%xmm7
VPCOMFALSEB (%eax),%xmm0,%xmm7
VPCOMFALSEB (%edi,%eax,2),%xmm0,%xmm3
VPCOMFALSEB 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSEB (%eax),%xmm0,%xmm0
VPCOMFALSED %xmm6,%xmm0,%xmm3
VPCOMFALSED 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSED 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMFALSED %xmm7,%xmm4,%xmm3
VPCOMFALSED %xmm0,%xmm4,%xmm0
VPCOMFALSED (%eax),%xmm0,%xmm3
VPCOMFALSED (%edx,%ecx),%xmm7,%xmm0
VPCOMFALSED (%edx,%ecx),%xmm0,%xmm0
VPCOMFALSEQ %xmm6,%xmm7,%xmm0
VPCOMFALSEQ 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSEQ %xmm0,%xmm4,%xmm0
VPCOMFALSEQ (%edx,%ecx),%xmm7,%xmm0
VPCOMFALSEQ (%eax),%xmm7,%xmm0
VPCOMFALSEQ (%eax),%xmm7,%xmm3
VPCOMFALSEQ %xmm7,%xmm4,%xmm7
VPCOMFALSEQ (%edx,%ecx),%xmm4,%xmm3
VPCOMFALSEUB %xmm6,%xmm0,%xmm7
VPCOMFALSEUB (%eax),%xmm4,%xmm7
VPCOMFALSEUB (%edi,%eax,2),%xmm0,%xmm7
VPCOMFALSEUB %xmm0,%xmm4,%xmm0
VPCOMFALSEUB %xmm7,%xmm7,%xmm0
VPCOMFALSEUB (%edx,%ecx),%xmm4,%xmm3
VPCOMFALSEUB %xmm0,%xmm7,%xmm3
VPCOMFALSEUB 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMFALSEUD (%edx,%ecx),%xmm4,%xmm7
VPCOMFALSEUD (%eax),%xmm4,%xmm7
VPCOMFALSEUD (%eax),%xmm0,%xmm0
VPCOMFALSEUD %xmm7,%xmm0,%xmm0
VPCOMFALSEUD (%eax),%xmm4,%xmm0
VPCOMFALSEUD 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMFALSEUD (%edi,%eax,2),%xmm7,%xmm0
VPCOMFALSEUD (%edx,%ecx),%xmm0,%xmm0
VPCOMFALSEUQ %xmm0,%xmm4,%xmm0
VPCOMFALSEUQ 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMFALSEUQ (%eax),%xmm0,%xmm7
VPCOMFALSEUQ %xmm0,%xmm7,%xmm0
VPCOMFALSEUQ %xmm7,%xmm0,%xmm0
VPCOMFALSEUQ 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMFALSEUQ (%eax),%xmm0,%xmm3
VPCOMFALSEUQ %xmm7,%xmm4,%xmm0
VPCOMFALSEUW (%eax),%xmm7,%xmm3
VPCOMFALSEUW (%edx,%ecx),%xmm4,%xmm0
VPCOMFALSEUW %xmm6,%xmm4,%xmm7
VPCOMFALSEUW %xmm7,%xmm4,%xmm3
VPCOMFALSEUW %xmm0,%xmm7,%xmm7
VPCOMFALSEUW %xmm7,%xmm7,%xmm0
VPCOMFALSEUW 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMFALSEUW (%eax),%xmm0,%xmm7
VPCOMFALSEW 0x1(%eax,%edx,1),%xmm4,%xmm7
VPCOMFALSEW (%eax),%xmm4,%xmm3
VPCOMFALSEW (%edi,%eax,2),%xmm7,%xmm7
VPCOMFALSEW (%edi,%eax,2),%xmm0,%xmm3
VPCOMFALSEW (%edx,%ecx),%xmm0,%xmm7
VPCOMFALSEW 0x1(%eax,%edx,1),%xmm0,%xmm7
VPCOMFALSEW %xmm6,%xmm0,%xmm7
VPCOMFALSEW %xmm7,%xmm0,%xmm7
VPCOMTRUEB (%edi,%eax,2),%xmm0,%xmm7
VPCOMTRUEB (%edi,%eax,2),%xmm4,%xmm3
VPCOMTRUEB (%eax),%xmm4,%xmm3
VPCOMTRUEB 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUEB 0x1(%eax,%edx,1),%xmm0,%xmm3
VPCOMTRUEB %xmm7,%xmm4,%xmm0
VPCOMTRUEB %xmm7,%xmm7,%xmm3
VPCOMTRUEB %xmm0,%xmm7,%xmm0
VPCOMTRUED (%eax),%xmm7,%xmm7
VPCOMTRUED %xmm6,%xmm4,%xmm0
VPCOMTRUED %xmm0,%xmm7,%xmm7
VPCOMTRUED (%edx,%ecx),%xmm4,%xmm0
VPCOMTRUED 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUED %xmm7,%xmm0,%xmm7
VPCOMTRUED (%eax),%xmm0,%xmm7
VPCOMTRUED (%edx,%ecx),%xmm0,%xmm3
VPCOMTRUEQ (%edi,%eax,2),%xmm7,%xmm3
VPCOMTRUEQ %xmm7,%xmm7,%xmm3
VPCOMTRUEQ %xmm6,%xmm4,%xmm7
VPCOMTRUEQ (%edi,%eax,2),%xmm7,%xmm0
VPCOMTRUEQ %xmm0,%xmm4,%xmm0
VPCOMTRUEQ %xmm7,%xmm0,%xmm0
VPCOMTRUEQ (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEQ (%eax),%xmm0,%xmm7
VPCOMTRUEUB (%edx,%ecx),%xmm0,%xmm7
VPCOMTRUEUB (%edi,%eax,2),%xmm7,%xmm3
VPCOMTRUEUB 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUEUB 0x1(%eax,%edx,1),%xmm7,%xmm3
VPCOMTRUEUB 0x1(%eax,%edx,1),%xmm4,%xmm0
VPCOMTRUEUB (%eax),%xmm7,%xmm7
VPCOMTRUEUB (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEUB %xmm6,%xmm0,%xmm3
VPCOMTRUEUD (%edi,%eax,2),%xmm0,%xmm7
VPCOMTRUEUD %xmm7,%xmm4,%xmm3
VPCOMTRUEUD %xmm7,%xmm4,%xmm0
VPCOMTRUEUD (%edi,%eax,2),%xmm4,%xmm3
VPCOMTRUEUD (%eax),%xmm0,%xmm7
VPCOMTRUEUD %xmm6,%xmm7,%xmm0
VPCOMTRUEUD 0x1(%eax,%edx,1),%xmm7,%xmm0
VPCOMTRUEUD (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEUQ (%edx,%ecx),%xmm7,%xmm7
VPCOMTRUEUQ (%eax),%xmm0,%xmm7
VPCOMTRUEUQ 0x1(%eax,%edx,1),%xmm7,%xmm7
VPCOMTRUEUQ %xmm7,%xmm0,%xmm7
VPCOMTRUEUQ (%eax),%xmm7,%xmm0
VPCOMTRUEUQ (%edi,%eax,2),%xmm0,%xmm0
VPCOMTRUEUQ (%edx,%ecx),%xmm7,%xmm0
VPCOMTRUEUQ %xmm0,%xmm0,%xmm0
VPCOMTRUEUW (%edx,%ecx),%xmm0,%xmm3
VPCOMTRUEUW (%edi,%eax,2),%xmm7,%xmm7
VPCOMTRUEUW 0x1(%eax,%edx,1),%xmm0,%xmm0
VPCOMTRUEUW %xmm7,%xmm0,%xmm0
VPCOMTRUEUW %xmm0,%xmm7,%xmm7
VPCOMTRUEUW %xmm0,%xmm4,%xmm3
VPCOMTRUEUW (%eax),%xmm7,%xmm7
VPCOMTRUEUW %xmm0,%xmm0,%xmm7
VPCOMTRUEW %xmm6,%xmm0,%xmm0
VPCOMTRUEW (%edx,%ecx),%xmm7,%xmm0
VPCOMTRUEW (%edx,%ecx),%xmm7,%xmm3
VPCOMTRUEW (%edx,%ecx),%xmm4,%xmm7
VPCOMTRUEW (%eax),%xmm4,%xmm7
VPCOMTRUEW %xmm6,%xmm7,%xmm7
VPCOMTRUEW %xmm0,%xmm4,%xmm3
VPCOMTRUEW (%edx,%ecx),%xmm0,%xmm3
# ---- concatenation marker (dataset-extraction artifact, not part of any test file) ----
# repo: tactcomplabs/xbgas-binutils-gdb   (original file size: 16,324 bytes)
# next file: gas/testsuite/gas/i386/fma.s
# GAS testsuite input: exercises every FMA3 form (packed pd/ps in ymm and
# xmm widths, scalar sd/ss) in both AT&T and Intel syntax, 32-bit addressing.
# NOTE(review): testsuite .s inputs are normally paired with an expected
# objdump listing (fma.d); keep every instruction line byte-identical so the
# paired dump still matches — TODO confirm pairing before editing.
# Check FMA instructions
.allow_index_reg
.text
_start:
# Tests for op ymm/mem256, ymm, ymm
# AT&T order: src3(reg/mem), src2, dst.  Register form then memory form
# for each mnemonic.
vfmadd132pd %ymm4,%ymm6,%ymm2
vfmadd132pd (%ecx),%ymm6,%ymm2
vfmadd132ps %ymm4,%ymm6,%ymm2
vfmadd132ps (%ecx),%ymm6,%ymm2
vfmadd213pd %ymm4,%ymm6,%ymm2
vfmadd213pd (%ecx),%ymm6,%ymm2
vfmadd213ps %ymm4,%ymm6,%ymm2
vfmadd213ps (%ecx),%ymm6,%ymm2
vfmadd231pd %ymm4,%ymm6,%ymm2
vfmadd231pd (%ecx),%ymm6,%ymm2
vfmadd231ps %ymm4,%ymm6,%ymm2
vfmadd231ps (%ecx),%ymm6,%ymm2
vfmaddsub132pd %ymm4,%ymm6,%ymm2
vfmaddsub132pd (%ecx),%ymm6,%ymm2
vfmaddsub132ps %ymm4,%ymm6,%ymm2
vfmaddsub132ps (%ecx),%ymm6,%ymm2
vfmaddsub213pd %ymm4,%ymm6,%ymm2
vfmaddsub213pd (%ecx),%ymm6,%ymm2
vfmaddsub213ps %ymm4,%ymm6,%ymm2
vfmaddsub213ps (%ecx),%ymm6,%ymm2
vfmaddsub231pd %ymm4,%ymm6,%ymm2
vfmaddsub231pd (%ecx),%ymm6,%ymm2
vfmaddsub231ps %ymm4,%ymm6,%ymm2
vfmaddsub231ps (%ecx),%ymm6,%ymm2
vfmsubadd132pd %ymm4,%ymm6,%ymm2
vfmsubadd132pd (%ecx),%ymm6,%ymm2
vfmsubadd132ps %ymm4,%ymm6,%ymm2
vfmsubadd132ps (%ecx),%ymm6,%ymm2
vfmsubadd213pd %ymm4,%ymm6,%ymm2
vfmsubadd213pd (%ecx),%ymm6,%ymm2
vfmsubadd213ps %ymm4,%ymm6,%ymm2
vfmsubadd213ps (%ecx),%ymm6,%ymm2
vfmsubadd231pd %ymm4,%ymm6,%ymm2
vfmsubadd231pd (%ecx),%ymm6,%ymm2
vfmsubadd231ps %ymm4,%ymm6,%ymm2
vfmsubadd231ps (%ecx),%ymm6,%ymm2
vfmsub132pd %ymm4,%ymm6,%ymm2
vfmsub132pd (%ecx),%ymm6,%ymm2
vfmsub132ps %ymm4,%ymm6,%ymm2
vfmsub132ps (%ecx),%ymm6,%ymm2
vfmsub213pd %ymm4,%ymm6,%ymm2
vfmsub213pd (%ecx),%ymm6,%ymm2
vfmsub213ps %ymm4,%ymm6,%ymm2
vfmsub213ps (%ecx),%ymm6,%ymm2
vfmsub231pd %ymm4,%ymm6,%ymm2
vfmsub231pd (%ecx),%ymm6,%ymm2
vfmsub231ps %ymm4,%ymm6,%ymm2
vfmsub231ps (%ecx),%ymm6,%ymm2
vfnmadd132pd %ymm4,%ymm6,%ymm2
vfnmadd132pd (%ecx),%ymm6,%ymm2
vfnmadd132ps %ymm4,%ymm6,%ymm2
vfnmadd132ps (%ecx),%ymm6,%ymm2
vfnmadd213pd %ymm4,%ymm6,%ymm2
vfnmadd213pd (%ecx),%ymm6,%ymm2
vfnmadd213ps %ymm4,%ymm6,%ymm2
vfnmadd213ps (%ecx),%ymm6,%ymm2
vfnmadd231pd %ymm4,%ymm6,%ymm2
vfnmadd231pd (%ecx),%ymm6,%ymm2
vfnmadd231ps %ymm4,%ymm6,%ymm2
vfnmadd231ps (%ecx),%ymm6,%ymm2
vfnmsub132pd %ymm4,%ymm6,%ymm2
vfnmsub132pd (%ecx),%ymm6,%ymm2
vfnmsub132ps %ymm4,%ymm6,%ymm2
vfnmsub132ps (%ecx),%ymm6,%ymm2
vfnmsub213pd %ymm4,%ymm6,%ymm2
vfnmsub213pd (%ecx),%ymm6,%ymm2
vfnmsub213ps %ymm4,%ymm6,%ymm2
vfnmsub213ps (%ecx),%ymm6,%ymm2
vfnmsub231pd %ymm4,%ymm6,%ymm2
vfnmsub231pd (%ecx),%ymm6,%ymm2
vfnmsub231ps %ymm4,%ymm6,%ymm2
vfnmsub231ps (%ecx),%ymm6,%ymm2
# Tests for op xmm/mem128, xmm, xmm
# NOTE(review): memory forms below write %xmm7 while register forms write
# %xmm2 — presumably intentional to vary the ModRM reg field; confirm
# against the paired expected dump.
vfmadd132pd %xmm4,%xmm6,%xmm2
vfmadd132pd (%ecx),%xmm6,%xmm7
vfmadd132ps %xmm4,%xmm6,%xmm2
vfmadd132ps (%ecx),%xmm6,%xmm7
vfmadd213pd %xmm4,%xmm6,%xmm2
vfmadd213pd (%ecx),%xmm6,%xmm7
vfmadd213ps %xmm4,%xmm6,%xmm2
vfmadd213ps (%ecx),%xmm6,%xmm7
vfmadd231pd %xmm4,%xmm6,%xmm2
vfmadd231pd (%ecx),%xmm6,%xmm7
vfmadd231ps %xmm4,%xmm6,%xmm2
vfmadd231ps (%ecx),%xmm6,%xmm7
vfmaddsub132pd %xmm4,%xmm6,%xmm2
vfmaddsub132pd (%ecx),%xmm6,%xmm7
vfmaddsub132ps %xmm4,%xmm6,%xmm2
vfmaddsub132ps (%ecx),%xmm6,%xmm7
vfmaddsub213pd %xmm4,%xmm6,%xmm2
vfmaddsub213pd (%ecx),%xmm6,%xmm7
vfmaddsub213ps %xmm4,%xmm6,%xmm2
vfmaddsub213ps (%ecx),%xmm6,%xmm7
vfmaddsub231pd %xmm4,%xmm6,%xmm2
vfmaddsub231pd (%ecx),%xmm6,%xmm7
vfmaddsub231ps %xmm4,%xmm6,%xmm2
vfmaddsub231ps (%ecx),%xmm6,%xmm7
vfmsubadd132pd %xmm4,%xmm6,%xmm2
vfmsubadd132pd (%ecx),%xmm6,%xmm7
vfmsubadd132ps %xmm4,%xmm6,%xmm2
vfmsubadd132ps (%ecx),%xmm6,%xmm7
vfmsubadd213pd %xmm4,%xmm6,%xmm2
vfmsubadd213pd (%ecx),%xmm6,%xmm7
vfmsubadd213ps %xmm4,%xmm6,%xmm2
vfmsubadd213ps (%ecx),%xmm6,%xmm7
vfmsubadd231pd %xmm4,%xmm6,%xmm2
vfmsubadd231pd (%ecx),%xmm6,%xmm7
vfmsubadd231ps %xmm4,%xmm6,%xmm2
vfmsubadd231ps (%ecx),%xmm6,%xmm7
vfmsub132pd %xmm4,%xmm6,%xmm2
vfmsub132pd (%ecx),%xmm6,%xmm7
vfmsub132ps %xmm4,%xmm6,%xmm2
vfmsub132ps (%ecx),%xmm6,%xmm7
vfmsub213pd %xmm4,%xmm6,%xmm2
vfmsub213pd (%ecx),%xmm6,%xmm7
vfmsub213ps %xmm4,%xmm6,%xmm2
vfmsub213ps (%ecx),%xmm6,%xmm7
vfmsub231pd %xmm4,%xmm6,%xmm2
vfmsub231pd (%ecx),%xmm6,%xmm7
vfmsub231ps %xmm4,%xmm6,%xmm2
vfmsub231ps (%ecx),%xmm6,%xmm7
vfnmadd132pd %xmm4,%xmm6,%xmm2
vfnmadd132pd (%ecx),%xmm6,%xmm7
vfnmadd132ps %xmm4,%xmm6,%xmm2
vfnmadd132ps (%ecx),%xmm6,%xmm7
vfnmadd213pd %xmm4,%xmm6,%xmm2
vfnmadd213pd (%ecx),%xmm6,%xmm7
vfnmadd213ps %xmm4,%xmm6,%xmm2
vfnmadd213ps (%ecx),%xmm6,%xmm7
vfnmadd231pd %xmm4,%xmm6,%xmm2
vfnmadd231pd (%ecx),%xmm6,%xmm7
vfnmadd231ps %xmm4,%xmm6,%xmm2
vfnmadd231ps (%ecx),%xmm6,%xmm7
vfnmsub132pd %xmm4,%xmm6,%xmm2
vfnmsub132pd (%ecx),%xmm6,%xmm7
vfnmsub132ps %xmm4,%xmm6,%xmm2
vfnmsub132ps (%ecx),%xmm6,%xmm7
vfnmsub213pd %xmm4,%xmm6,%xmm2
vfnmsub213pd (%ecx),%xmm6,%xmm7
vfnmsub213ps %xmm4,%xmm6,%xmm2
vfnmsub213ps (%ecx),%xmm6,%xmm7
vfnmsub231pd %xmm4,%xmm6,%xmm2
vfnmsub231pd (%ecx),%xmm6,%xmm7
vfnmsub231ps %xmm4,%xmm6,%xmm2
vfnmsub231ps (%ecx),%xmm6,%xmm7
# Tests for op xmm/mem64, xmm, xmm
# Scalar double-precision forms (64-bit memory operand).
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%ecx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%ecx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%ecx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%ecx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%ecx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%ecx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%ecx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%ecx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%ecx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%ecx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%ecx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
# Scalar single-precision forms (32-bit memory operand).
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%ecx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%ecx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%ecx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%ecx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%ecx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%ecx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%ecx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%ecx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%ecx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%ecx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%ecx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%ecx),%xmm6,%xmm2
# Same instruction set repeated in Intel syntax (operand order reversed:
# dst, src2, src3).  Memory forms appear twice: once with an explicit
# size override (YMMWORD/XMMWORD/QWORD/DWORD PTR) and once bare, so the
# assembler's operand-size inference is exercised as well.
.intel_syntax noprefix
# Tests for op ymm/mem256, ymm, ymm
vfmadd132pd ymm2,ymm6,ymm4
vfmadd132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd132pd ymm2,ymm6,[ecx]
vfmadd132ps ymm2,ymm6,ymm4
vfmadd132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd132ps ymm2,ymm6,[ecx]
vfmadd213pd ymm2,ymm6,ymm4
vfmadd213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd213pd ymm2,ymm6,[ecx]
vfmadd213ps ymm2,ymm6,ymm4
vfmadd213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd213ps ymm2,ymm6,[ecx]
vfmadd231pd ymm2,ymm6,ymm4
vfmadd231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd231pd ymm2,ymm6,[ecx]
vfmadd231ps ymm2,ymm6,ymm4
vfmadd231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd231ps ymm2,ymm6,[ecx]
vfmaddsub132pd ymm2,ymm6,ymm4
vfmaddsub132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub132pd ymm2,ymm6,[ecx]
vfmaddsub132ps ymm2,ymm6,ymm4
vfmaddsub132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub132ps ymm2,ymm6,[ecx]
vfmaddsub213pd ymm2,ymm6,ymm4
vfmaddsub213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub213pd ymm2,ymm6,[ecx]
vfmaddsub213ps ymm2,ymm6,ymm4
vfmaddsub213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub213ps ymm2,ymm6,[ecx]
vfmaddsub231pd ymm2,ymm6,ymm4
vfmaddsub231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub231pd ymm2,ymm6,[ecx]
vfmaddsub231ps ymm2,ymm6,ymm4
vfmaddsub231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub231ps ymm2,ymm6,[ecx]
vfmsubadd132pd ymm2,ymm6,ymm4
vfmsubadd132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd132pd ymm2,ymm6,[ecx]
vfmsubadd132ps ymm2,ymm6,ymm4
vfmsubadd132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd132ps ymm2,ymm6,[ecx]
vfmsubadd213pd ymm2,ymm6,ymm4
vfmsubadd213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd213pd ymm2,ymm6,[ecx]
vfmsubadd213ps ymm2,ymm6,ymm4
vfmsubadd213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd213ps ymm2,ymm6,[ecx]
vfmsubadd231pd ymm2,ymm6,ymm4
vfmsubadd231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd231pd ymm2,ymm6,[ecx]
vfmsubadd231ps ymm2,ymm6,ymm4
vfmsubadd231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd231ps ymm2,ymm6,[ecx]
vfmsub132pd ymm2,ymm6,ymm4
vfmsub132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub132pd ymm2,ymm6,[ecx]
vfmsub132ps ymm2,ymm6,ymm4
vfmsub132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub132ps ymm2,ymm6,[ecx]
vfmsub213pd ymm2,ymm6,ymm4
vfmsub213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub213pd ymm2,ymm6,[ecx]
vfmsub213ps ymm2,ymm6,ymm4
vfmsub213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub213ps ymm2,ymm6,[ecx]
vfmsub231pd ymm2,ymm6,ymm4
vfmsub231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub231pd ymm2,ymm6,[ecx]
vfmsub231ps ymm2,ymm6,ymm4
vfmsub231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub231ps ymm2,ymm6,[ecx]
vfnmadd132pd ymm2,ymm6,ymm4
vfnmadd132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd132pd ymm2,ymm6,[ecx]
vfnmadd132ps ymm2,ymm6,ymm4
vfnmadd132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd132ps ymm2,ymm6,[ecx]
vfnmadd213pd ymm2,ymm6,ymm4
vfnmadd213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd213pd ymm2,ymm6,[ecx]
vfnmadd213ps ymm2,ymm6,ymm4
vfnmadd213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd213ps ymm2,ymm6,[ecx]
vfnmadd231pd ymm2,ymm6,ymm4
vfnmadd231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd231pd ymm2,ymm6,[ecx]
vfnmadd231ps ymm2,ymm6,ymm4
vfnmadd231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd231ps ymm2,ymm6,[ecx]
vfnmsub132pd ymm2,ymm6,ymm4
vfnmsub132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub132pd ymm2,ymm6,[ecx]
vfnmsub132ps ymm2,ymm6,ymm4
vfnmsub132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub132ps ymm2,ymm6,[ecx]
vfnmsub213pd ymm2,ymm6,ymm4
vfnmsub213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub213pd ymm2,ymm6,[ecx]
vfnmsub213ps ymm2,ymm6,ymm4
vfnmsub213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub213ps ymm2,ymm6,[ecx]
vfnmsub231pd ymm2,ymm6,ymm4
vfnmsub231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub231pd ymm2,ymm6,[ecx]
vfnmsub231ps ymm2,ymm6,ymm4
vfnmsub231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub231ps ymm2,ymm6,[ecx]
# Tests for op xmm/mem128, xmm, xmm
vfmadd132pd xmm2,xmm6,xmm4
vfmadd132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd132pd xmm7,xmm6,[ecx]
vfmadd132ps xmm2,xmm6,xmm4
vfmadd132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd132ps xmm7,xmm6,[ecx]
vfmadd213pd xmm2,xmm6,xmm4
vfmadd213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd213pd xmm7,xmm6,[ecx]
vfmadd213ps xmm2,xmm6,xmm4
vfmadd213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd213ps xmm7,xmm6,[ecx]
vfmadd231pd xmm2,xmm6,xmm4
vfmadd231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd231pd xmm7,xmm6,[ecx]
vfmadd231ps xmm2,xmm6,xmm4
vfmadd231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd231ps xmm7,xmm6,[ecx]
vfmaddsub132pd xmm2,xmm6,xmm4
vfmaddsub132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub132pd xmm7,xmm6,[ecx]
vfmaddsub132ps xmm2,xmm6,xmm4
vfmaddsub132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub132ps xmm7,xmm6,[ecx]
vfmaddsub213pd xmm2,xmm6,xmm4
vfmaddsub213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub213pd xmm7,xmm6,[ecx]
vfmaddsub213ps xmm2,xmm6,xmm4
vfmaddsub213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub213ps xmm7,xmm6,[ecx]
vfmaddsub231pd xmm2,xmm6,xmm4
vfmaddsub231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub231pd xmm7,xmm6,[ecx]
vfmaddsub231ps xmm2,xmm6,xmm4
vfmaddsub231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub231ps xmm7,xmm6,[ecx]
vfmsubadd132pd xmm2,xmm6,xmm4
vfmsubadd132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd132pd xmm7,xmm6,[ecx]
vfmsubadd132ps xmm2,xmm6,xmm4
vfmsubadd132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd132ps xmm7,xmm6,[ecx]
vfmsubadd213pd xmm2,xmm6,xmm4
vfmsubadd213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd213pd xmm7,xmm6,[ecx]
vfmsubadd213ps xmm2,xmm6,xmm4
vfmsubadd213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd213ps xmm7,xmm6,[ecx]
vfmsubadd231pd xmm2,xmm6,xmm4
vfmsubadd231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd231pd xmm7,xmm6,[ecx]
vfmsubadd231ps xmm2,xmm6,xmm4
vfmsubadd231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd231ps xmm7,xmm6,[ecx]
vfmsub132pd xmm2,xmm6,xmm4
vfmsub132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub132pd xmm7,xmm6,[ecx]
vfmsub132ps xmm2,xmm6,xmm4
vfmsub132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub132ps xmm7,xmm6,[ecx]
vfmsub213pd xmm2,xmm6,xmm4
vfmsub213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub213pd xmm7,xmm6,[ecx]
vfmsub213ps xmm2,xmm6,xmm4
vfmsub213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub213ps xmm7,xmm6,[ecx]
vfmsub231pd xmm2,xmm6,xmm4
vfmsub231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub231pd xmm7,xmm6,[ecx]
vfmsub231ps xmm2,xmm6,xmm4
vfmsub231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub231ps xmm7,xmm6,[ecx]
vfnmadd132pd xmm2,xmm6,xmm4
vfnmadd132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd132pd xmm7,xmm6,[ecx]
vfnmadd132ps xmm2,xmm6,xmm4
vfnmadd132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd132ps xmm7,xmm6,[ecx]
vfnmadd213pd xmm2,xmm6,xmm4
vfnmadd213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd213pd xmm7,xmm6,[ecx]
vfnmadd213ps xmm2,xmm6,xmm4
vfnmadd213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd213ps xmm7,xmm6,[ecx]
vfnmadd231pd xmm2,xmm6,xmm4
vfnmadd231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd231pd xmm7,xmm6,[ecx]
vfnmadd231ps xmm2,xmm6,xmm4
vfnmadd231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd231ps xmm7,xmm6,[ecx]
vfnmsub132pd xmm2,xmm6,xmm4
vfnmsub132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub132pd xmm7,xmm6,[ecx]
vfnmsub132ps xmm2,xmm6,xmm4
vfnmsub132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub132ps xmm7,xmm6,[ecx]
vfnmsub213pd xmm2,xmm6,xmm4
vfnmsub213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub213pd xmm7,xmm6,[ecx]
vfnmsub213ps xmm2,xmm6,xmm4
vfnmsub213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub213ps xmm7,xmm6,[ecx]
vfnmsub231pd xmm2,xmm6,xmm4
vfnmsub231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub231pd xmm7,xmm6,[ecx]
vfnmsub231ps xmm2,xmm6,xmm4
vfnmsub231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub231ps xmm7,xmm6,[ecx]
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd132sd xmm2,xmm6,[ecx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd213sd xmm2,xmm6,[ecx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd231sd xmm2,xmm6,[ecx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub132sd xmm2,xmm6,[ecx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub213sd xmm2,xmm6,[ecx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub231sd xmm2,xmm6,[ecx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd132sd xmm2,xmm6,[ecx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd213sd xmm2,xmm6,[ecx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd231sd xmm2,xmm6,[ecx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub132sd xmm2,xmm6,[ecx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub213sd xmm2,xmm6,[ecx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub231sd xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd132ss xmm2,xmm6,[ecx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd213ss xmm2,xmm6,[ecx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd231ss xmm2,xmm6,[ecx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub132ss xmm2,xmm6,[ecx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub213ss xmm2,xmm6,[ecx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub231ss xmm2,xmm6,[ecx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd132ss xmm2,xmm6,[ecx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd213ss xmm2,xmm6,[ecx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd231ss xmm2,xmm6,[ecx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub132ss xmm2,xmm6,[ecx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub213ss xmm2,xmm6,[ecx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub231ss xmm2,xmm6,[ecx]
# ---- concatenation marker (dataset-extraction artifact, not part of any test file) ----
# repo: tactcomplabs/xbgas-binutils-gdb   (original file size: 2,257 bytes)
# next file: gas/testsuite/gas/i386/ssemmx2.s
# GAS testsuite input: SSE2 integer (MMX-on-XMM) instruction forms in
# 32-bit mode, AT&T syntax.
# NOTE(review): testsuite .s inputs are normally paired with an expected
# objdump listing; keep instruction lines byte-identical.
.code32
foo:
pavgb %xmm1,%xmm0
pavgb (%edx),%xmm1
pavgw %xmm3,%xmm2
pavgw (%esp,1),%xmm3
pextrw $0x0,%xmm1,%eax
pinsrw $0x1,(%ecx),%xmm1
pinsrw $0x2,%edx,%xmm2
pmaxsw %xmm1,%xmm0
pmaxsw (%edx),%xmm1
pmaxub %xmm2,%xmm2
pmaxub (%esp,1),%xmm3
pminsw %xmm5,%xmm4
pminsw (%esi),%xmm5
pminub %xmm7,%xmm6
pminub (%eax),%xmm7
pmovmskb %xmm5,%eax
pmulhuw %xmm5,%xmm4
pmulhuw (%esi),%xmm5
psadbw %xmm7,%xmm6
psadbw (%eax),%xmm7
pshufd $0x1,%xmm2,%xmm3
pshufd $0x4,0x0(%ebp),%xmm6
pshufhw $0x1,%xmm2,%xmm3
pshufhw $0x4,0x0(%ebp),%xmm6
pshuflw $0x1,%xmm2,%xmm3
pshuflw $0x4,0x0(%ebp),%xmm6
movntdq %xmm2,(%eax)
# The remaining instructions all use a 0x90909090 displacement — NOTE(review):
# presumably a recognizable filler byte pattern for encoding checks; confirm
# against the paired expected dump.
punpcklbw 0x90909090(%eax),%xmm2
punpcklwd 0x90909090(%eax),%xmm2
punpckldq 0x90909090(%eax),%xmm2
packsswb 0x90909090(%eax),%xmm2
pcmpgtb 0x90909090(%eax),%xmm2
pcmpgtw 0x90909090(%eax),%xmm2
pcmpgtd 0x90909090(%eax),%xmm2
packuswb 0x90909090(%eax),%xmm2
punpckhbw 0x90909090(%eax),%xmm2
punpckhwd 0x90909090(%eax),%xmm2
punpckhdq 0x90909090(%eax),%xmm2
packssdw 0x90909090(%eax),%xmm2
movd 0x90909090(%eax),%xmm2
movq 0x90909090(%eax),%xmm2
psrlw $0x90,%xmm0
psrld $0x90,%xmm0
psrlq $0x90,%xmm0
pcmpeqb 0x90909090(%eax),%xmm2
pcmpeqw 0x90909090(%eax),%xmm2
pcmpeqd 0x90909090(%eax),%xmm2
movd %xmm2,0x90909090(%eax)
movq %xmm2,0x90909090(%eax)
psrlw 0x90909090(%eax),%xmm2
psrld 0x90909090(%eax),%xmm2
psrlq 0x90909090(%eax),%xmm2
pmullw 0x90909090(%eax),%xmm2
psubusb 0x90909090(%eax),%xmm2
psubusw 0x90909090(%eax),%xmm2
pand 0x90909090(%eax),%xmm2
paddusb 0x90909090(%eax),%xmm2
paddusw 0x90909090(%eax),%xmm2
pandn 0x90909090(%eax),%xmm2
psraw 0x90909090(%eax),%xmm2
psrad 0x90909090(%eax),%xmm2
pmulhw 0x90909090(%eax),%xmm2
psubsb 0x90909090(%eax),%xmm2
psubsw 0x90909090(%eax),%xmm2
por 0x90909090(%eax),%xmm2
paddsb 0x90909090(%eax),%xmm2
paddsw 0x90909090(%eax),%xmm2
pxor 0x90909090(%eax),%xmm2
psllw 0x90909090(%eax),%xmm2
pslld 0x90909090(%eax),%xmm2
psllq 0x90909090(%eax),%xmm2
pmaddwd 0x90909090(%eax),%xmm2
psubb 0x90909090(%eax),%xmm2
psubw 0x90909090(%eax),%xmm2
psubd 0x90909090(%eax),%xmm2
paddb 0x90909090(%eax),%xmm2
paddw 0x90909090(%eax),%xmm2
paddd 0x90909090(%eax),%xmm2
.p2align 4
# ---- concatenation marker (dataset-extraction artifact, not part of any test file) ----
# repo: tactcomplabs/xbgas-binutils-gdb   (original file size: 14,595 bytes)
# next file: gas/testsuite/gas/i386/x86-64-avx512f_vl-wig.s
# Check 64bit AVX512{F,VL} WIG instructions
.allow_index_reg
.text
_start:
# AT&T-syntax section.  Each 9-line group exercises one packed
# widening-move form: three register variants (plain, {%k7}
# merge-masked, {%k7}{z} zero-masked) followed by memory operands that
# probe the EVEX Disp8*N compression boundaries — lines tagged "Disp8"
# use the largest/smallest displacements that still fit the scaled
# 8-bit field; their neighbours deliberately step just past it.
# vpmovsxbd: sign-extend bytes -> dwords, xmm destination
	vpmovsxbd %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsxbd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsxbd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbd (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovsxbd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovsxbd 508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxbd 512(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovsxbd -512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxbd -516(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovsxbd: ymm destination
	vpmovsxbd %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovsxbd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovsxbd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbd (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovsxbd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovsxbd 1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxbd 1024(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovsxbd -1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxbd -1032(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovsxbq: sign-extend bytes -> qwords, xmm destination
	vpmovsxbq %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsxbq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsxbq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbq (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovsxbq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovsxbq 254(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxbq 256(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovsxbq -256(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxbq -258(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovsxbq: ymm destination
	vpmovsxbq %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovsxbq %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovsxbq %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxbq (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovsxbq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovsxbq 508(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxbq 512(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovsxbq -512(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxbq -516(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovsxwd: sign-extend words -> dwords, xmm destination
	vpmovsxwd %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsxwd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsxwd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwd (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovsxwd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovsxwd 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxwd 1024(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovsxwd -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxwd -1032(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovsxwd: ymm destination
	vpmovsxwd %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovsxwd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovsxwd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwd (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovsxwd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovsxwd 2032(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxwd 2048(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovsxwd -2048(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxwd -2064(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovsxwq: sign-extend words -> qwords, xmm destination
	vpmovsxwq %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsxwq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsxwq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwq (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovsxwq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovsxwq 508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxwq 512(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovsxwq -512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovsxwq -516(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovsxwq: ymm destination
	vpmovsxwq %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovsxwq %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovsxwq %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsxwq (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovsxwq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovsxwq 1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxwq 1024(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovsxwq -1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovsxwq -1032(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovzxbd: zero-extend bytes -> dwords, xmm destination
	vpmovzxbd %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovzxbd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovzxbd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbd (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovzxbd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovzxbd 508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxbd 512(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovzxbd -512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxbd -516(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovzxbd: ymm destination
	vpmovzxbd %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovzxbd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovzxbd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbd (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovzxbd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovzxbd 1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxbd 1024(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovzxbd -1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxbd -1032(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovzxbq: zero-extend bytes -> qwords, xmm destination
	vpmovzxbq %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovzxbq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovzxbq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbq (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovzxbq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovzxbq 254(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxbq 256(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovzxbq -256(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxbq -258(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovzxbq: ymm destination
	vpmovzxbq %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovzxbq %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovzxbq %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxbq (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovzxbq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovzxbq 508(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxbq 512(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovzxbq -512(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxbq -516(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovzxwd: zero-extend words -> dwords, xmm destination
	vpmovzxwd %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovzxwd %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovzxwd %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwd (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovzxwd 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovzxwd 1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxwd 1024(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovzxwd -1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxwd -1032(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovzxwd: ymm destination
	vpmovzxwd %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovzxwd %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovzxwd %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwd (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovzxwd 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovzxwd 2032(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxwd 2048(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovzxwd -2048(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxwd -2064(%rdx), %ymm30	 # AVX512{F,VL}
# vpmovzxwq: zero-extend words -> qwords, xmm destination
	vpmovzxwq %xmm29, %xmm30	 # AVX512{F,VL}
	vpmovzxwq %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovzxwq %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwq (%rcx), %xmm30	 # AVX512{F,VL}
	vpmovzxwq 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpmovzxwq 508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxwq 512(%rdx), %xmm30	 # AVX512{F,VL}
	vpmovzxwq -512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpmovzxwq -516(%rdx), %xmm30	 # AVX512{F,VL}
# vpmovzxwq: ymm destination
	vpmovzxwq %xmm29, %ymm30	 # AVX512{F,VL}
	vpmovzxwq %xmm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpmovzxwq %xmm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpmovzxwq (%rcx), %ymm30	 # AVX512{F,VL}
	vpmovzxwq 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpmovzxwq 1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxwq 1024(%rdx), %ymm30	 # AVX512{F,VL}
	vpmovzxwq -1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpmovzxwq -1032(%rdx), %ymm30	 # AVX512{F,VL}
	.intel_syntax noprefix
# Intel-syntax section: the same instruction matrix as the AT&T section
# above, re-spelled with explicit memory-operand sizes (WORD/DWORD/
# QWORD/XMMWORD PTR) so the assembler's Intel-mode size deduction and
# EVEX Disp8*N compression are both exercised.
# vpmovsxbd: sign-extend bytes -> dwords, xmm destination
	vpmovsxbd xmm30, xmm29	 # AVX512{F,VL}
	vpmovsxbd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxbd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxbd xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxbd xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxbd xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpmovsxbd xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpmovsxbd xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpmovsxbd xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
# vpmovsxbd: ymm destination
	vpmovsxbd ymm30, xmm29	 # AVX512{F,VL}
	vpmovsxbd ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxbd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxbd ymm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxbd ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxbd ymm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxbd ymm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vpmovsxbd ymm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxbd ymm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
# vpmovsxbq: sign-extend bytes -> qwords, xmm destination
	vpmovsxbq xmm30, xmm29	 # AVX512{F,VL}
	vpmovsxbq xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxbq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxbq xmm30, WORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxbq xmm30, WORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxbq xmm30, WORD PTR [rdx+254]	 # AVX512{F,VL} Disp8
	vpmovsxbq xmm30, WORD PTR [rdx+256]	 # AVX512{F,VL}
	vpmovsxbq xmm30, WORD PTR [rdx-256]	 # AVX512{F,VL} Disp8
	vpmovsxbq xmm30, WORD PTR [rdx-258]	 # AVX512{F,VL}
# vpmovsxbq: ymm destination
	vpmovsxbq ymm30, xmm29	 # AVX512{F,VL}
	vpmovsxbq ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxbq ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxbq ymm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxbq ymm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxbq ymm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpmovsxbq ymm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpmovsxbq ymm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpmovsxbq ymm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
# vpmovsxwd: sign-extend words -> dwords, xmm destination
	vpmovsxwd xmm30, xmm29	 # AVX512{F,VL}
	vpmovsxwd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxwd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxwd xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxwd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxwd xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxwd xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vpmovsxwd xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxwd xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
# vpmovsxwd: ymm destination
	vpmovsxwd ymm30, xmm29	 # AVX512{F,VL}
	vpmovsxwd ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxwd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxwd ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxwd ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpmovsxwd ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpmovsxwd ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpmovsxwd ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
# vpmovsxwq: sign-extend words -> qwords, xmm destination
	vpmovsxwq xmm30, xmm29	 # AVX512{F,VL}
	vpmovsxwq xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxwq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxwq xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxwq xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxwq xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpmovsxwq xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpmovsxwq xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpmovsxwq xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
# vpmovsxwq: ymm destination
	vpmovsxwq ymm30, xmm29	 # AVX512{F,VL}
	vpmovsxwq ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsxwq ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsxwq ymm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovsxwq ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovsxwq ymm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vpmovsxwq ymm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vpmovsxwq ymm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vpmovsxwq ymm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
# vpmovzxbd: zero-extend bytes -> dwords, xmm destination
	vpmovzxbd xmm30, xmm29	 # AVX512{F,VL}
	vpmovzxbd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxbd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxbd xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxbd xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxbd xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpmovzxbd xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpmovzxbd xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpmovzxbd xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
# vpmovzxbd: ymm destination
	vpmovzxbd ymm30, xmm29	 # AVX512{F,VL}
	vpmovzxbd ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxbd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxbd ymm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxbd ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxbd ymm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxbd ymm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vpmovzxbd ymm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxbd ymm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
# vpmovzxbq: zero-extend bytes -> qwords, xmm destination
	vpmovzxbq xmm30, xmm29	 # AVX512{F,VL}
	vpmovzxbq xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxbq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxbq xmm30, WORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxbq xmm30, WORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxbq xmm30, WORD PTR [rdx+254]	 # AVX512{F,VL} Disp8
	vpmovzxbq xmm30, WORD PTR [rdx+256]	 # AVX512{F,VL}
	vpmovzxbq xmm30, WORD PTR [rdx-256]	 # AVX512{F,VL} Disp8
	vpmovzxbq xmm30, WORD PTR [rdx-258]	 # AVX512{F,VL}
# vpmovzxbq: ymm destination
	vpmovzxbq ymm30, xmm29	 # AVX512{F,VL}
	vpmovzxbq ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxbq ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxbq ymm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxbq ymm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxbq ymm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpmovzxbq ymm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpmovzxbq ymm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpmovzxbq ymm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
# vpmovzxwd: zero-extend words -> dwords, xmm destination
	vpmovzxwd xmm30, xmm29	 # AVX512{F,VL}
	vpmovzxwd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxwd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxwd xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxwd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxwd xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxwd xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vpmovzxwd xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxwd xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
# vpmovzxwd: ymm destination
	vpmovzxwd ymm30, xmm29	 # AVX512{F,VL}
	vpmovzxwd ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxwd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxwd ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxwd ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpmovzxwd ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpmovzxwd ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpmovzxwd ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
# vpmovzxwq: zero-extend words -> qwords, xmm destination
	vpmovzxwq xmm30, xmm29	 # AVX512{F,VL}
	vpmovzxwq xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxwq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxwq xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxwq xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxwq xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpmovzxwq xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpmovzxwq xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpmovzxwq xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
# vpmovzxwq: ymm destination
	vpmovzxwq ymm30, xmm29	 # AVX512{F,VL}
	vpmovzxwq ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovzxwq ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovzxwq ymm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vpmovzxwq ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpmovzxwq ymm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vpmovzxwq ymm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vpmovzxwq ymm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vpmovzxwq ymm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 4,064
|
gas/testsuite/gas/i386/fp.s
|
.data
# Emit the same constant (log2(10) = 3.3219280948...) in each FP
# format; the commented-out .byte lines record the expected encodings.
# .tfloat is 80-bit floating point format.
.tfloat 3.32192809488736218171e0
# .byte 0x0, 0x88, 0x1b, 0xcd, 0x4b, 0x78, 0x9a, 0xd4, 0x0, 0x40
# .double is 64-bit floating point format.
.double 3.32192809488736218171e0
# .byte 0x71, 0xa3, 0x79, 0x09, 0x4f, 0x93, 0x0a, 0x40
# The next two are 32-bit floating point format.
.float 3.32192809488736218171e0
# .byte 0x78, 0x9a, 0x54, 0x40
.single 3.32192809488736218171e0
# .byte 0x78, 0x9a, 0x54, 0x40
.p2align 4,0
# Regression cases for the decimal-to-binary conversion in the
# assembler's generic FP parser:
# The assembler used to treat the next value as zero instead of 1e-22.
.double .0000000000000000000001
.double 1e-22
# The assembler used to limit the number of digits too much.
.double 37778931862957165903871.0
.double 37778931862957165903873.0
# Ensure we handle a crazy number of digits
# 1.000...0001 with ~2000 fractional digits, kept on a single source
# line: the literal must end in "...01" so the parser has to carry the
# final non-zero digit through the full-precision conversion.  (The
# original had been wrapped, leaving a stray "01" statement that is a
# syntax error and silently dropping the trailing digits.)
.double 1.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001
.p2align 4,0
# .ds.x reserves one 80-bit slot, filled with -1.
.ds.x 1, -1
.p2align 4,0xcc
# Raw-hex ("0x:" prefixed) float forms: the digits give the encoding
# directly rather than a decimal value, across the .tfloat/.dc.x/.dcb.x
# spellings of the 80-bit directive.
.tfloat 0x:3ffe80
.dc.x 0x:bffd80
.dcb.x 1, 0x:03ff80
.p2align 4,0xaa
# 16-bit formats: IEEE half (.hfloat) and bfloat16, mixing decimal and
# raw-hex operands on one line.
.hfloat 1, -2, 0x:3c00
.bfloat16 1, -2, 0x:3f80
.p2align 4,0x55
# Special values (Inf/NaN, signed and unsigned, quiet and signalling)
# emitted in every supported format; each group is separated by a
# .p2align with a distinct fill byte so the boundaries are visible in
# the object-file dump.
.hfloat Inf
.bfloat16 Inf
.single Inf
.double Inf
.tfloat Inf
.p2align 4,0x44
.hfloat +Inf
.bfloat16 +Inf
.single +Inf
.double +Inf
.tfloat +Inf
.p2align 4,0x33
.hfloat -Inf
.bfloat16 -Inf
.single -Inf
.double -Inf
.tfloat -Inf
.p2align 4,0x22
.hfloat NaN
.bfloat16 NaN
.single NaN
.double NaN
.tfloat NaN
.p2align 4,0x44
.hfloat +NaN
.bfloat16 +NaN
.single +NaN
.double +NaN
.tfloat +NaN
.p2align 4,0x33
.hfloat -NaN
.bfloat16 -NaN
.single -NaN
.double -NaN
.tfloat -NaN
.p2align 4,0x22
.hfloat QNaN
.bfloat16 QNaN
.single QNaN
.double QNaN
.tfloat QNaN
.p2align 4,0x44
.hfloat +QNaN
.bfloat16 +QNaN
.single +QNaN
.double +QNaN
.tfloat +QNaN
.p2align 4,0x33
.hfloat -QNaN
.bfloat16 -QNaN
.single -QNaN
.double -QNaN
.tfloat -QNaN
.p2align 4,0x22
.hfloat SNaN
.bfloat16 SNaN
.single SNaN
.double SNaN
.tfloat SNaN
.p2align 4,0x44
.hfloat +SNaN
.bfloat16 +SNaN
.single +SNaN
.double +SNaN
.tfloat +SNaN
.p2align 4,0x33
.hfloat -SNaN
.bfloat16 -SNaN
.single -SNaN
.double -SNaN
.tfloat -SNaN
.p2align 4,0x22
|
tactcomplabs/xbgas-binutils-gdb
| 115,231
|
gas/testsuite/gas/i386/avx512_fp16_vl.s
|
# Check 32bit AVX512-FP16,AVX512VL instructions
.allow_index_reg
.text
_start:
vaddph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vaddph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vaddph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcmpph $123, %ymm4, %ymm5, %k5 #AVX512-FP16,AVX512VL
vcmpph $123, %ymm4, %ymm5, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, %xmm4, %xmm5, %k5 #AVX512-FP16,AVX512VL
vcmpph $123, %xmm4, %xmm5, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, 0x10000000(%esp, %esi, 8), %xmm5, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, (%ecx){1to8}, %xmm5, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph $123, 2032(%ecx), %xmm5, %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph $123, -256(%edx){1to8}, %xmm5, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpph $123, 0x10000000(%esp, %esi, 8), %ymm5, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, (%ecx){1to16}, %ymm5, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph $123, 4064(%ecx), %ymm5, %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph $123, -256(%edx){1to16}, %ymm5, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcvtdq2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtdq2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph %ymm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtdq2ph %ymm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2phx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtdq2ph (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2phx 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph -512(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtdq2ph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2phy 4064(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph -512(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtpd2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph %ymm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtpd2ph %ymm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2phx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtpd2ph (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2phx 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph -1024(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2phy 4064(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph -1024(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2dq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2dq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq 1016(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq -256(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq (%ecx){1to8}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq 2032(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq -256(%edx){1to8}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2pd %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2pd %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd 508(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd -256(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd (%ecx){1to4}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd 1016(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd -256(%edx){1to4}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2psx %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2psx %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx 1016(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx -256(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx (%ecx){1to8}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx 2032(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx -256(%edx){1to8}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2qq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2qq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq 508(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq -256(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq (%ecx){1to4}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq 1016(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq -256(%edx){1to4}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2udq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2udq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq 1016(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq -256(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq (%ecx){1to8}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq 2032(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq -256(%edx){1to8}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2uqq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2uqq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq 508(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq -256(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq (%ecx){1to4}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq 1016(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq -256(%edx){1to4}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2uw %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2uw %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtph2w %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtph2w %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtps2phx %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx %ymm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtps2phx %ymm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phxx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtps2phx (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phxx 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx -512(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phxy 4064(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx -512(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtqq2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph %ymm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtqq2ph %ymm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2phx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtqq2ph (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2phx 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph -1024(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2phy 4064(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph -1024(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvttph2dq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvttph2dq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq 1016(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq -256(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq (%ecx){1to8}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq 2032(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq -256(%edx){1to8}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvttph2qq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvttph2qq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq 508(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq -256(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq (%ecx){1to4}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq 1016(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq -256(%edx){1to4}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvttph2udq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvttph2udq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq 1016(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq -256(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq (%ecx){1to8}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq 2032(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq -256(%edx){1to8}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2uqq: register, masked/zeroed, memory, broadcast ({1to2}/{1to4}),
# and Disp8*N boundaries (508 = 127*4, 1016 = 127*8) for xmm/ymm forms.
vcvttph2uqq %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvttph2uqq %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm5, %ymm6 #AVX512-FP16,AVX512VL
vcvttph2uqq %xmm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq 508(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq -256(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq (%ecx){1to4}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq 1016(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq -256(%edx){1to4}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2uw: full-width FP16 source, so {1to8}/{1to16} broadcast and
# Disp8*N boundaries 2032 = 127*16 (m128) and 4064 = 127*32 (m256).
vcvttph2uw %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvttph2uw %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vcvttph2uw %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvttph2w: same operand-shape coverage as vcvttph2uw.
vcvttph2w %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvttph2w %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vcvttph2w %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtudq2ph: the memory forms are ambiguous in AT&T syntax (xmm dest for
# both source widths), so the explicit x/y suffix forms (vcvtudq2phx/y)
# disambiguate m128 vs m256; broadcast forms are unambiguous via {1toN}.
vcvtudq2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtudq2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph %ymm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtudq2ph %ymm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2phx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtudq2ph (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2phx 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph -512(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2phy 4064(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph -512(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtuqq2ph: same x/y-suffix disambiguation; qword broadcast elements,
# so Disp8(80) maps to -1024 = -128*8.
vcvtuqq2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtuqq2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph %ymm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtuqq2ph %ymm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2phx 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuqq2ph (%ecx){1to2}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2phx 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph -1024(%edx){1to2}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph (%ecx){1to4}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2phy 4064(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph -1024(%edx){1to4}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtuw2ph: full-width word source — destination matches source width,
# so no suffix disambiguation is needed.
vcvtuw2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtuw2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtuw2ph %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcvtw2ph: same operand-shape coverage as vcvtuw2ph.
vcvtw2ph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vcvtw2ph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vcvtw2ph %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vdivph: three-operand reg/reg, masked/zeroed, memory, word broadcast,
# and Disp8*N boundaries for 128/256-bit forms.
vdivph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vdivph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vdivph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfcmaddcph: complex-FP16 forms use dword broadcast elements ({1to4}/{1to8},
# Disp8(80) = -512 = -128*4).
vfcmaddcph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfcmaddcph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfcmaddcph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph (%ecx){1to8}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph -512(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph (%ecx){1to4}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph -512(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfcmulcph: same complex-form operand coverage as vfcmaddcph.
vfcmulcph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfcmulcph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfcmulcph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph (%ecx){1to8}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph -512(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph (%ecx){1to4}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph -512(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmadd{132,213,231}ph: each operand-order variant gets the same coverage —
# reg/reg, masked/zeroed, large-disp SIB, word broadcast, Disp8*N boundaries.
vfmadd132ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmadd132ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmadd132ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmadd213ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmadd213ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmadd231ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmadd231ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddcph: complex form — dword broadcast elements, Disp8(80) = -512.
vfmaddcph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmaddcph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmaddcph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph (%ecx){1to8}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph -512(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph (%ecx){1to4}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph -512(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddsub{132,213,231}ph: identical per-variant coverage — reg/reg,
# masked/zeroed, memory, {1to8}/{1to16} broadcast, Disp8*N boundaries.
vfmaddsub132ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmaddsub132ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmaddsub132ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmaddsub213ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmaddsub213ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmaddsub231ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmaddsub231ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmsub{132,213,231}ph: identical per-variant coverage — reg/reg,
# masked/zeroed, memory, {1to8}/{1to16} broadcast, Disp8*N boundaries.
vfmsub132ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmsub132ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmsub132ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmsub213ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmsub213ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmsub231ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmsub231ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmsubadd{132,213,231}ph: identical per-variant coverage — reg/reg,
# masked/zeroed, memory, {1to8}/{1to16} broadcast, Disp8*N boundaries.
vfmsubadd132ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmsubadd132ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmsubadd132ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmsubadd213ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmsubadd213ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmsubadd231ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmsubadd231ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmulcph: complex form — dword broadcast elements ({1to4}/{1to8},
# Disp8(80) = -512 = -128*4).
vfmulcph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfmulcph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfmulcph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph (%ecx){1to8}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph -512(%edx){1to8}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph (%ecx){1to4}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph -512(%edx){1to4}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfnmadd{132,213,231}ph: word broadcast ({1to8}/{1to16}), Disp8*N at
# 2032 = 127*16 (m128) and 4064 = 127*32 (m256).
vfnmadd132ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfnmadd132ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfnmadd132ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfnmadd213ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfnmadd213ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfnmadd231ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfnmadd231ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfnmsub{132,213,231}ph: identical per-variant coverage — reg/reg,
# masked/zeroed, memory, {1to8}/{1to16} broadcast, Disp8*N boundaries.
vfnmsub132ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfnmsub132ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfnmsub132ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfnmsub213ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfnmsub213ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vfnmsub231ph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vfnmsub231ph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfpclassph: mask-register destination; memory source width is ambiguous
# for a %k dest, so x/y suffixes (vfpclassphx/y) disambiguate m128 vs m256;
# broadcast {1toN} forms are unambiguous without a suffix.
vfpclassph $123, %xmm6, %k5 #AVX512-FP16,AVX512VL
vfpclassph $123, %xmm6, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph $123, %ymm6, %k5 #AVX512-FP16,AVX512VL
vfpclassph $123, %ymm6, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassphx $123, 0x10000000(%esp, %esi, 8), %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph $123, (%ecx){1to8}, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassphx $123, 2032(%ecx), %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph $123, -256(%edx){1to8}, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclassph $123, (%ecx){1to16}, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassphy $123, 4064(%ecx), %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph $123, -256(%edx){1to16}, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
# vgetexpph: register, masked/zeroed, memory, word broadcast, Disp8*N
# boundaries for 128/256-bit forms.
vgetexpph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vgetexpph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vgetexpph %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vgetmantph: immediate-operand variant with the same coverage pattern.
vgetmantph $123, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vgetmantph $123, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph $123, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vgetmantph $123, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph $123, 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph $123, (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph $123, 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph $123, -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph $123, (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph $123, 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph $123, -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vmaxph / vminph / vmulph: three-operand forms — reg/reg, masked/zeroed,
# memory, word broadcast ({1to8}/{1to16}), Disp8*N boundaries.
vmaxph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vmaxph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vmaxph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vminph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vminph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vminph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vminph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vminph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vminph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vminph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vminph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vmulph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vmulph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vmulph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vmulph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vrcpph: two-operand approximation forms (family continues below).
vrcpph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vrcpph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vrcpph %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vreduceph $123, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph $123, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vreduceph $123, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph $123, 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph $123, (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph $123, 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph $123, -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph $123, (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph $123, 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph $123, -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vrndscaleph $123, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph $123, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vrndscaleph $123, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph $123, 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph $123, (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph $123, 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph $123, -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph $123, (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph $123, 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph $123, -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vrsqrtph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vrsqrtph %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vscalefph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vscalefph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vsqrtph %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vsqrtph %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph 0x10000000(%esp, %esi, 8), %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph (%ecx){1to8}, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph 2032(%ecx), %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph -256(%edx){1to8}, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph (%ecx){1to16}, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph 4064(%ecx), %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph -256(%edx){1to16}, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph %ymm4, %ymm5, %ymm6 #AVX512-FP16,AVX512VL
vsubph %ymm4, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph %xmm4, %xmm5, %xmm6 #AVX512-FP16,AVX512VL
vsubph %xmm4, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph 0x10000000(%esp, %esi, 8), %ymm5, %ymm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph (%ecx){1to16}, %ymm5, %ymm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph 4064(%ecx), %ymm5, %ymm6 #AVX512-FP16,AVX512VL Disp8(7f)
vsubph -256(%edx){1to16}, %ymm5, %ymm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph 0x10000000(%esp, %esi, 8), %xmm5, %xmm6{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph (%ecx){1to8}, %xmm5, %xmm6 #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph 2032(%ecx), %xmm5, %xmm6 #AVX512-FP16,AVX512VL Disp8(7f)
vsubph -256(%edx){1to8}, %xmm5, %xmm6{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Switch to Intel syntax: same AVX512-FP16 + AVX512VL coverage repeated in
# Intel operand order (dest first).  Lines must stay byte-identical to match
# the expected-disassembly dumps.  Intel-syntax notes:
#   {k7}/{z}        - opmask merging / zero-masking (MASK_ENABLING/ZEROCTL)
#   WORD/DWORD/QWORD BCST - embedded broadcast; element size implies the
#                     replication count, with an explicit {1toN} where the
#                     count is otherwise ambiguous (e.g. vcvtdq2ph)
#   Disp8(7f)/Disp8(80) - displacement on the compressed-disp8 boundary
.intel_syntax noprefix
vaddph	ymm6, ymm5, ymm4	 #AVX512-FP16,AVX512VL
vaddph	ymm6{k7}{z}, ymm5, ymm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph	xmm6, xmm5, xmm4	 #AVX512-FP16,AVX512VL
vaddph	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph	ymm6, ymm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph	ymm6, ymm5, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph	ymm6{k7}{z}, ymm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddph	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph	xmm6, xmm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph	xmm6, xmm5, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph	xmm6{k7}{z}, xmm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcmpph	k5, ymm5, ymm4, 123	 #AVX512-FP16,AVX512VL
vcmpph	k5{k7}, ymm5, ymm4, 123	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph	k5, xmm5, xmm4, 123	 #AVX512-FP16,AVX512VL
vcmpph	k5{k7}, xmm5, xmm4, 123	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph	k5{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph	k5, xmm5, WORD BCST [ecx], 123	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph	k5, xmm5, XMMWORD PTR [ecx+2032], 123	 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph	k5{k7}, xmm5, WORD BCST [edx-256], 123	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpph	k5{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000], 123	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph	k5, ymm5, WORD BCST [ecx], 123	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph	k5, ymm5, YMMWORD PTR [ecx+4064], 123	 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph	k5{k7}, ymm5, WORD BCST [edx-256], 123	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcvtdq2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtdq2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph	xmm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtdq2ph	xmm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtdq2ph	xmm6, DWORD BCST [ecx]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph	xmm6{k7}{z}, DWORD BCST [edx-512]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtdq2ph	xmm6, DWORD BCST [ecx]{1to8}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2ph	xmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph	xmm6{k7}{z}, DWORD BCST [edx-512]{1to8}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtpd2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph	xmm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtpd2ph	xmm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtpd2ph	xmm6, QWORD BCST [ecx]{1to2}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to2}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph	xmm6, QWORD BCST [ecx]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2ph	xmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2dq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2dq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq	xmm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq	xmm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq	ymm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq	ymm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2pd	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2pd	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd	xmm6{k7}, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd	xmm6, DWORD PTR [ecx+508]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd	ymm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd	ymm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2psx	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2psx	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx	xmm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx	xmm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx	ymm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx	ymm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2qq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2qq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq	xmm6{k7}, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq	xmm6, DWORD PTR [ecx+508]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq	ymm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq	ymm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2udq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2udq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq	xmm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq	xmm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq	ymm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq	ymm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2uqq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2uqq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq	xmm6{k7}, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq	xmm6, DWORD PTR [ecx+508]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq	ymm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq	ymm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2uw	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw	ymm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtph2uw	ymm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw	ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw	ymm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtph2w	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w	ymm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtph2w	ymm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w	ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w	ymm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtps2phx	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx	xmm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtps2phx	xmm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtps2phx	xmm6, DWORD BCST [ecx]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phx	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx	xmm6{k7}{z}, DWORD BCST [edx-512]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx	xmm6, DWORD BCST [ecx]{1to8}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phx	xmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx	xmm6{k7}{z}, DWORD BCST [edx-512]{1to8}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtqq2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph	xmm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtqq2ph	xmm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtqq2ph	xmm6, QWORD BCST [ecx]{1to2}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to2}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph	xmm6, QWORD BCST [ecx]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2ph	xmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2dq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2dq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq	xmm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq	xmm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq	ymm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq	ymm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2qq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2qq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq	xmm6{k7}, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq	xmm6, DWORD PTR [ecx+508]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq	ymm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq	ymm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2udq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2udq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq	xmm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq	xmm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq	ymm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq	ymm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2uqq	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq	ymm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2uqq	ymm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq	xmm6{k7}, DWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq	xmm6, DWORD PTR [ecx+508]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq	ymm6{k7}, QWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq	ymm6, QWORD PTR [ecx+1016]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2uw	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw	ymm6, ymm5	 #AVX512-FP16,AVX512VL
vcvttph2uw	ymm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw	ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw	ymm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvttph2w	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w	ymm6, ymm5	 #AVX512-FP16,AVX512VL
vcvttph2w	ymm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w	ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w	ymm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtudq2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph	xmm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtudq2ph	xmm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtudq2ph	xmm6, DWORD BCST [ecx]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph	xmm6{k7}{z}, DWORD BCST [edx-512]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph	xmm6, DWORD BCST [ecx]{1to8}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2ph	xmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph	xmm6{k7}{z}, DWORD BCST [edx-512]{1to8}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtuqq2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph	xmm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtuqq2ph	xmm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuqq2ph	xmm6, QWORD BCST [ecx]{1to2}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to2}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph	xmm6, QWORD BCST [ecx]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2ph	xmm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph	xmm6{k7}{z}, QWORD BCST [edx-1024]{1to4}	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtuw2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph	ymm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtuw2ph	ymm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph	ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph	ymm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph	xmm6, xmm5	 #AVX512-FP16,AVX512VL
vcvtw2ph	xmm6{k7}{z}, xmm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph	ymm6, ymm5	 #AVX512-FP16,AVX512VL
vcvtw2ph	ymm6{k7}{z}, ymm5	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph	xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph	xmm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph	xmm6, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph	xmm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph	ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph	ymm6, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph	ymm6, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph	ymm6{k7}{z}, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph	ymm6, ymm5, ymm4	 #AVX512-FP16,AVX512VL
vdivph	ymm6{k7}{z}, ymm5, ymm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph	xmm6, xmm5, xmm4	 #AVX512-FP16,AVX512VL
vdivph	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph	ymm6, ymm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph	ymm6, ymm5, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph	ymm6{k7}{z}, ymm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph	xmm6, xmm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph	xmm6, xmm5, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph	xmm6{k7}{z}, xmm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Complex-arithmetic FP16 ops below use DWORD (complex-pair) broadcast
# granularity rather than WORD.
vfcmaddcph	ymm6, ymm5, ymm4	 #AVX512-FP16,AVX512VL
vfcmaddcph	ymm6{k7}{z}, ymm5, ymm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph	xmm6, xmm5, xmm4	 #AVX512-FP16,AVX512VL
vfcmaddcph	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph	ymm6, ymm5, DWORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph	ymm6, ymm5, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph	ymm6{k7}{z}, ymm5, DWORD BCST [edx-512]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph	xmm6, xmm5, DWORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph	xmm6, xmm5, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph	xmm6{k7}{z}, xmm5, DWORD BCST [edx-512]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph	ymm6, ymm5, ymm4	 #AVX512-FP16,AVX512VL
vfcmulcph	ymm6{k7}{z}, ymm5, ymm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph	xmm6, xmm5, xmm4	 #AVX512-FP16,AVX512VL
vfcmulcph	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph	ymm6, ymm5, DWORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph	ymm6, ymm5, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph	ymm6{k7}{z}, ymm5, DWORD BCST [edx-512]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph	xmm6, xmm5, DWORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph	xmm6, xmm5, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph	xmm6{k7}{z}, xmm5, DWORD BCST [edx-512]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph	ymm6, ymm5, ymm4	 #AVX512-FP16,AVX512VL
vfmadd132ph	ymm6{k7}{z}, ymm5, ymm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph	xmm6, xmm5, xmm4	 #AVX512-FP16,AVX512VL
vfmadd132ph	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph	ymm6, ymm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph	ymm6, ymm5, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph	ymm6{k7}{z}, ymm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph	xmm6, xmm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph	xmm6, xmm5, XMMWORD PTR [ecx+2032]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph	xmm6{k7}{z}, xmm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph	ymm6, ymm5, ymm4	 #AVX512-FP16,AVX512VL
vfmadd213ph	ymm6{k7}{z}, ymm5, ymm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph	xmm6, xmm5, xmm4	 #AVX512-FP16,AVX512VL
vfmadd213ph	xmm6{k7}{z}, xmm5, xmm4	 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph	ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph	ymm6, ymm5, WORD BCST [ecx]	 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph	ymm6, ymm5, YMMWORD PTR [ecx+4064]	 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph	ymm6{k7}{z}, ymm5, WORD BCST [edx-256]	 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph	xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000]	 #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmadd231ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmadd231ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmaddcph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmaddcph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph ymm6, ymm5, DWORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph ymm6{k7}{z}, ymm5, DWORD BCST [edx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph xmm6, xmm5, DWORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph xmm6{k7}{z}, xmm5, DWORD BCST [edx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmaddsub132ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmaddsub132ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmaddsub213ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmaddsub213ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmaddsub231ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmaddsub231ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmsub132ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmsub132ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmsub213ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmsub213ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmsub231ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmsub231ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmsubadd132ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmsubadd132ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmsubadd213ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmsubadd213ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmsubadd231ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmsubadd231ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfmulcph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfmulcph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph ymm6, ymm5, DWORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph ymm6{k7}{z}, ymm5, DWORD BCST [edx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph xmm6, xmm5, DWORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph xmm6{k7}{z}, xmm5, DWORD BCST [edx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfnmadd132ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfnmadd132ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfnmadd213ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfnmadd213ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfnmadd231ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfnmadd231ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfnmsub132ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfnmsub132ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfnmsub213ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfnmsub213ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vfnmsub231ph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vfnmsub231ph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfpclassph k5, xmm6, 123 #AVX512-FP16,AVX512VL
vfpclassph k5{k7}, xmm6, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5, ymm6, 123 #AVX512-FP16,AVX512VL
vfpclassph k5{k7}, ymm6, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5{k7}, XMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5, WORD BCST [ecx]{1to8}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassph k5, XMMWORD PTR [ecx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph k5{k7}, WORD BCST [edx-256]{1to8}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclassph k5, WORD BCST [ecx]{1to16}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassph k5, YMMWORD PTR [ecx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph k5{k7}, WORD BCST [edx-256]{1to16}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vgetexpph xmm6, xmm5 #AVX512-FP16,AVX512VL
vgetexpph xmm6{k7}{z}, xmm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph ymm6, ymm5 #AVX512-FP16,AVX512VL
vgetexpph ymm6{k7}{z}, ymm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph xmm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph xmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph xmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph ymm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph ymm6, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph ymm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph ymm6, ymm5, 123 #AVX512-FP16,AVX512VL
vgetmantph ymm6{k7}{z}, ymm5, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph xmm6, xmm5, 123 #AVX512-FP16,AVX512VL
vgetmantph xmm6{k7}{z}, xmm5, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph xmm6, WORD BCST [ecx], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph xmm6, XMMWORD PTR [ecx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph xmm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph ymm6, WORD BCST [ecx], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph ymm6, YMMWORD PTR [ecx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph ymm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vmaxph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vmaxph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vminph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vminph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vminph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vminph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vminph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vminph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vminph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vminph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vmulph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vmulph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vmulph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vmulph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph xmm6, xmm5 #AVX512-FP16,AVX512VL
vrcpph xmm6{k7}{z}, xmm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph ymm6, ymm5 #AVX512-FP16,AVX512VL
vrcpph ymm6{k7}{z}, ymm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph xmm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph xmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph xmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph ymm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph ymm6, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph ymm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph ymm6, ymm5, 123 #AVX512-FP16,AVX512VL
vreduceph ymm6{k7}{z}, ymm5, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph xmm6, xmm5, 123 #AVX512-FP16,AVX512VL
vreduceph xmm6{k7}{z}, xmm5, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph xmm6, WORD BCST [ecx], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph xmm6, XMMWORD PTR [ecx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph xmm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph ymm6, WORD BCST [ecx], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph ymm6, YMMWORD PTR [ecx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph ymm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph ymm6, ymm5, 123 #AVX512-FP16,AVX512VL
vrndscaleph ymm6{k7}{z}, ymm5, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph xmm6, xmm5, 123 #AVX512-FP16,AVX512VL
vrndscaleph xmm6{k7}{z}, xmm5, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph xmm6, WORD BCST [ecx], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph xmm6, XMMWORD PTR [ecx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph xmm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph ymm6, WORD BCST [ecx], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph ymm6, YMMWORD PTR [ecx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph ymm6{k7}{z}, WORD BCST [edx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph xmm6, xmm5 #AVX512-FP16,AVX512VL
vrsqrtph xmm6{k7}{z}, xmm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph ymm6, ymm5 #AVX512-FP16,AVX512VL
vrsqrtph ymm6{k7}{z}, ymm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph xmm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph xmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph xmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph ymm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph ymm6, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph ymm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vscalefph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vscalefph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph xmm6, xmm5 #AVX512-FP16,AVX512VL
vsqrtph xmm6{k7}{z}, xmm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph ymm6, ymm5 #AVX512-FP16,AVX512VL
vsqrtph ymm6{k7}{z}, ymm5 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph xmm6{k7}, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph xmm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph xmm6, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph xmm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph ymm6{k7}, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph ymm6, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph ymm6, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph ymm6{k7}{z}, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph ymm6, ymm5, ymm4 #AVX512-FP16,AVX512VL
vsubph ymm6{k7}{z}, ymm5, ymm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph xmm6, xmm5, xmm4 #AVX512-FP16,AVX512VL
vsubph xmm6{k7}{z}, xmm5, xmm4 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph ymm6, ymm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph ymm6, ymm5, YMMWORD PTR [ecx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vsubph ymm6{k7}{z}, ymm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph xmm6, xmm5, WORD BCST [ecx] #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph xmm6, xmm5, XMMWORD PTR [ecx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vsubph xmm6{k7}{z}, xmm5, WORD BCST [edx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (3,303 bytes)
# gas/testsuite/gas/i386/avx512ifma.s
# =====================================================================
# Check 32bit AVX512IFMA instructions
#
# Assembler test fixture: each line exercises one encoding form of the
# AVX512IFMA multiply-add instructions (register, memory, masking {%k7},
# zeroing {z}, broadcast {1to8}).  "Disp8" in a trailing comment marks a
# displacement expected to fit the EVEX compressed-disp8 encoding; the
# line after it uses the next value that must fall back to disp32.
.allow_index_reg
.text
_start:
	vpmadd52luq %zmm4, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
	vpmadd52luq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512IFMA
	vpmadd52luq (%ecx), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq (%eax){1to8}, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq 8128(%edx), %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52luq 8192(%edx), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq -8192(%edx), %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52luq -8256(%edx), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52luq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52luq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52luq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq %zmm4, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
	vpmadd52huq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512IFMA
	vpmadd52huq (%ecx), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq (%eax){1to8}, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq 8128(%edx), %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52huq 8192(%edx), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq -8192(%edx), %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52huq -8256(%edx), %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52huq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA
	vpmadd52huq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA Disp8
	vpmadd52huq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512IFMA
# Same instruction set repeated in Intel syntax (operand order reversed,
# broadcast/masking written as suffixes) to check both parsers agree.
.intel_syntax noprefix
	vpmadd52luq zmm6, zmm5, zmm4 # AVX512IFMA
	vpmadd52luq zmm6{k7}, zmm5, zmm4 # AVX512IFMA
	vpmadd52luq zmm6{k7}{z}, zmm5, zmm4 # AVX512IFMA
	vpmadd52luq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512IFMA
	vpmadd52luq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512IFMA
	vpmadd52luq zmm6, zmm5, [eax]{1to8} # AVX512IFMA
	vpmadd52luq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512IFMA Disp8
	vpmadd52luq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512IFMA
	vpmadd52luq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512IFMA Disp8
	vpmadd52luq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512IFMA
	vpmadd52luq zmm6, zmm5, [edx+1016]{1to8} # AVX512IFMA Disp8
	vpmadd52luq zmm6, zmm5, [edx+1024]{1to8} # AVX512IFMA
	vpmadd52luq zmm6, zmm5, [edx-1024]{1to8} # AVX512IFMA Disp8
	vpmadd52luq zmm6, zmm5, [edx-1032]{1to8} # AVX512IFMA
	vpmadd52huq zmm6, zmm5, zmm4 # AVX512IFMA
	vpmadd52huq zmm6{k7}, zmm5, zmm4 # AVX512IFMA
	vpmadd52huq zmm6{k7}{z}, zmm5, zmm4 # AVX512IFMA
	vpmadd52huq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512IFMA
	vpmadd52huq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512IFMA
	vpmadd52huq zmm6, zmm5, [eax]{1to8} # AVX512IFMA
	vpmadd52huq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512IFMA Disp8
	vpmadd52huq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512IFMA
	vpmadd52huq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512IFMA Disp8
	vpmadd52huq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512IFMA
	vpmadd52huq zmm6, zmm5, [edx+1016]{1to8} # AVX512IFMA Disp8
	vpmadd52huq zmm6, zmm5, [edx+1024]{1to8} # AVX512IFMA
	vpmadd52huq zmm6, zmm5, [edx-1024]{1to8} # AVX512IFMA Disp8
	vpmadd52huq zmm6, zmm5, [edx-1032]{1to8} # AVX512IFMA
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (6,004 bytes)
# gas/testsuite/gas/i386/x86-64-avx512vnni_vl.s
# =====================================================================
# Check 64bit AVX512{VNNI,VL} instructions
#
# Assembler test fixture: exercises the VNNI dot-product instructions in
# their 128-bit (xmm) and 256-bit (ymm) VL forms, covering register,
# indexed-memory, masking {%kN}, zeroing {z} and broadcast {1toN}
# operands.  "Disp8" marks displacements expected to use the EVEX
# compressed-disp8 encoding.  Uses extended regs (xmm16+/r14) to cover
# EVEX.R'/B'/X encoding bits.
.allow_index_reg
.text
_start:
	vpdpwssd %xmm20, %xmm22, %xmm26 # AVX512{VNNI,VL}
	vpdpwssd %xmm20, %xmm22, %xmm26{%k3} # AVX512{VNNI,VL}
	vpdpwssd %xmm20, %xmm22, %xmm26{%k3}{z} # AVX512{VNNI,VL}
	vpdpwssd 0x123(%rax,%r14,8), %xmm22, %xmm26 # AVX512{VNNI,VL}
	vpdpwssd 2032(%rdx), %xmm22, %xmm26 # AVX512{VNNI,VL} Disp8
	vpdpwssd 508(%rdx){1to4}, %xmm22, %xmm26 # AVX512{VNNI,VL} Disp8
	vpdpwssd %ymm18, %ymm20, %ymm20 # AVX512{VNNI,VL}
	vpdpwssd %ymm18, %ymm20, %ymm20{%k5} # AVX512{VNNI,VL}
	vpdpwssd %ymm18, %ymm20, %ymm20{%k5}{z} # AVX512{VNNI,VL}
	vpdpwssd 0x123(%rax,%r14,8), %ymm20, %ymm20 # AVX512{VNNI,VL}
	vpdpwssd 4064(%rdx), %ymm20, %ymm20 # AVX512{VNNI,VL} Disp8
	vpdpwssd 508(%rdx){1to8}, %ymm20, %ymm20 # AVX512{VNNI,VL} Disp8
	vpdpwssds %xmm23, %xmm19, %xmm22 # AVX512{VNNI,VL}
	vpdpwssds %xmm23, %xmm19, %xmm22{%k7} # AVX512{VNNI,VL}
	vpdpwssds %xmm23, %xmm19, %xmm22{%k7}{z} # AVX512{VNNI,VL}
	vpdpwssds 0x123(%rax,%r14,8), %xmm19, %xmm22 # AVX512{VNNI,VL}
	vpdpwssds 2032(%rdx), %xmm19, %xmm22 # AVX512{VNNI,VL} Disp8
	vpdpwssds 508(%rdx){1to4}, %xmm19, %xmm22 # AVX512{VNNI,VL} Disp8
	vpdpwssds %ymm28, %ymm23, %ymm23 # AVX512{VNNI,VL}
	vpdpwssds %ymm28, %ymm23, %ymm23{%k3} # AVX512{VNNI,VL}
	vpdpwssds %ymm28, %ymm23, %ymm23{%k3}{z} # AVX512{VNNI,VL}
	vpdpwssds 0x123(%rax,%r14,8), %ymm23, %ymm23 # AVX512{VNNI,VL}
	vpdpwssds 4064(%rdx), %ymm23, %ymm23 # AVX512{VNNI,VL} Disp8
	vpdpwssds 508(%rdx){1to8}, %ymm23, %ymm23 # AVX512{VNNI,VL} Disp8
	vpdpbusd %xmm28, %xmm29, %xmm18 # AVX512{VNNI,VL}
	vpdpbusd %xmm28, %xmm29, %xmm18{%k3} # AVX512{VNNI,VL}
	vpdpbusd %xmm28, %xmm29, %xmm18{%k3}{z} # AVX512{VNNI,VL}
	vpdpbusd 0x123(%rax,%r14,8), %xmm29, %xmm18 # AVX512{VNNI,VL}
	vpdpbusd 2032(%rdx), %xmm29, %xmm18 # AVX512{VNNI,VL} Disp8
	vpdpbusd 508(%rdx){1to4}, %xmm29, %xmm18 # AVX512{VNNI,VL} Disp8
	vpdpbusd %ymm17, %ymm18, %ymm20 # AVX512{VNNI,VL}
	vpdpbusd %ymm17, %ymm18, %ymm20{%k2} # AVX512{VNNI,VL}
	vpdpbusd %ymm17, %ymm18, %ymm20{%k2}{z} # AVX512{VNNI,VL}
	vpdpbusd 0x123(%rax,%r14,8), %ymm18, %ymm20 # AVX512{VNNI,VL}
	vpdpbusd 4064(%rdx), %ymm18, %ymm20 # AVX512{VNNI,VL} Disp8
	vpdpbusd 508(%rdx){1to8}, %ymm18, %ymm20 # AVX512{VNNI,VL} Disp8
	vpdpbusds %xmm27, %xmm26, %xmm24 # AVX512{VNNI,VL}
	vpdpbusds %xmm27, %xmm26, %xmm24{%k4} # AVX512{VNNI,VL}
	vpdpbusds %xmm27, %xmm26, %xmm24{%k4}{z} # AVX512{VNNI,VL}
	vpdpbusds 0x123(%rax,%r14,8), %xmm26, %xmm24 # AVX512{VNNI,VL}
	vpdpbusds 2032(%rdx), %xmm26, %xmm24 # AVX512{VNNI,VL} Disp8
	vpdpbusds 508(%rdx){1to4}, %xmm26, %xmm24 # AVX512{VNNI,VL} Disp8
	vpdpbusds %ymm25, %ymm29, %ymm30 # AVX512{VNNI,VL}
	vpdpbusds %ymm25, %ymm29, %ymm30{%k1} # AVX512{VNNI,VL}
	vpdpbusds %ymm25, %ymm29, %ymm30{%k1}{z} # AVX512{VNNI,VL}
	vpdpbusds 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{VNNI,VL}
	vpdpbusds 4064(%rdx), %ymm29, %ymm30 # AVX512{VNNI,VL} Disp8
	vpdpbusds 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{VNNI,VL} Disp8
# Same instruction set repeated in Intel syntax (operand order reversed)
# to check both parsers agree.
.intel_syntax noprefix
	vpdpwssd xmm21, xmm20, xmm23 # AVX512{VNNI,VL}
	vpdpwssd xmm21{k6}, xmm20, xmm23 # AVX512{VNNI,VL}
	vpdpwssd xmm21{k6}{z}, xmm20, xmm23 # AVX512{VNNI,VL}
	vpdpwssd xmm21, xmm20, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpwssd xmm21, xmm20, XMMWORD PTR [rdx+2032] # AVX512{VNNI,VL} Disp8
	vpdpwssd xmm21, xmm20, [rdx+508]{1to4} # AVX512{VNNI,VL} Disp8
	vpdpwssd ymm25, ymm27, ymm17 # AVX512{VNNI,VL}
	vpdpwssd ymm25{k6}, ymm27, ymm17 # AVX512{VNNI,VL}
	vpdpwssd ymm25{k6}{z}, ymm27, ymm17 # AVX512{VNNI,VL}
	vpdpwssd ymm25, ymm27, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpwssd ymm25, ymm27, YMMWORD PTR [rdx+4064] # AVX512{VNNI,VL} Disp8
	vpdpwssd ymm25, ymm27, [rdx+508]{1to8} # AVX512{VNNI,VL} Disp8
	vpdpwssds xmm30, xmm25, xmm21 # AVX512{VNNI,VL}
	vpdpwssds xmm30{k6}, xmm25, xmm21 # AVX512{VNNI,VL}
	vpdpwssds xmm30{k6}{z}, xmm25, xmm21 # AVX512{VNNI,VL}
	vpdpwssds xmm30, xmm25, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpwssds xmm30, xmm25, XMMWORD PTR [rdx+2032] # AVX512{VNNI,VL} Disp8
	vpdpwssds xmm30, xmm25, [rdx+508]{1to4} # AVX512{VNNI,VL} Disp8
	vpdpwssds ymm28, ymm27, ymm27 # AVX512{VNNI,VL}
	vpdpwssds ymm28{k7}, ymm27, ymm27 # AVX512{VNNI,VL}
	vpdpwssds ymm28{k7}{z}, ymm27, ymm27 # AVX512{VNNI,VL}
	vpdpwssds ymm28, ymm27, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpwssds ymm28, ymm27, YMMWORD PTR [rdx+4064] # AVX512{VNNI,VL} Disp8
	vpdpwssds ymm28, ymm27, [rdx+508]{1to8} # AVX512{VNNI,VL} Disp8
	vpdpbusd xmm26, xmm18, xmm19 # AVX512{VNNI,VL}
	vpdpbusd xmm26{k6}, xmm18, xmm19 # AVX512{VNNI,VL}
	vpdpbusd xmm26{k6}{z}, xmm18, xmm19 # AVX512{VNNI,VL}
	vpdpbusd xmm26, xmm18, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpbusd xmm26, xmm18, XMMWORD PTR [rdx+2032] # AVX512{VNNI,VL} Disp8
	vpdpbusd xmm26, xmm18, [rdx+508]{1to4} # AVX512{VNNI,VL} Disp8
	vpdpbusd ymm21, ymm17, ymm27 # AVX512{VNNI,VL}
	vpdpbusd ymm21{k2}, ymm17, ymm27 # AVX512{VNNI,VL}
	vpdpbusd ymm21{k2}{z}, ymm17, ymm27 # AVX512{VNNI,VL}
	vpdpbusd ymm21, ymm17, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpbusd ymm21, ymm17, YMMWORD PTR [rdx+4064] # AVX512{VNNI,VL} Disp8
	vpdpbusd ymm21, ymm17, [rdx+508]{1to8} # AVX512{VNNI,VL} Disp8
	vpdpbusds xmm28, xmm26, xmm24 # AVX512{VNNI,VL}
	vpdpbusds xmm28{k1}, xmm26, xmm24 # AVX512{VNNI,VL}
	vpdpbusds xmm28{k1}{z}, xmm26, xmm24 # AVX512{VNNI,VL}
	vpdpbusds xmm28, xmm26, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpbusds xmm28, xmm26, XMMWORD PTR [rdx+2032] # AVX512{VNNI,VL} Disp8
	vpdpbusds xmm28, xmm26, [rdx+508]{1to4} # AVX512{VNNI,VL} Disp8
	vpdpbusds ymm23, ymm18, ymm27 # AVX512{VNNI,VL}
	vpdpbusds ymm23{k6}, ymm18, ymm27 # AVX512{VNNI,VL}
	vpdpbusds ymm23{k6}{z}, ymm18, ymm27 # AVX512{VNNI,VL}
	vpdpbusds ymm23, ymm18, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{VNNI,VL}
	vpdpbusds ymm23, ymm18, YMMWORD PTR [rdx+4064] # AVX512{VNNI,VL} Disp8
	vpdpbusds ymm23, ymm18, [rdx+508]{1to8} # AVX512{VNNI,VL} Disp8
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (3,683 bytes)
# gas/testsuite/gas/i386/x86-64-inval.s
# =====================================================================
# Negative assembler test fixture: every instruction below must be
# REJECTED by gas in 64-bit mode.  The trailing comment on each line
# states why that form is invalid; the test harness checks that gas
# emits an error for each one.
.text
.allow_index_reg
# All the following should be illegal for x86-64
	aaa # illegal
	aad # illegal
	aam # illegal
	aas # illegal
	arpl %ax,%ax # illegal
	bound %eax,(%rax) # illegal
	calll *%eax # 32-bit data size not allowed
	calll *(%ax) # 32-bit data size not allowed
	calll *(%eax) # 32-bit data size not allowed
	calll *(%r8) # 32-bit data size not allowed
	calll *(%rax) # 32-bit data size not allowed
	callq *(%ax) # no 16-bit addressing
	daa # illegal
	das # illegal
	enterl $0,$0 # can't have 32-bit stack operands
	into # illegal
foo: jcxz foo # No prefix exists to select CX as a counter
	jmpl *%eax # 32-bit data size not allowed
	jmpl *(%ax) # 32-bit data size not allowed
	jmpl *(%eax) # 32-bit data size not allowed
	jmpl *(%r8) # 32-bit data size not allowed
	jmpl *(%rax) # 32-bit data size not allowed
	jmpq *(%ax) # no 16-bit addressing
	lcalll $0,$0 # illegal
	lcallq $0,$0 # illegal
	ldsl %eax,(%rax) # illegal
	ldsq %rax,(%rax) # illegal
	lesl %eax,(%rax) # illegal
	lesq %rax,(%rax) # illegal
	ljmpl $0,$0 # illegal
	ljmpq $0,$0 # illegal
	ljmpq *(%rax) # 64-bit data size not allowed
	loopw foo # No prefix exists to select CX as a counter
	loopew foo # No prefix exists to select CX as a counter
	loopnew foo # No prefix exists to select CX as a counter
	loopnzw foo # No prefix exists to select CX as a counter
	loopzw foo # No prefix exists to select CX as a counter
	leavel # can't have 32-bit stack operands
	pop %ds # illegal
	pop %es # illegal
	pop %ss # illegal
	popa # illegal
	popl %eax # can't have 32-bit stack operands
	push %cs # illegal
	push %ds # illegal
	push %es # illegal
	push %ss # illegal
	pusha # illegal
	pushl %eax # can't have 32-bit stack operands
	pushfl # can't have 32-bit stack operands
	popfl # can't have 32-bit stack operands
	retl # can't have 32-bit stack operands
	insertq $4,$2,%xmm2,%ebx # The last operand must be XMM register.
	fnstsw %eax
	fnstsw %al
	fstsw %eax
	fstsw %al
	in $8,%rax
	out %rax,$8
	movzxl (%rax),%rax
	movnti %ax, (%rax)
	movntiw %ax, (%rax)
	mov 0x80000000(%rax),%ebx
	mov 0x80000000,%ebx
	add (%rip,%rsi), %eax
	add (%rsi,%rip), %eax
	add (,%rip), %eax
	add (%eip,%esi), %eax
	add (%esi,%eip), %eax
	add (,%eip), %eax
	add (%rsi,%esi), %eax
	add (%esi,%rsi), %eax
	add (%eiz), %eax
	add (%riz), %eax
	add (%rax), %riz
	add (%rax), %eiz
# Intel-syntax forms of the same rejections (wrong operand sizes,
# bad address forms, out-of-range displacements).
.intel_syntax noprefix
	cmpxchg16b dword ptr [rax] # Must be oword
	movq xmm1, XMMWORD PTR [rsp]
	movq xmm1, DWORD PTR [rsp]
	movq xmm1, WORD PTR [rsp]
	movq xmm1, BYTE PTR [rsp]
	movq XMMWORD PTR [rsp],xmm1
	movq DWORD PTR [rsp],xmm1
	movq WORD PTR [rsp],xmm1
	movq BYTE PTR [rsp],xmm1
	fnstsw eax
	fnstsw al
	fstsw eax
	fstsw al
	in rax,8
	out 8,rax
	movsx ax, [rax]
	movsx eax, [rax]
	movsx rax, [rax]
	movzx ax, [rax]
	movzx eax, [rax]
	movzx rax, [rax]
	movnti word ptr [rax], ax
	calld eax # 32-bit data size not allowed
	calld [ax] # 32-bit data size not allowed
	calld [eax] # 32-bit data size not allowed
	calld [r8] # 32-bit data size not allowed
	calld [rax] # 32-bit data size not allowed
	callq [ax] # no 16-bit addressing
	jmpd eax # 32-bit data size not allowed
	jmpd [ax] # 32-bit data size not allowed
	jmpd [eax] # 32-bit data size not allowed
	jmpd [r8] # 32-bit data size not allowed
	jmpd [rax] # 32-bit data size not allowed
	jmpq [ax] # no 16-bit addressing
	mov eax,[rax+0x876543210] # out of range displacement
.att_syntax prefix
	movsd (%rsi), %ss:(%rdi), %ss:(%rax)
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (10,354 bytes)
# gas/testsuite/gas/i386/avx512vbmi2.s
# =====================================================================
# Check 32bit AVX512VBMI2 instructions
#
# Assembler test fixture: exercises the VBMI2 compress/expand and
# concatenated-shift instructions with register, memory, masking {%k7},
# zeroing {z}, broadcast {1toN} and immediate operands.  "Disp8" marks
# displacements expected to use the EVEX compressed-disp8 encoding
# (note the element-size-dependent steps: 126 for byte, 128 for
# word/dword/qword full-vector forms, 508/1016 for broadcasts).
.allow_index_reg
.text
_start:
	vpcompressb %zmm6, (%ecx){%k7} # AVX512VBMI2
	vpcompressb %zmm6, -123456(%esp,%esi,8) # AVX512VBMI2
	vpcompressb %zmm6, 126(%edx) # AVX512VBMI2 Disp8
	vpcompressb %zmm5, %zmm6 # AVX512VBMI2
	vpcompressb %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpcompressb %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpcompressw %zmm6, (%ecx){%k7} # AVX512VBMI2
	vpcompressw %zmm6, -123456(%esp,%esi,8) # AVX512VBMI2
	vpcompressw %zmm6, 128(%edx) # AVX512VBMI2 Disp8
	vpcompressw %zmm5, %zmm6 # AVX512VBMI2
	vpcompressw %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpcompressw %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpexpandb (%ecx), %zmm6{%k7} # AVX512VBMI2
	vpexpandb (%ecx), %zmm6{%k7}{z} # AVX512VBMI2
	vpexpandb -123456(%esp,%esi,8), %zmm6 # AVX512VBMI2
	vpexpandb 126(%edx), %zmm6 # AVX512VBMI2 Disp8
	vpexpandb %zmm5, %zmm6 # AVX512VBMI2
	vpexpandb %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpexpandb %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpexpandw (%ecx), %zmm6{%k7} # AVX512VBMI2
	vpexpandw (%ecx), %zmm6{%k7}{z} # AVX512VBMI2
	vpexpandw -123456(%esp,%esi,8), %zmm6 # AVX512VBMI2
	vpexpandw 128(%edx), %zmm6 # AVX512VBMI2 Disp8
	vpexpandw %zmm5, %zmm6 # AVX512VBMI2
	vpexpandw %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpexpandw %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldvw %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshldvw %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshldvw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldvw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshldvw 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldvd %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshldvd %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshldvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshldvd 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldvq %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshldvq %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshldvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshldvq 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdvw %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshrdvw %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshrdvw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshrdvw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshrdvw 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdvd %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshrdvd %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshrdvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshrdvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshrdvd 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdvq %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshrdvq %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshrdvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshrdvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshrdvq 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldw $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshldw $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldw $123, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshldw $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshldw $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshldd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldd $123, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshldd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshldd $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshldq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshldq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshldq $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshldq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdw $0xab, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshrdw $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshrdw $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshrdw $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshrdw $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdd $0xab, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshrdd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshrdd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshrdd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshrdd $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
	vpshrdq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
	vpshrdq $123, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
	vpshrdq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
	vpshrdq $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
	vpshrdq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
# Same instruction set repeated in Intel syntax (operand order reversed,
# immediate last) to check both parsers agree.
.intel_syntax noprefix
	vpcompressb ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512VBMI2
	vpcompressb ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512VBMI2
	vpcompressb ZMMWORD PTR [edx+126], zmm6 # AVX512VBMI2 Disp8
	vpcompressb zmm6, zmm5 # AVX512VBMI2
	vpcompressb zmm6{k7}, zmm5 # AVX512VBMI2
	vpcompressb zmm6{k7}{z}, zmm5 # AVX512VBMI2
	vpcompressw ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512VBMI2
	vpcompressw ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512VBMI2
	vpcompressw ZMMWORD PTR [edx+128], zmm6 # AVX512VBMI2 Disp8
	vpcompressw zmm6, zmm5 # AVX512VBMI2
	vpcompressw zmm6{k7}, zmm5 # AVX512VBMI2
	vpcompressw zmm6{k7}{z}, zmm5 # AVX512VBMI2
	vpexpandb zmm6{k7}, ZMMWORD PTR [ecx] # AVX512VBMI2
	vpexpandb zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512VBMI2
	vpexpandb zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpexpandb zmm6, ZMMWORD PTR [edx+126] # AVX512VBMI2 Disp8
	vpexpandb zmm6, zmm5 # AVX512VBMI2
	vpexpandb zmm6{k7}, zmm5 # AVX512VBMI2
	vpexpandb zmm6{k7}{z}, zmm5 # AVX512VBMI2
	vpexpandw zmm6{k7}, ZMMWORD PTR [ecx] # AVX512VBMI2
	vpexpandw zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512VBMI2
	vpexpandw zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpexpandw zmm6, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpexpandw zmm6, zmm5 # AVX512VBMI2
	vpexpandw zmm6{k7}, zmm5 # AVX512VBMI2
	vpexpandw zmm6{k7}{z}, zmm5 # AVX512VBMI2
	vpshldvw zmm6, zmm5, zmm4 # AVX512VBMI2
	vpshldvw zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
	vpshldvw zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
	vpshldvw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpshldvw zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpshldvd zmm6, zmm5, zmm4 # AVX512VBMI2
	vpshldvd zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
	vpshldvd zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
	vpshldvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpshldvd zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpshldvd zmm6, zmm5, [edx+508]{1to16} # AVX512VBMI2 Disp8
	vpshldvq zmm6, zmm5, zmm4 # AVX512VBMI2
	vpshldvq zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
	vpshldvq zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
	vpshldvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpshldvq zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpshldvq zmm6, zmm5, [edx+1016]{1to8} # AVX512VBMI2 Disp8
	vpshrdvw zmm6, zmm5, zmm4 # AVX512VBMI2
	vpshrdvw zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
	vpshrdvw zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
	vpshrdvw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpshrdvw zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpshrdvd zmm6, zmm5, zmm4 # AVX512VBMI2
	vpshrdvd zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
	vpshrdvd zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
	vpshrdvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpshrdvd zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpshrdvd zmm6, zmm5, [edx+508]{1to16} # AVX512VBMI2 Disp8
	vpshrdvq zmm6, zmm5, zmm4 # AVX512VBMI2
	vpshrdvq zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
	vpshrdvq zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
	vpshrdvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
	vpshrdvq zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
	vpshrdvq zmm6, zmm5, [edx+1016]{1to8} # AVX512VBMI2 Disp8
	vpshldw zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldw zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldw zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
	vpshldw zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
	vpshldd zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldd zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
	vpshldd zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
	vpshldd zmm6, zmm5, [edx+508]{1to16}, 123 # AVX512VBMI2 Disp8
	vpshldq zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldq zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldq zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshldq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
	vpshldq zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
	vpshldq zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512VBMI2 Disp8
	vpshrdw zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdw zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdw zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
	vpshrdw zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
	vpshrdd zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdd zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
	vpshrdd zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
	vpshrdd zmm6, zmm5, [edx+508]{1to16}, 123 # AVX512VBMI2 Disp8
	vpshrdq zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdq zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdq zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
	vpshrdq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
	vpshrdq zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
	vpshrdq zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512VBMI2 Disp8
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (1,413 bytes)
# gas/testsuite/gas/i386/ssse3.s
# =====================================================================
# SSSE3 New Instructions
#
# Assembler test fixture: each SSSE3 mnemonic is assembled in all four
# operand combinations — MMX reg/mem and XMM reg/mem — to verify both
# the 64-bit (MMX) and 128-bit (66-prefixed XMM) encodings.
.text
foo:
	phaddw (%ecx),%mm0
	phaddw %mm1,%mm0
	phaddw (%ecx),%xmm0
	phaddw %xmm1,%xmm0
	phaddd (%ecx),%mm0
	phaddd %mm1,%mm0
	phaddd (%ecx),%xmm0
	phaddd %xmm1,%xmm0
	phaddsw (%ecx),%mm0
	phaddsw %mm1,%mm0
	phaddsw (%ecx),%xmm0
	phaddsw %xmm1,%xmm0
	phsubw (%ecx),%mm0
	phsubw %mm1,%mm0
	phsubw (%ecx),%xmm0
	phsubw %xmm1,%xmm0
	phsubd (%ecx),%mm0
	phsubd %mm1,%mm0
	phsubd (%ecx),%xmm0
	phsubd %xmm1,%xmm0
	phsubsw (%ecx),%mm0
	phsubsw %mm1,%mm0
	phsubsw (%ecx),%xmm0
	phsubsw %xmm1,%xmm0
	pmaddubsw (%ecx),%mm0
	pmaddubsw %mm1,%mm0
	pmaddubsw (%ecx),%xmm0
	pmaddubsw %xmm1,%xmm0
	pmulhrsw (%ecx),%mm0
	pmulhrsw %mm1,%mm0
	pmulhrsw (%ecx),%xmm0
	pmulhrsw %xmm1,%xmm0
	pshufb (%ecx),%mm0
	pshufb %mm1,%mm0
	pshufb (%ecx),%xmm0
	pshufb %xmm1,%xmm0
	psignb (%ecx),%mm0
	psignb %mm1,%mm0
	psignb (%ecx),%xmm0
	psignb %xmm1,%xmm0
	psignw (%ecx),%mm0
	psignw %mm1,%mm0
	psignw (%ecx),%xmm0
	psignw %xmm1,%xmm0
	psignd (%ecx),%mm0
	psignd %mm1,%mm0
	psignd (%ecx),%xmm0
	psignd %xmm1,%xmm0
	palignr $0x2,(%ecx),%mm0
	palignr $0x2,%mm1,%mm0
	palignr $0x2,(%ecx),%xmm0
	palignr $0x2,%xmm1,%xmm0
	pabsb (%ecx),%mm0
	pabsb %mm1,%mm0
	pabsb (%ecx),%xmm0
	pabsb %xmm1,%xmm0
	pabsw (%ecx),%mm0
	pabsw %mm1,%mm0
	pabsw (%ecx),%xmm0
	pabsw %xmm1,%xmm0
	pabsd (%ecx),%mm0
	pabsd %mm1,%mm0
	pabsd (%ecx),%xmm0
	pabsd %xmm1,%xmm0
# Pad the section so the expected disassembly dump has a stable size.
	.p2align 4,0
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (1,815 bytes)
# gas/testsuite/gas/i386/x86-64-sse4_2.s
# =====================================================================
# Streaming SIMD extensions 4.2 Instructions
#
# Assembler test fixture: covers crc32 (all source widths, with and
# without explicit size suffixes), the string-compare pcmpestr*/pcmpistr*
# family, pcmpgtq and popcnt, each in AT&T and then Intel syntax.
.text
foo:
	crc32 %cl,%ebx
	crc32 %cl,%rbx
	crc32 %cx,%ebx
	crc32 %ecx,%ebx
	crc32 %rcx,%rbx
	crc32b (%rcx),%ebx
	crc32w (%rcx),%ebx
	crc32l (%rcx),%ebx
	crc32q (%rcx),%rbx
	crc32b %cl,%ebx
	crc32b %cl,%rbx
	crc32w %cx,%ebx
	crc32l %ecx,%ebx
	crc32q %rcx,%rbx
	pcmpgtq (%rcx),%xmm0
	pcmpgtq %xmm1,%xmm0
	pcmpestri $0x0,(%rcx),%xmm0
	pcmpestri $0x0,%xmm1,%xmm0
	pcmpestriq $0x0,(%rcx),%xmm0
	pcmpestril $0x0,%xmm1,%xmm0
	pcmpestrm $0x1,(%rcx),%xmm0
	pcmpestrm $0x1,%xmm1,%xmm0
	pcmpestrmq $0x1,(%rcx),%xmm0
	pcmpestrml $0x1,%xmm1,%xmm0
	pcmpistri $0x2,(%rcx),%xmm0
	pcmpistri $0x2,%xmm1,%xmm0
	pcmpistrm $0x3,(%rcx),%xmm0
	pcmpistrm $0x3,%xmm1,%xmm0
	popcnt (%rcx),%bx
	popcnt (%rcx),%ebx
	popcnt (%rcx),%rbx
	popcntw (%rcx),%bx
	popcntl (%rcx),%ebx
	popcntq (%rcx),%rbx
	popcnt %cx,%bx
	popcnt %ecx,%ebx
	popcnt %rcx,%rbx
	popcntw %cx,%bx
	popcntl %ecx,%ebx
	popcntq %rcx,%rbx
# Same instruction set repeated in Intel syntax (operand order reversed,
# sizes via PTR qualifiers) to check both parsers agree.
.intel_syntax noprefix
	crc32 ebx,cl
	crc32 rbx,cl
	crc32 ebx,cx
	crc32 ebx,ecx
	crc32 rbx,rcx
	crc32 ebx,BYTE PTR [rcx]
	crc32 ebx,WORD PTR [rcx]
	crc32 ebx,DWORD PTR [rcx]
	crc32 rbx,QWORD PTR [rcx]
	crc32 ebx,cl
	crc32 rbx,cl
	crc32 ebx,cx
	crc32 ebx,ecx
	crc32 rbx,rcx
	pcmpgtq xmm0,XMMWORD PTR [rcx]
	pcmpgtq xmm0,xmm1
	pcmpestri xmm0,XMMWORD PTR [rcx],0x0
	pcmpestri xmm0,xmm1,0x0
	pcmpestrm xmm0,XMMWORD PTR [rcx],0x1
	pcmpestrm xmm0,xmm1,0x1
	pcmpistri xmm0,XMMWORD PTR [rcx],0x2
	pcmpistri xmm0,xmm1,0x2
	pcmpistrm xmm0,XMMWORD PTR [rcx],0x3
	pcmpistrm xmm0,xmm1,0x3
	popcnt bx,WORD PTR [rcx]
	popcnt ebx,DWORD PTR [rcx]
	popcnt rbx,QWORD PTR [rcx]
	popcnt bx,WORD PTR [rcx]
	popcnt ebx,DWORD PTR [rcx]
	popcnt rbx,QWORD PTR [rcx]
	popcnt bx,cx
	popcnt ebx,ecx
	popcnt rbx,rcx
	popcnt bx,cx
	popcnt ebx,ecx
	popcnt rbx,rcx
# Pad the section so the expected disassembly dump has a stable size.
	.p2align 4,0
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (5,467 bytes)
# gas/testsuite/gas/i386/x86-64-sse4_1.s
# =====================================================================
# Streaming SIMD extensions 4.1 Instructions
#
# Assembler test fixture: every SSE4.1 mnemonic is assembled with its
# register and memory operand forms (including the implicit-%xmm0
# blendv variants with both the 3-operand and 2-operand spellings),
# first in AT&T syntax and then again in Intel syntax.
.text
foo:
	blendpd $0x0,(%rcx),%xmm0
	blendpd $0x0,%xmm1,%xmm0
	blendps $0x0,(%rcx),%xmm0
	blendps $0x0,%xmm1,%xmm0
	blendvpd %xmm0,(%rcx),%xmm0
	blendvpd %xmm0,%xmm1,%xmm0
	blendvpd (%rcx),%xmm0
	blendvpd %xmm1,%xmm0
	blendvps %xmm0,(%rcx),%xmm0
	blendvps %xmm0,%xmm1,%xmm0
	blendvps (%rcx),%xmm0
	blendvps %xmm1,%xmm0
	dppd $0x0,(%rcx),%xmm0
	dppd $0x0,%xmm1,%xmm0
	dpps $0x0,(%rcx),%xmm0
	dpps $0x0,%xmm1,%xmm0
	extractps $0x0,%xmm0,%rcx
	extractps $0x0,%xmm0,%ecx
	extractps $0x0,%xmm0,(%rcx)
	insertps $0x0,%xmm1,%xmm0
	insertps $0x0,(%rcx),%xmm0
	movntdqa (%rcx),%xmm0
	mpsadbw $0x0,(%rcx),%xmm0
	mpsadbw $0x0,%xmm1,%xmm0
	packusdw (%rcx),%xmm0
	packusdw %xmm1,%xmm0
	pblendvb %xmm0,(%rcx),%xmm0
	pblendvb %xmm0,%xmm1,%xmm0
	pblendvb (%rcx),%xmm0
	pblendvb %xmm1,%xmm0
	pblendw $0x0,(%rcx),%xmm0
	pblendw $0x0,%xmm1,%xmm0
	pcmpeqq %xmm1,%xmm0
	pcmpeqq (%rcx),%xmm0
	pextrb $0x0,%xmm0,%rcx
	pextrb $0x0,%xmm0,%ecx
	pextrb $0x0,%xmm0,(%rcx)
	pextrd $0x0,%xmm0,%ecx
	pextrd $0x0,%xmm0,(%rcx)
	pextrq $0x0,%xmm0,%rcx
	pextrq $0x0,%xmm0,(%rcx)
	pextrw $0x0,%xmm0,%rcx
	pextrw $0x0,%xmm0,%ecx
	pextrw $0x0,%xmm0,(%rcx)
	phminposuw %xmm1,%xmm0
	phminposuw (%rcx),%xmm0
	pinsrb $0x0,(%rcx),%xmm0
	pinsrb $0x0,%ecx,%xmm0
	pinsrb $0x0,%rcx,%xmm0
	pinsrd $0x0,(%rcx),%xmm0
	pinsrd $0x0,%ecx,%xmm0
	pinsrq $0x0,(%rcx),%xmm0
	pinsrq $0x0,%rcx,%xmm0
	pmaxsb %xmm1,%xmm0
	pmaxsb (%rcx),%xmm0
	pmaxsd %xmm1,%xmm0
	pmaxsd (%rcx),%xmm0
	pmaxud %xmm1,%xmm0
	pmaxud (%rcx),%xmm0
	pmaxuw %xmm1,%xmm0
	pmaxuw (%rcx),%xmm0
	pminsb %xmm1,%xmm0
	pminsb (%rcx),%xmm0
	pminsd %xmm1,%xmm0
	pminsd (%rcx),%xmm0
	pminud %xmm1,%xmm0
	pminud (%rcx),%xmm0
	pminuw %xmm1,%xmm0
	pminuw (%rcx),%xmm0
	pmovsxbw %xmm1,%xmm0
	pmovsxbw (%rcx),%xmm0
	pmovsxbd %xmm1,%xmm0
	pmovsxbd (%rcx),%xmm0
	pmovsxbq %xmm1,%xmm0
	pmovsxbq (%rcx),%xmm0
	pmovsxwd %xmm1,%xmm0
	pmovsxwd (%rcx),%xmm0
	pmovsxwq %xmm1,%xmm0
	pmovsxwq (%rcx),%xmm0
	pmovsxdq %xmm1,%xmm0
	pmovsxdq (%rcx),%xmm0
	pmovzxbw %xmm1,%xmm0
	pmovzxbw (%rcx),%xmm0
	pmovzxbd %xmm1,%xmm0
	pmovzxbd (%rcx),%xmm0
	pmovzxbq %xmm1,%xmm0
	pmovzxbq (%rcx),%xmm0
	pmovzxwd %xmm1,%xmm0
	pmovzxwd (%rcx),%xmm0
	pmovzxwq %xmm1,%xmm0
	pmovzxwq (%rcx),%xmm0
	pmovzxdq %xmm1,%xmm0
	pmovzxdq (%rcx),%xmm0
	pmuldq %xmm1,%xmm0
	pmuldq (%rcx),%xmm0
	pmulld %xmm1,%xmm0
	pmulld (%rcx),%xmm0
	ptest %xmm1,%xmm0
	ptest (%rcx),%xmm0
	roundpd $0x0,(%rcx),%xmm0
	roundpd $0x0,%xmm1,%xmm0
	roundps $0x0,(%rcx),%xmm0
	roundps $0x0,%xmm1,%xmm0
	roundsd $0x0,(%rcx),%xmm0
	roundsd $0x0,%xmm1,%xmm0
	roundss $0x0,(%rcx),%xmm0
	roundss $0x0,%xmm1,%xmm0
# Same instruction set repeated in Intel syntax (operand order reversed,
# memory sizes spelled out via PTR qualifiers) to check both parsers
# agree; note the sub-XMM memory sizes on pmovsx/pmovzx and roundsd/ss.
.intel_syntax noprefix
	blendpd xmm0,XMMWORD PTR [rcx],0x0
	blendpd xmm0,xmm1,0x0
	blendps xmm0,XMMWORD PTR [rcx],0x0
	blendps xmm0,xmm1,0x0
	blendvpd xmm0,XMMWORD PTR [rcx],xmm0
	blendvpd xmm0,xmm1,xmm0
	blendvps xmm0,XMMWORD PTR [rcx],xmm0
	blendvps xmm0,xmm1,xmm0
	dppd xmm0,XMMWORD PTR [rcx],0x0
	dppd xmm0,xmm1,0x0
	dpps xmm0,XMMWORD PTR [rcx],0x0
	dpps xmm0,xmm1,0x0
	extractps rcx,xmm0,0x0
	extractps ecx,xmm0,0x0
	extractps DWORD PTR [rcx],xmm0,0x0
	insertps xmm0,xmm1,0x0
	insertps xmm0,DWORD PTR [rcx],0x0
	movntdqa xmm0,XMMWORD PTR [rcx]
	mpsadbw xmm0,XMMWORD PTR [rcx],0x0
	mpsadbw xmm0,xmm1,0x0
	packusdw xmm0,XMMWORD PTR [rcx]
	packusdw xmm0,xmm1
	pblendvb xmm0,XMMWORD PTR [rcx],xmm0
	pblendvb xmm0,xmm1,xmm0
	pblendw xmm0,XMMWORD PTR [rcx],0x0
	pblendw xmm0,xmm1,0x0
	pcmpeqq xmm0,xmm1
	pcmpeqq xmm0,XMMWORD PTR [rcx]
	pextrb rcx,xmm0,0x0
	pextrb ecx,xmm0,0x0
	pextrb BYTE PTR [rcx],xmm0,0x0
	pextrd ecx,xmm0,0x0
	pextrd DWORD PTR [rcx],xmm0,0x0
	pextrq rcx,xmm0,0x0
	pextrq QWORD PTR [rcx],xmm0,0x0
	pextrw rcx,xmm0,0x0
	pextrw ecx,xmm0,0x0
	pextrw WORD PTR [rcx],xmm0,0x0
	phminposuw xmm0,xmm1
	phminposuw xmm0,XMMWORD PTR [rcx]
	pinsrb xmm0,BYTE PTR [rcx],0x0
	pinsrb xmm0,ecx,0x0
	pinsrb xmm0,rcx,0x0
	pinsrd xmm0,DWORD PTR [rcx],0x0
	pinsrd xmm0,ecx,0x0
	pinsrq xmm0,QWORD PTR [rcx],0x0
	pinsrq xmm0,rcx,0x0
	pmaxsb xmm0,xmm1
	pmaxsb xmm0,XMMWORD PTR [rcx]
	pmaxsd xmm0,xmm1
	pmaxsd xmm0,XMMWORD PTR [rcx]
	pmaxud xmm0,xmm1
	pmaxud xmm0,XMMWORD PTR [rcx]
	pmaxuw xmm0,xmm1
	pmaxuw xmm0,XMMWORD PTR [rcx]
	pminsb xmm0,xmm1
	pminsb xmm0,XMMWORD PTR [rcx]
	pminsd xmm0,xmm1
	pminsd xmm0,XMMWORD PTR [rcx]
	pminud xmm0,xmm1
	pminud xmm0,XMMWORD PTR [rcx]
	pminuw xmm0,xmm1
	pminuw xmm0,XMMWORD PTR [rcx]
	pmovsxbw xmm0,xmm1
	pmovsxbw xmm0,QWORD PTR [rcx]
	pmovsxbd xmm0,xmm1
	pmovsxbd xmm0,DWORD PTR [rcx]
	pmovsxbq xmm0,xmm1
	pmovsxbq xmm0,WORD PTR [rcx]
	pmovsxwd xmm0,xmm1
	pmovsxwd xmm0,QWORD PTR [rcx]
	pmovsxwq xmm0,xmm1
	pmovsxwq xmm0,DWORD PTR [rcx]
	pmovsxdq xmm0,xmm1
	pmovsxdq xmm0,QWORD PTR [rcx]
	pmovzxbw xmm0,xmm1
	pmovzxbw xmm0,QWORD PTR [rcx]
	pmovzxbd xmm0,xmm1
	pmovzxbd xmm0,DWORD PTR [rcx]
	pmovzxbq xmm0,xmm1
	pmovzxbq xmm0,WORD PTR [rcx]
	pmovzxwd xmm0,xmm1
	pmovzxwd xmm0,QWORD PTR [rcx]
	pmovzxwq xmm0,xmm1
	pmovzxwq xmm0,DWORD PTR [rcx]
	pmovzxdq xmm0,xmm1
	pmovzxdq xmm0,QWORD PTR [rcx]
	pmuldq xmm0,xmm1
	pmuldq xmm0,XMMWORD PTR [rcx]
	pmulld xmm0,xmm1
	pmulld xmm0,XMMWORD PTR [rcx]
	ptest xmm0,xmm1
	ptest xmm0,XMMWORD PTR [rcx]
	roundpd xmm0,XMMWORD PTR [rcx],0x0
	roundpd xmm0,xmm1,0x0
	roundps xmm0,XMMWORD PTR [rcx],0x0
	roundps xmm0,xmm1,0x0
	roundsd xmm0,QWORD PTR [rcx],0x0
	roundsd xmm0,xmm1,0x0
	roundss xmm0,DWORD PTR [rcx],0x0
	roundss xmm0,xmm1,0x0
# Pad the section so the expected disassembly dump has a stable size.
	.p2align 4,0
# =====================================================================
# tactcomplabs/xbgas-binutils-gdb (1,522 bytes)
# gas/testsuite/gas/i386/x86-64-nops.s
# =====================================================================
# Assembler/disassembler test fixture for 64-bit NOP and hint-NOP forms.
# The .byte directives emit pre-encoded 0F 18..1F "hint NOP" sequences
# (various ModRM/SIB/displacement shapes, with and without the 0x66
# operand-size and 0x2e segment prefixes) so the disassembler side of
# the test can be checked against known byte patterns; the mnemonic
# lines then check that gas assembles nop/nopw/nopl/nopq operand forms.
.text
	.byte 0x0f, 0x1f, 0x0
	.byte 0x0f, 0x1f, 0x40, 0x0
	.byte 0x0f, 0x1f, 0x44, 0x0, 0x0
	.byte 0x66, 0x0f, 0x1f, 0x44, 0x0, 0x0
	.byte 0x0f, 0x1f, 0x80, 0x0, 0x0, 0x0, 0x0
	.byte 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
	.byte 0x66, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
	.byte 0x66, 0x2e, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
# reg,reg
	.byte 0x0f, 0x19, 0xff
	.byte 0x0f, 0x1a, 0xff
	.byte 0x0f, 0x1b, 0xff
	.byte 0x0f, 0x1c, 0xff
	.byte 0x0f, 0x1d, 0xff
	.byte 0x0f, 0x1e, 0xff
	.byte 0x0f, 0x1f, 0xff
# with base and imm8
	.byte 0x0f, 0x19, 0x5A, 0x22
	.byte 0x0f, 0x1c, 0x5A, 0x22
	.byte 0x0f, 0x1d, 0x5A, 0x22
	.byte 0x0f, 0x1e, 0x5A, 0x22
	.byte 0x0f, 0x1f, 0x5A, 0x22
# with sib and imm32
	.byte 0x0f, 0x19, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
	.byte 0x0f, 0x1c, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
	.byte 0x0f, 0x1d, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
	.byte 0x0f, 0x1e, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
	.byte 0x0f, 0x1f, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
	.byte 0x0f, 0x19, 0x04, 0x60
	.byte 0x0f, 0x1c, 0x0c, 0x60
	.byte 0x0f, 0x1d, 0x04, 0x60
	.byte 0x0f, 0x1e, 0x04, 0x60
	.byte 0x0f, 0x1f, 0x04, 0x60
	.byte 0x0f, 0x19, 0x04, 0x59
	.byte 0x0f, 0x1c, 0x0c, 0x59
	.byte 0x0f, 0x1d, 0x04, 0x59
	.byte 0x0f, 0x1e, 0x04, 0x59
	.byte 0x0f, 0x1f, 0x04, 0x59
# Mnemonic forms: register and memory operands in all three operand
# sizes, for both legacy and REX-extended (r10) registers.
	nop %rax
	nop %eax
	nop %ax
	nopq (%rax)
	nopl (%rax)
	nopw (%rax)
	nopq %rax
	nopl %eax
	nopw %ax
	nop %r10
	nop %r10d
	nop %r10w
	nopq (%r10)
	nopl (%r10)
	nopw (%r10)
	nopq %r10
	nopl %r10d
	nopw %r10w
|
tactcomplabs/xbgas-binutils-gdb
| 4,966
|
gas/testsuite/gas/i386/sse4_1.s
|
# Streaming SIMD extensions 4.1 Instructions
# Testsuite fixture: AT&T-syntax coverage of every SSE4.1 instruction,
# each exercised with a register source and a memory source (and an
# immediate where the instruction takes one).  Instruction order and
# operands must stay exactly as written — they are matched against a
# fixed expected-disassembly dump.
.text
foo:
# Blend instructions: imm8-controlled and implicit-%xmm0-controlled
# (blendv* accepts both the explicit-%xmm0 and two-operand spellings).
blendpd $0,(%ecx),%xmm0
blendpd $0,%xmm1,%xmm0
blendps $0,(%ecx),%xmm0
blendps $0,%xmm1,%xmm0
blendvpd %xmm0,(%ecx),%xmm0
blendvpd %xmm0,%xmm1,%xmm0
blendvpd (%ecx),%xmm0
blendvpd %xmm1,%xmm0
blendvps %xmm0,(%ecx),%xmm0
blendvps %xmm0,%xmm1,%xmm0
blendvps (%ecx),%xmm0
blendvps %xmm1,%xmm0
# Dot products with imm8 mask.
dppd $0,(%ecx),%xmm0
dppd $0,%xmm1,%xmm0
dpps $0,(%ecx),%xmm0
dpps $0,%xmm1,%xmm0
# Insert/extract between XMM and GPR/memory.
extractps $0,%xmm0,%ecx
extractps $0,%xmm0,(%ecx)
insertps $0,%xmm1,%xmm0
insertps $0,(%ecx),%xmm0
movntdqa (%ecx),%xmm0
mpsadbw $0,(%ecx),%xmm0
mpsadbw $0,%xmm1,%xmm0
packusdw (%ecx),%xmm0
packusdw %xmm1,%xmm0
pblendvb %xmm0,(%ecx),%xmm0
pblendvb %xmm0,%xmm1,%xmm0
pblendvb (%ecx),%xmm0
pblendvb %xmm1,%xmm0
pblendw $0,(%ecx),%xmm0
pblendw $0,%xmm1,%xmm0
pcmpeqq %xmm1,%xmm0
pcmpeqq (%ecx),%xmm0
# Byte/dword/word element extract to GPR or memory.
pextrb $0,%xmm0,%ecx
pextrb $0,%xmm0,(%ecx)
pextrd $0,%xmm0,%ecx
pextrd $0,%xmm0,(%ecx)
pextrw $0,%xmm0,%ecx
pextrw $0,%xmm0,(%ecx)
phminposuw %xmm1,%xmm0
phminposuw (%ecx),%xmm0
pinsrb $0,(%ecx),%xmm0
pinsrb $0,%ecx,%xmm0
pinsrd $0,(%ecx),%xmm0
pinsrd $0,%ecx,%xmm0
# Packed integer min/max for the element widths SSE4.1 added.
pmaxsb %xmm1,%xmm0
pmaxsb (%ecx),%xmm0
pmaxsd %xmm1,%xmm0
pmaxsd (%ecx),%xmm0
pmaxud %xmm1,%xmm0
pmaxud (%ecx),%xmm0
pmaxuw %xmm1,%xmm0
pmaxuw (%ecx),%xmm0
pminsb %xmm1,%xmm0
pminsb (%ecx),%xmm0
pminsd %xmm1,%xmm0
pminsd (%ecx),%xmm0
pminud %xmm1,%xmm0
pminud (%ecx),%xmm0
pminuw %xmm1,%xmm0
pminuw (%ecx),%xmm0
# Packed sign/zero extension (pmovsx*/pmovzx*), all element pairs.
pmovsxbw %xmm1,%xmm0
pmovsxbw (%ecx),%xmm0
pmovsxbd %xmm1,%xmm0
pmovsxbd (%ecx),%xmm0
pmovsxbq %xmm1,%xmm0
pmovsxbq (%ecx),%xmm0
pmovsxwd %xmm1,%xmm0
pmovsxwd (%ecx),%xmm0
pmovsxwq %xmm1,%xmm0
pmovsxwq (%ecx),%xmm0
pmovsxdq %xmm1,%xmm0
pmovsxdq (%ecx),%xmm0
pmovzxbw %xmm1,%xmm0
pmovzxbw (%ecx),%xmm0
pmovzxbd %xmm1,%xmm0
pmovzxbd (%ecx),%xmm0
pmovzxbq %xmm1,%xmm0
pmovzxbq (%ecx),%xmm0
pmovzxwd %xmm1,%xmm0
pmovzxwd (%ecx),%xmm0
pmovzxwq %xmm1,%xmm0
pmovzxwq (%ecx),%xmm0
pmovzxdq %xmm1,%xmm0
pmovzxdq (%ecx),%xmm0
pmuldq %xmm1,%xmm0
pmuldq (%ecx),%xmm0
pmulld %xmm1,%xmm0
pmulld (%ecx),%xmm0
ptest %xmm1,%xmm0
ptest (%ecx),%xmm0
# Rounding with imm8 rounding-control.
roundpd $0,(%ecx),%xmm0
roundpd $0,%xmm1,%xmm0
roundps $0,(%ecx),%xmm0
roundps $0,%xmm1,%xmm0
roundsd $0,(%ecx),%xmm0
roundsd $0,%xmm1,%xmm0
roundss $0,(%ecx),%xmm0
roundss $0,%xmm1,%xmm0
# Intel-syntax mirror of the AT&T SSE4.1 coverage above: the same
# instructions spelled with explicit PTR size qualifiers, to verify
# the assembler accepts both syntaxes and produces identical encodings.
# Testsuite fixture — do not reorder or alter operands.
.intel_syntax noprefix
blendpd xmm0,XMMWORD PTR [ecx],0x0
blendpd xmm0,xmm1,0x0
blendps xmm0,XMMWORD PTR [ecx],0x0
blendps xmm0,xmm1,0x0
blendvpd xmm0,XMMWORD PTR [ecx],xmm0
blendvpd xmm0,xmm1,xmm0
blendvps xmm0,XMMWORD PTR [ecx],xmm0
blendvps xmm0,xmm1,xmm0
dppd xmm0,XMMWORD PTR [ecx],0x0
dppd xmm0,xmm1,0x0
dpps xmm0,XMMWORD PTR [ecx],0x0
dpps xmm0,xmm1,0x0
extractps ecx,xmm0,0x0
extractps DWORD PTR [ecx],xmm0,0x0
insertps xmm0,xmm1,0x0
insertps xmm0,DWORD PTR [ecx],0x0
movntdqa xmm0,XMMWORD PTR [ecx]
mpsadbw xmm0,XMMWORD PTR [ecx],0x0
mpsadbw xmm0,xmm1,0x0
packusdw xmm0,XMMWORD PTR [ecx]
packusdw xmm0,xmm1
pblendvb xmm0,XMMWORD PTR [ecx],xmm0
pblendvb xmm0,xmm1,xmm0
pblendw xmm0,XMMWORD PTR [ecx],0x0
pblendw xmm0,xmm1,0x0
pcmpeqq xmm0,xmm1
pcmpeqq xmm0,XMMWORD PTR [ecx]
# pextr*/pinsr* memory forms carry the element-sized PTR qualifier.
pextrb ecx,xmm0,0x0
pextrb BYTE PTR [ecx],xmm0,0x0
pextrd ecx,xmm0,0x0
pextrd DWORD PTR [ecx],xmm0,0x0
pextrw ecx,xmm0,0x0
pextrw WORD PTR [ecx],xmm0,0x0
phminposuw xmm0,xmm1
phminposuw xmm0,XMMWORD PTR [ecx]
pinsrb xmm0,BYTE PTR [ecx],0x0
pinsrb xmm0,ecx,0x0
pinsrd xmm0,DWORD PTR [ecx],0x0
pinsrd xmm0,ecx,0x0
pmaxsb xmm0,xmm1
pmaxsb xmm0,XMMWORD PTR [ecx]
pmaxsd xmm0,xmm1
pmaxsd xmm0,XMMWORD PTR [ecx]
pmaxud xmm0,xmm1
pmaxud xmm0,XMMWORD PTR [ecx]
pmaxuw xmm0,xmm1
pmaxuw xmm0,XMMWORD PTR [ecx]
pminsb xmm0,xmm1
pminsb xmm0,XMMWORD PTR [ecx]
pminsd xmm0,xmm1
pminsd xmm0,XMMWORD PTR [ecx]
pminud xmm0,xmm1
pminud xmm0,XMMWORD PTR [ecx]
pminuw xmm0,xmm1
pminuw xmm0,XMMWORD PTR [ecx]
# pmovsx*/pmovzx* memory operands use the narrow source width
# (QWORD/DWORD/WORD), not the full XMMWORD.
pmovsxbw xmm0,xmm1
pmovsxbw xmm0,QWORD PTR [ecx]
pmovsxbd xmm0,xmm1
pmovsxbd xmm0,DWORD PTR [ecx]
pmovsxbq xmm0,xmm1
pmovsxbq xmm0,WORD PTR [ecx]
pmovsxwd xmm0,xmm1
pmovsxwd xmm0,QWORD PTR [ecx]
pmovsxwq xmm0,xmm1
pmovsxwq xmm0,DWORD PTR [ecx]
pmovsxdq xmm0,xmm1
pmovsxdq xmm0,QWORD PTR [ecx]
pmovzxbw xmm0,xmm1
pmovzxbw xmm0,QWORD PTR [ecx]
pmovzxbd xmm0,xmm1
pmovzxbd xmm0,DWORD PTR [ecx]
pmovzxbq xmm0,xmm1
pmovzxbq xmm0,WORD PTR [ecx]
pmovzxwd xmm0,xmm1
pmovzxwd xmm0,QWORD PTR [ecx]
pmovzxwq xmm0,xmm1
pmovzxwq xmm0,DWORD PTR [ecx]
pmovzxdq xmm0,xmm1
pmovzxdq xmm0,QWORD PTR [ecx]
pmuldq xmm0,xmm1
pmuldq xmm0,XMMWORD PTR [ecx]
pmulld xmm0,xmm1
pmulld xmm0,XMMWORD PTR [ecx]
ptest xmm0,xmm1
ptest xmm0,XMMWORD PTR [ecx]
roundpd xmm0,XMMWORD PTR [ecx],0x0
roundpd xmm0,xmm1,0x0
roundps xmm0,XMMWORD PTR [ecx],0x0
roundps xmm0,xmm1,0x0
# Scalar rounding sources are element-sized (QWORD/DWORD).
roundsd xmm0,QWORD PTR [ecx],0x0
roundsd xmm0,xmm1,0x0
roundss xmm0,DWORD PTR [ecx],0x0
roundss xmm0,xmm1,0x0
.p2align 4,0
|
tactcomplabs/xbgas-binutils-gdb
| 1,977
|
gas/testsuite/gas/i386/lwp.s
|
# Check 64bit LWP instructions
# NOTE(review): the header above says "64bit" but every operand below is
# a 32-bit register/address — this looks copied from x86-64-lwp.s and
# probably should read "32bit"; verify against the companion test.
# Testsuite fixture for AMD Lightweight Profiling (LWP, XOP-encoded):
# llwpcb/slwpcb over each GPR, then lwpins/lwpval with register, plain
# memory, and disp32 memory second operands.  Order must stay as-is.
.allow_index_reg
.text
_start:
# Load LWP control block pointer from each of the eight GPRs.
llwpcb %eax
llwpcb %ecx
llwpcb %edx
llwpcb %ebx
llwpcb %esp
llwpcb %ebp
llwpcb %esi
llwpcb %edi
# Store LWP control block pointer into each GPR (reverse order).
slwpcb %edi
slwpcb %esi
slwpcb %ebp
slwpcb %esp
slwpcb %ebx
slwpcb %edx
slwpcb %ecx
slwpcb %eax
# lwpins/lwpval: imm32, reg/mem source, destination GPR.
lwpins $0x12345678, %edi, %eax
lwpins $0x12345678, %esi, %ecx
lwpins $0x12345678, %ebp, %edx
lwpins $0x12345678, %esp, %ebx
lwpins $0x12345678, %ebx, %esp
lwpins $0x12345678, %edx, %ebp
lwpins $0x12345678, %ecx, %esi
lwpins $0x12345678, %eax, %edi
lwpval $0x12345678, %edi, %eax
lwpval $0x12345678, %esi, %ecx
lwpval $0x12345678, %ebp, %edx
lwpval $0x12345678, %esp, %ebx
lwpval $0x12345678, %ebx, %esp
lwpval $0x12345678, %edx, %ebp
lwpval $0x12345678, %ecx, %esi
lwpval $0x12345678, %eax, %edi
# Memory-operand forms, no displacement.
lwpins $0x12345678, (%edi), %eax
lwpins $0x12345678, (%esi), %ecx
lwpins $0x12345678, (%ebp), %edx
lwpins $0x12345678, (%esp), %ebx
lwpins $0x12345678, (%ebx), %esp
lwpins $0x12345678, (%edx), %ebp
lwpins $0x12345678, (%ecx), %esi
lwpins $0x12345678, (%eax), %edi
lwpval $0x12345678, (%edi), %eax
lwpval $0x12345678, (%esi), %ecx
lwpval $0x12345678, (%ebp), %edx
lwpval $0x12345678, (%esp), %ebx
lwpval $0x12345678, (%ebx), %esp
lwpval $0x12345678, (%edx), %ebp
lwpval $0x12345678, (%ecx), %esi
lwpval $0x12345678, (%eax), %edi
# Memory-operand forms with a displacement (0xcafe).
lwpins $0x12345678, 0xcafe(%edi), %eax
lwpins $0x12345678, 0xcafe(%esi), %ecx
lwpins $0x12345678, 0xcafe(%ebp), %edx
lwpins $0x12345678, 0xcafe(%esp), %ebx
lwpins $0x12345678, 0xcafe(%ebx), %esp
lwpins $0x12345678, 0xcafe(%edx), %ebp
lwpins $0x12345678, 0xcafe(%ecx), %esi
lwpins $0x12345678, 0xcafe(%eax), %edi
lwpval $0x12345678, 0xcafe(%edi), %eax
lwpval $0x12345678, 0xcafe(%esi), %ecx
lwpval $0x12345678, 0xcafe(%ebp), %edx
lwpval $0x12345678, 0xcafe(%esp), %ebx
lwpval $0x12345678, 0xcafe(%ebx), %esp
lwpval $0x12345678, 0xcafe(%edx), %ebp
lwpval $0x12345678, 0xcafe(%ecx), %esi
lwpval $0x12345678, 0xcafe(%eax), %edi
|
tactcomplabs/xbgas-binutils-gdb
| 1,604
|
gas/testsuite/gas/i386/lock-1.s
|
# Lockable Instructions
# Testsuite fixture: every instruction that legally accepts a LOCK
# prefix with a memory destination, exercised once in AT&T syntax and
# once in Intel syntax.  Lines must stay exactly as written — they are
# matched against a fixed expected-disassembly dump.
.text
foo:
# AT&T syntax: reg->mem and imm->mem forms of each lockable op.
lock add %eax, (%ebx)
lock addl $0x64, (%ebx)
lock adc %eax, (%ebx)
lock adcl $0x64, (%ebx)
lock and %eax, (%ebx)
lock andl $0x64, (%ebx)
lock btc %eax, (%ebx)
lock btcl $0x64, (%ebx)
lock btr %eax, (%ebx)
lock btrl $0x64, (%ebx)
lock bts %eax, (%ebx)
lock btsl $0x64, (%ebx)
lock cmpxchg %eax,(%ebx)
lock cmpxchg8b (%ebx)
lock decl (%ebx)
lock incl (%ebx)
lock negl (%ebx)
lock notl (%ebx)
lock or %eax, (%ebx)
lock orl $0x64, (%ebx)
lock sbb %eax, (%ebx)
lock sbbl $0x64, (%ebx)
lock sub %eax, (%ebx)
lock subl $0x64, (%ebx)
lock xadd %eax, (%ebx)
# xchg with a memory operand is implicitly locked; both operand orders.
lock xchg (%ebx), %eax
lock xchg %eax, (%ebx)
lock xor %eax, (%ebx)
lock xorl $0x64, (%ebx)
# Intel-syntax mirror of the same coverage.
.intel_syntax noprefix
lock add DWORD PTR [ebx],eax
lock add DWORD PTR [ebx],0x64
lock adc DWORD PTR [ebx],eax
lock adc DWORD PTR [ebx],0x64
lock and DWORD PTR [ebx],eax
lock and DWORD PTR [ebx],0x64
lock btc DWORD PTR [ebx],eax
lock btc DWORD PTR [ebx],0x64
lock btr DWORD PTR [ebx],eax
lock btr DWORD PTR [ebx],0x64
lock bts DWORD PTR [ebx],eax
lock bts DWORD PTR [ebx],0x64
lock cmpxchg DWORD PTR [ebx],eax
lock cmpxchg8b QWORD PTR [ebx]
lock dec DWORD PTR [ebx]
lock inc DWORD PTR [ebx]
lock neg DWORD PTR [ebx]
lock not DWORD PTR [ebx]
lock or DWORD PTR [ebx],eax
lock or DWORD PTR [ebx],0x64
lock sbb DWORD PTR [ebx],eax
lock sbb DWORD PTR [ebx],0x64
lock sub DWORD PTR [ebx],eax
lock sub DWORD PTR [ebx],0x64
lock xadd DWORD PTR [ebx],eax
lock xchg DWORD PTR [ebx],eax
lock xchg DWORD PTR [ebx],eax
lock xor DWORD PTR [ebx],eax
lock xor DWORD PTR [ebx],0x64
|
tactcomplabs/xbgas-binutils-gdb
| 697,567
|
gas/testsuite/gas/i386/avx512f.s
|
# Check 32bit AVX512F instructions
.allow_index_reg
.text
_start:
vaddpd %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vaddpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddpd (%ecx), %zmm5, %zmm6 # AVX512F
vaddpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vaddpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vaddpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vaddpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vaddpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vaddpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vaddpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vaddpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vaddps %zmm4, %zmm5, %zmm6 # AVX512F
vaddps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vaddps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vaddps (%ecx), %zmm5, %zmm6 # AVX512F
vaddps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vaddps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vaddps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddps 8192(%edx), %zmm5, %zmm6 # AVX512F
vaddps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vaddps -8256(%edx), %zmm5, %zmm6 # AVX512F
vaddps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vaddps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vaddps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vaddps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vaddsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vaddsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vaddsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vaddsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vaddsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vaddss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vaddss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vaddss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vaddss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vaddss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vaddss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vaddss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
valignd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
valignd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
valignd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
valignd $123, %zmm4, %zmm5, %zmm6 # AVX512F
valignd $123, (%ecx), %zmm5, %zmm6 # AVX512F
valignd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
valignd $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
valignd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
valignd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
valignd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
valignd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
valignd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
valignd $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
valignd $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
valignd $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vblendmpd %zmm4, %zmm5, %zmm6 # AVX512F
vblendmpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vblendmpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vblendmpd (%ecx), %zmm5, %zmm6 # AVX512F
vblendmpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vblendmpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vblendmpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vblendmpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vblendmpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vblendmpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vblendmpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vblendmpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vblendmps %zmm4, %zmm5, %zmm6 # AVX512F
vblendmps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vblendmps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vblendmps (%ecx), %zmm5, %zmm6 # AVX512F
vblendmps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vblendmps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vblendmps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmps 8192(%edx), %zmm5, %zmm6 # AVX512F
vblendmps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vblendmps -8256(%edx), %zmm5, %zmm6 # AVX512F
vblendmps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vblendmps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vblendmps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vblendmps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vbroadcastf32x4 (%ecx), %zmm6 # AVX512F
vbroadcastf32x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcastf32x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcastf32x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcastf32x4 2032(%edx), %zmm6 # AVX512F Disp8
vbroadcastf32x4 2048(%edx), %zmm6 # AVX512F
vbroadcastf32x4 -2048(%edx), %zmm6 # AVX512F Disp8
vbroadcastf32x4 -2064(%edx), %zmm6 # AVX512F
vbroadcastf64x4 (%ecx), %zmm6 # AVX512F
vbroadcastf64x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcastf64x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcastf64x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcastf64x4 4064(%edx), %zmm6 # AVX512F Disp8
vbroadcastf64x4 4096(%edx), %zmm6 # AVX512F
vbroadcastf64x4 -4096(%edx), %zmm6 # AVX512F Disp8
vbroadcastf64x4 -4128(%edx), %zmm6 # AVX512F
vbroadcasti32x4 (%ecx), %zmm6 # AVX512F
vbroadcasti32x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcasti32x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcasti32x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcasti32x4 2032(%edx), %zmm6 # AVX512F Disp8
vbroadcasti32x4 2048(%edx), %zmm6 # AVX512F
vbroadcasti32x4 -2048(%edx), %zmm6 # AVX512F Disp8
vbroadcasti32x4 -2064(%edx), %zmm6 # AVX512F
vbroadcasti64x4 (%ecx), %zmm6 # AVX512F
vbroadcasti64x4 (%ecx), %zmm6{%k7} # AVX512F
vbroadcasti64x4 (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcasti64x4 -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcasti64x4 4064(%edx), %zmm6 # AVX512F Disp8
vbroadcasti64x4 4096(%edx), %zmm6 # AVX512F
vbroadcasti64x4 -4096(%edx), %zmm6 # AVX512F Disp8
vbroadcasti64x4 -4128(%edx), %zmm6 # AVX512F
vbroadcastsd (%ecx), %zmm6 # AVX512F
vbroadcastsd (%ecx), %zmm6{%k7} # AVX512F
vbroadcastsd (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcastsd -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcastsd 1016(%edx), %zmm6 # AVX512F Disp8
vbroadcastsd 1024(%edx), %zmm6 # AVX512F
vbroadcastsd -1024(%edx), %zmm6 # AVX512F Disp8
vbroadcastsd -1032(%edx), %zmm6 # AVX512F
vbroadcastsd %xmm5, %zmm6{%k7} # AVX512F
vbroadcastsd %xmm5, %zmm6{%k7}{z} # AVX512F
vbroadcastss (%ecx), %zmm6 # AVX512F
vbroadcastss (%ecx), %zmm6{%k7} # AVX512F
vbroadcastss (%ecx), %zmm6{%k7}{z} # AVX512F
vbroadcastss -123456(%esp,%esi,8), %zmm6 # AVX512F
vbroadcastss 508(%edx), %zmm6 # AVX512F Disp8
vbroadcastss 512(%edx), %zmm6 # AVX512F
vbroadcastss -512(%edx), %zmm6 # AVX512F Disp8
vbroadcastss -516(%edx), %zmm6 # AVX512F
vbroadcastss %xmm5, %zmm6{%k7} # AVX512F
vbroadcastss %xmm5, %zmm6{%k7}{z} # AVX512F
vcmppd $0xab, %zmm5, %zmm6, %k5 # AVX512F
vcmppd $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmppd $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmppd $123, %zmm5, %zmm6, %k5 # AVX512F
vcmppd $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmppd $123, (%ecx), %zmm6, %k5 # AVX512F
vcmppd $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmppd $123, (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmppd $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmppd $123, 8192(%edx), %zmm6, %k5 # AVX512F
vcmppd $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmppd $123, -8256(%edx), %zmm6, %k5 # AVX512F
vcmppd $123, 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmppd $123, 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmppd $123, -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmppd $123, -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpeqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpeqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpeqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmplt_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmplt_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplt_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplt_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmplt_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplt_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmplt_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmplt_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmplt_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmplt_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmplt_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmplt_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpltpd %zmm5, %zmm6, %k5 # AVX512F
vcmpltpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpltpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpltpd (%ecx), %zmm6, %k5 # AVX512F
vcmpltpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpltpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpltpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpltpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpltpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpltpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpltpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpltpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpltpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpltpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmple_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmple_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmple_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmple_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmple_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmple_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmple_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmple_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmple_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmple_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmple_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmple_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmplepd %zmm5, %zmm6, %k5 # AVX512F
vcmplepd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplepd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplepd (%ecx), %zmm6, %k5 # AVX512F
vcmplepd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplepd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmplepd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplepd 8192(%edx), %zmm6, %k5 # AVX512F
vcmplepd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplepd -8256(%edx), %zmm6, %k5 # AVX512F
vcmplepd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmplepd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmplepd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmplepd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpunord_qpd %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_qpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunord_qpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_qpd (%ecx), %zmm6, %k5 # AVX512F
vcmpunord_qpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunord_qpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpunord_qpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_qpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunord_qpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_qpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunord_qpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_qpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpunord_qpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_qpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpunordpd %zmm5, %zmm6, %k5 # AVX512F
vcmpunordpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunordpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunordpd (%ecx), %zmm6, %k5 # AVX512F
vcmpunordpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunordpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpunordpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunordpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunordpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunordpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunordpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpunordpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpunordpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpunordpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpneqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpneqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpneqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnlt_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlt_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmpnlt_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlt_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnlt_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnlt_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnltpd %zmm5, %zmm6, %k5 # AVX512F
vcmpnltpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnltpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnltpd (%ecx), %zmm6, %k5 # AVX512F
vcmpnltpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnltpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnltpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnltpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnltpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnltpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnltpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnltpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnltpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnltpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnle_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnle_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmpnle_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnle_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnle_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnle_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnlepd %zmm5, %zmm6, %k5 # AVX512F
vcmpnlepd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlepd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlepd (%ecx), %zmm6, %k5 # AVX512F
vcmpnlepd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlepd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnlepd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlepd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlepd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlepd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlepd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnlepd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnlepd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnlepd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpord_qpd %zmm5, %zmm6, %k5 # AVX512F
vcmpord_qpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpord_qpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpord_qpd (%ecx), %zmm6, %k5 # AVX512F
vcmpord_qpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpord_qpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpord_qpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_qpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpord_qpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_qpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpord_qpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpord_qpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpord_qpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpord_qpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpordpd %zmm5, %zmm6, %k5 # AVX512F
vcmpordpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpordpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpordpd (%ecx), %zmm6, %k5 # AVX512F
vcmpordpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpordpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpordpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpordpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpordpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpordpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpordpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpordpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpordpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpordpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnge_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnge_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmpnge_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnge_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnge_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnge_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngepd %zmm5, %zmm6, %k5 # AVX512F
vcmpngepd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngepd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngepd (%ecx), %zmm6, %k5 # AVX512F
vcmpngepd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngepd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpngepd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngepd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngepd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngepd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngepd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngepd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngepd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngepd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
# ---------------------------------------------------------------------------
# VCMPPD pseudo-op mnemonics (packed double precision, 512-bit zmm forms).
# Each 14-line group below exercises the same operand templates for one
# comparison predicate suffix:
#   1. reg, reg -> mask            2. same, with {%k7} write-mask
#   3. {sae} (suppress-all-exceptions) form
#   4. plain memory operand        5. base+index*scale with large disp
#   6. {1to8} element broadcast
#   7-10. +/- displacements straddling the EVEX Disp8*N compressed range
#         for a full 64-byte vector (8128 = 127*64 fits, 8192 does not;
#         -8192 = -128*64 fits, -8256 does not) -- lines inside the range
#         are tagged "Disp8" by the generator.
#   11-14. the same boundary test for the 8-byte broadcast element
#         (1016 = 127*8 fits, 1024 does not; -1024 = -128*8 fits,
#         -1032 does not).
# The trailing "# AVX512F [Disp8]" comments are consumed by the test
# generator/expected-output tooling; do not edit the instruction text.
# ---------------------------------------------------------------------------
vcmpngt_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngt_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmpngt_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngt_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpngt_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngt_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngtpd %zmm5, %zmm6, %k5 # AVX512F
vcmpngtpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngtpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngtpd (%ecx), %zmm6, %k5 # AVX512F
vcmpngtpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngtpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpngtpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngtpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngtpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngtpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngtpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngtpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngtpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngtpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpfalse_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalse_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpfalse_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalse_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpfalse_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpfalse_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpfalsepd %zmm5, %zmm6, %k5 # AVX512F
vcmpfalsepd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalsepd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalsepd (%ecx), %zmm6, %k5 # AVX512F
vcmpfalsepd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalsepd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpfalsepd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalsepd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalsepd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalsepd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalsepd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpfalsepd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpfalsepd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpfalsepd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpge_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmpge_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpge_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpge_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmpge_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpge_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpge_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpge_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpge_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpge_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpge_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpge_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgepd %zmm5, %zmm6, %k5 # AVX512F
vcmpgepd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgepd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgepd (%ecx), %zmm6, %k5 # AVX512F
vcmpgepd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgepd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpgepd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgepd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgepd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgepd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgepd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgepd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgepd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgepd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgt_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgt_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmpgt_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgt_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpgt_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgt_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgt_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgt_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgtpd %zmm5, %zmm6, %k5 # AVX512F
vcmpgtpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgtpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgtpd (%ecx), %zmm6, %k5 # AVX512F
vcmpgtpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgtpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpgtpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgtpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgtpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgtpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgtpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgtpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgtpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgtpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmptrue_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrue_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmptrue_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrue_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmptrue_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmptrue_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmptruepd %zmm5, %zmm6, %k5 # AVX512F
vcmptruepd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptruepd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptruepd (%ecx), %zmm6, %k5 # AVX512F
vcmptruepd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptruepd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmptruepd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptruepd 8192(%edx), %zmm6, %k5 # AVX512F
vcmptruepd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptruepd -8256(%edx), %zmm6, %k5 # AVX512F
vcmptruepd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmptruepd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmptruepd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmptruepd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmplt_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmplt_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplt_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplt_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmplt_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplt_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmplt_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmplt_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmplt_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmplt_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmplt_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmplt_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmple_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmple_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmple_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmple_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmple_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmple_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmple_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmple_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmple_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmple_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmple_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmple_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpunord_spd %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_spd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunord_spd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_spd (%ecx), %zmm6, %k5 # AVX512F
vcmpunord_spd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunord_spd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpunord_spd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_spd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunord_spd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_spd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunord_spd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_spd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpunord_spd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_spd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnlt_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlt_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpnlt_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlt_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnlt_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnlt_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnle_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnle_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpnle_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnle_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnle_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnle_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpord_spd %zmm5, %zmm6, %k5 # AVX512F
vcmpord_spd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpord_spd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpord_spd (%ecx), %zmm6, %k5 # AVX512F
vcmpord_spd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpord_spd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpord_spd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_spd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpord_spd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_spd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpord_spd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpord_spd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpord_spd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpord_spd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpeq_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnge_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnge_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpnge_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnge_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpnge_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpnge_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngt_uqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngt_uqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpngt_uqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngt_uqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpngt_uqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpngt_uqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpfalse_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalse_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmpfalse_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalse_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpfalse_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpfalse_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_ospd %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_ospd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_ospd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_ospd (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_ospd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_ospd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_ospd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_ospd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_ospd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_ospd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_ospd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_ospd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpneq_ospd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_ospd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpge_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpge_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpge_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpge_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpge_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpge_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpge_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpge_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpge_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpge_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpge_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpge_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgt_oqpd %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_oqpd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgt_oqpd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_oqpd (%ecx), %zmm6, %k5 # AVX512F
vcmpgt_oqpd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgt_oqpd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmpgt_oqpd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqpd 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgt_oqpd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqpd -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgt_oqpd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqpd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmpgt_oqpd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqpd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmptrue_uspd %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uspd %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrue_uspd {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uspd (%ecx), %zmm6, %k5 # AVX512F
vcmptrue_uspd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrue_uspd (%eax){1to8}, %zmm6, %k5 # AVX512F
vcmptrue_uspd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uspd 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uspd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uspd -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uspd 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uspd 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vcmptrue_uspd -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uspd -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
# ---------------------------------------------------------------------------
# VCMPPS with an explicit imm8 predicate (packed single, 512-bit zmm forms).
# Exercises: reg-reg, {%k7} write-mask, {sae}, memory, indexed memory,
# {1to16} broadcast, and the EVEX Disp8*N compression boundaries -- a full
# 64-byte vector (8128 = 127*64 fits Disp8, -8192 = -128*64 fits) and the
# 4-byte broadcast element (508 = 127*4 fits, -512 = -128*4 fits).
# ---------------------------------------------------------------------------
vcmpps $0xab, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpps $0xab, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpps $123, (%ecx), %zmm6, %k5 # AVX512F
vcmpps $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpps $123, (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpps $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpps $123, 8192(%edx), %zmm6, %k5 # AVX512F
vcmpps $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpps $123, -8256(%edx), %zmm6, %k5 # AVX512F
vcmpps $123, 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpps $123, 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpps $123, -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpps $123, -516(%edx){1to16}, %zmm6, %k5 # AVX512F
# ---------------------------------------------------------------------------
# VCMPPS pseudo-op mnemonics (packed single precision, 512-bit zmm forms).
# Same 14-line template per predicate suffix as the VCMPPD section above,
# adjusted for the 4-byte element size: {1to16} broadcast instead of
# {1to8}, and broadcast Disp8*N boundaries at 508 = 127*4 (fits) vs 512
# (does not) and -512 = -128*4 (fits) vs -516 (does not). Full-vector
# memory boundaries stay at +/-8128/8192/-8192/-8256 (N = 64).
# The trailing "# AVX512F [Disp8]" comments are consumed by the test
# generator/expected-output tooling; do not edit the instruction text.
# ---------------------------------------------------------------------------
vcmpeq_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeqps %zmm5, %zmm6, %k5 # AVX512F
vcmpeqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeqps (%ecx), %zmm6, %k5 # AVX512F
vcmpeqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_osps %zmm5, %zmm6, %k5 # AVX512F
vcmplt_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplt_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplt_osps (%ecx), %zmm6, %k5 # AVX512F
vcmplt_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplt_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmplt_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmplt_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmplt_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpltps %zmm5, %zmm6, %k5 # AVX512F
vcmpltps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpltps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpltps (%ecx), %zmm6, %k5 # AVX512F
vcmpltps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpltps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpltps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpltps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpltps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpltps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpltps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpltps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpltps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpltps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_osps %zmm5, %zmm6, %k5 # AVX512F
vcmple_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmple_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmple_osps (%ecx), %zmm6, %k5 # AVX512F
vcmple_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmple_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmple_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmple_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmple_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpleps %zmm5, %zmm6, %k5 # AVX512F
vcmpleps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpleps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpleps (%ecx), %zmm6, %k5 # AVX512F
vcmpleps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpleps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpleps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpleps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpleps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpleps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpleps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpleps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpleps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpleps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_qps %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_qps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunord_qps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_qps (%ecx), %zmm6, %k5 # AVX512F
vcmpunord_qps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunord_qps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_qps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunord_qps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunord_qps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_qps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_qps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunordps %zmm5, %zmm6, %k5 # AVX512F
vcmpunordps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunordps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunordps (%ecx), %zmm6, %k5 # AVX512F
vcmpunordps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunordps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpunordps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunordps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunordps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunordps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunordps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunordps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunordps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunordps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneqps %zmm5, %zmm6, %k5 # AVX512F
vcmpneqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneqps (%ecx), %zmm6, %k5 # AVX512F
vcmpneqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlt_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpnlt_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlt_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnltps %zmm5, %zmm6, %k5 # AVX512F
vcmpnltps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnltps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnltps (%ecx), %zmm6, %k5 # AVX512F
vcmpnltps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnltps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnltps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnltps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnltps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnltps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnltps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnltps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnltps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnltps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnle_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpnle_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnle_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnle_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnle_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnleps %zmm5, %zmm6, %k5 # AVX512F
vcmpnleps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnleps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnleps (%ecx), %zmm6, %k5 # AVX512F
vcmpnleps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnleps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnleps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnleps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnleps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnleps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnleps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnleps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnleps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnleps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_qps %zmm5, %zmm6, %k5 # AVX512F
vcmpord_qps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpord_qps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpord_qps (%ecx), %zmm6, %k5 # AVX512F
vcmpord_qps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpord_qps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpord_qps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_qps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpord_qps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_qps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpord_qps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_qps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_qps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_qps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpordps %zmm5, %zmm6, %k5 # AVX512F
vcmpordps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpordps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpordps (%ecx), %zmm6, %k5 # AVX512F
vcmpordps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpordps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpordps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpordps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpordps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpordps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpordps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpordps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpordps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpordps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnge_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpnge_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnge_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnge_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnge_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngeps %zmm5, %zmm6, %k5 # AVX512F
vcmpngeps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngeps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngeps (%ecx), %zmm6, %k5 # AVX512F
vcmpngeps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngeps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngeps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngeps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngeps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngeps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngeps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngeps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngeps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngeps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngt_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpngt_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngt_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngt_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngt_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngtps %zmm5, %zmm6, %k5 # AVX512F
vcmpngtps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngtps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngtps (%ecx), %zmm6, %k5 # AVX512F
vcmpngtps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngtps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngtps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngtps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngtps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngtps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngtps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngtps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngtps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngtps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalse_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpfalse_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalse_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalseps %zmm5, %zmm6, %k5 # AVX512F
vcmpfalseps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalseps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalseps (%ecx), %zmm6, %k5 # AVX512F
vcmpfalseps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalseps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpfalseps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalseps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalseps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalseps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalseps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalseps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalseps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalseps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpge_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpge_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpge_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpge_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpge_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpge_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpge_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpge_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgeps %zmm5, %zmm6, %k5 # AVX512F
vcmpgeps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgeps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgeps (%ecx), %zmm6, %k5 # AVX512F
vcmpgeps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgeps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgeps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgeps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgeps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgeps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgeps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgeps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgeps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgeps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgt_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpgt_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgt_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgt_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgt_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgtps %zmm5, %zmm6, %k5 # AVX512F
vcmpgtps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgtps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgtps (%ecx), %zmm6, %k5 # AVX512F
vcmpgtps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgtps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgtps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgtps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgtps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgtps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgtps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgtps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgtps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgtps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrue_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmptrue_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrue_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrue_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrueps %zmm5, %zmm6, %k5 # AVX512F
vcmptrueps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrueps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrueps (%ecx), %zmm6, %k5 # AVX512F
vcmptrueps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrueps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmptrueps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrueps 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrueps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrueps -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrueps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrueps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrueps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrueps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmplt_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmplt_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmplt_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmplt_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmplt_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmplt_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmplt_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmplt_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmplt_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmplt_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmple_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmple_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmple_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmple_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmple_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmple_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmple_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmple_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmple_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmple_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmple_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_sps %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_sps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpunord_sps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpunord_sps (%ecx), %zmm6, %k5 # AVX512F
vcmpunord_sps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpunord_sps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_sps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpunord_sps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpunord_sps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpunord_sps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpunord_sps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnlt_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnlt_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpnlt_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnlt_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnlt_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnlt_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnlt_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnle_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnle_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpnle_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnle_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnle_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnle_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnle_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_sps %zmm5, %zmm6, %k5 # AVX512F
vcmpord_sps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpord_sps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpord_sps (%ecx), %zmm6, %k5 # AVX512F
vcmpord_sps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpord_sps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpord_sps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_sps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpord_sps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpord_sps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpord_sps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_sps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpord_sps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpord_sps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_usps %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpeq_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpeq_usps (%ecx), %zmm6, %k5 # AVX512F
vcmpeq_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpeq_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpeq_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpeq_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpeq_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpeq_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpnge_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpnge_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpnge_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpnge_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpnge_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpnge_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpnge_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_uqps %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpngt_uqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpngt_uqps (%ecx), %zmm6, %k5 # AVX512F
vcmpngt_uqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpngt_uqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_uqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpngt_uqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpngt_uqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpngt_uqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpfalse_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpfalse_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpfalse_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpfalse_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpfalse_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpfalse_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpfalse_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_osps %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_osps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpneq_osps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpneq_osps (%ecx), %zmm6, %k5 # AVX512F
vcmpneq_osps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpneq_osps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_osps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpneq_osps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpneq_osps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpneq_osps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpneq_osps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpge_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpge_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpge_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpge_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpge_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpge_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpge_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpge_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpge_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpge_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_oqps %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_oqps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmpgt_oqps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmpgt_oqps (%ecx), %zmm6, %k5 # AVX512F
vcmpgt_oqps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmpgt_oqps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_oqps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps 8192(%edx), %zmm6, %k5 # AVX512F
vcmpgt_oqps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps -8256(%edx), %zmm6, %k5 # AVX512F
vcmpgt_oqps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpgt_oqps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmpgt_oqps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_usps %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_usps %zmm5, %zmm6, %k5{%k7} # AVX512F
vcmptrue_usps {sae}, %zmm5, %zmm6, %k5 # AVX512F
vcmptrue_usps (%ecx), %zmm6, %k5 # AVX512F
vcmptrue_usps -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vcmptrue_usps (%eax){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_usps 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps 8192(%edx), %zmm6, %k5 # AVX512F
vcmptrue_usps -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps -8256(%edx), %zmm6, %k5 # AVX512F
vcmptrue_usps 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmptrue_usps -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vcmptrue_usps -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vcmpsd $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpsd $123, 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpsd $123, -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpsd $123, -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruesd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruesd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptruesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptruesd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruesd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruesd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruesd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_ssd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_ssd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_ssd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_ssd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_ssd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_ssd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpss $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpss $123, (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpss $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpss $123, 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpss $123, 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpss $123, -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpss $123, -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpltss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpltss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpltss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpltss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpltss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpless %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpless {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpless (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpless -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpless 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpless 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpless -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpless -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_qss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_qss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunordss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunordss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunordss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunordss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunordss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnltss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnltss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnltss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnltss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnltss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnless %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnless {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnless (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnless -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnless 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnless 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnless -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnless -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_qss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_qss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_qss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpordss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpordss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpordss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpordss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpordss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngtss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngtss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngtss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngtss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngtss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalsess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalsess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalsess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgtss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgtss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgtss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgtss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgtss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruess %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruess {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptruess (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptruess -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptruess 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruess 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptruess -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptruess -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmplt_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmplt_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmple_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmple_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmple_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_sss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpunord_sss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpunord_sss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnlt_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnlt_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnle_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnle_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_sss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpord_sss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_sss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpord_sss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpord_sss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpeq_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpeq_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpnge_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpnge_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpngt_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpngt_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpfalse_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpfalse_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_osss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpneq_osss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpneq_osss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpge_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpge_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmpgt_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmpgt_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss (%ecx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss 508(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_usss 512(%edx), %xmm5, %k5{%k7} # AVX512F
vcmptrue_usss -512(%edx), %xmm5, %k5{%k7} # AVX512F Disp8
vcmptrue_usss -516(%edx), %xmm5, %k5{%k7} # AVX512F
vcomisd {sae}, %xmm5, %xmm6 # AVX512F
vcomiss {sae}, %xmm5, %xmm6 # AVX512F
vcompresspd %zmm6, (%ecx) # AVX512F
vcompresspd %zmm6, (%ecx){%k7} # AVX512F
vcompresspd %zmm6, -123456(%esp,%esi,8) # AVX512F
vcompresspd %zmm6, 1016(%edx) # AVX512F Disp8
vcompresspd %zmm6, 1024(%edx) # AVX512F
vcompresspd %zmm6, -1024(%edx) # AVX512F Disp8
vcompresspd %zmm6, -1032(%edx) # AVX512F
vcompresspd %zmm5, %zmm6 # AVX512F
vcompresspd %zmm5, %zmm6{%k7} # AVX512F
vcompresspd %zmm5, %zmm6{%k7}{z} # AVX512F
vcompressps %zmm6, (%ecx) # AVX512F
vcompressps %zmm6, (%ecx){%k7} # AVX512F
vcompressps %zmm6, -123456(%esp,%esi,8) # AVX512F
vcompressps %zmm6, 508(%edx) # AVX512F Disp8
vcompressps %zmm6, 512(%edx) # AVX512F
vcompressps %zmm6, -512(%edx) # AVX512F Disp8
vcompressps %zmm6, -516(%edx) # AVX512F
vcompressps %zmm5, %zmm6 # AVX512F
vcompressps %zmm5, %zmm6{%k7} # AVX512F
vcompressps %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtdq2pd %ymm5, %zmm6{%k7} # AVX512F
vcvtdq2pd %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtdq2pd (%ecx), %zmm6{%k7} # AVX512F
vcvtdq2pd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtdq2pd (%eax){1to8}, %zmm6{%k7} # AVX512F
vcvtdq2pd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd 4096(%edx), %zmm6{%k7} # AVX512F
vcvtdq2pd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd -4128(%edx), %zmm6{%k7} # AVX512F
vcvtdq2pd 508(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd 512(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtdq2pd -512(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtdq2pd -516(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtdq2ps %zmm5, %zmm6 # AVX512F
vcvtdq2ps %zmm5, %zmm6{%k7} # AVX512F
vcvtdq2ps %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtdq2ps {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtdq2ps (%ecx), %zmm6 # AVX512F
vcvtdq2ps -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtdq2ps (%eax){1to16}, %zmm6 # AVX512F
vcvtdq2ps 8128(%edx), %zmm6 # AVX512F Disp8
vcvtdq2ps 8192(%edx), %zmm6 # AVX512F
vcvtdq2ps -8192(%edx), %zmm6 # AVX512F Disp8
vcvtdq2ps -8256(%edx), %zmm6 # AVX512F
vcvtdq2ps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtdq2ps 512(%edx){1to16}, %zmm6 # AVX512F
vcvtdq2ps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtdq2ps -516(%edx){1to16}, %zmm6 # AVX512F
vcvtpd2dq %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtpd2dq {rn-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq {ru-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq {rd-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq {rz-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2dq (%ecx), %ymm6{%k7} # AVX512F
vcvtpd2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvtpd2dq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2dq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq 8192(%edx), %ymm6{%k7} # AVX512F
vcvtpd2dq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq -8256(%edx), %ymm6{%k7} # AVX512F
vcvtpd2dq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2dq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2dq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2ps %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtpd2ps {rn-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps {ru-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps {rd-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps {rz-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2ps (%ecx), %ymm6{%k7} # AVX512F
vcvtpd2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvtpd2ps (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2ps 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps 8192(%edx), %ymm6{%k7} # AVX512F
vcvtpd2ps -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps -8256(%edx), %ymm6{%k7} # AVX512F
vcvtpd2ps 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2ps -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2ps -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2udq %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtpd2udq {rn-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq {ru-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq {rd-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq {rz-sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtpd2udq (%ecx), %ymm6{%k7} # AVX512F
vcvtpd2udq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvtpd2udq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2udq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq 8192(%edx), %ymm6{%k7} # AVX512F
vcvtpd2udq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq -8256(%edx), %ymm6{%k7} # AVX512F
vcvtpd2udq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtpd2udq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvtpd2udq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvtph2ps %ymm5, %zmm6{%k7} # AVX512F
vcvtph2ps %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtph2ps {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtph2ps (%ecx), %zmm6{%k7} # AVX512F
vcvtph2ps -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtph2ps 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtph2ps 4096(%edx), %zmm6{%k7} # AVX512F
vcvtph2ps -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtph2ps -4128(%edx), %zmm6{%k7} # AVX512F
vcvtps2dq %zmm5, %zmm6 # AVX512F
vcvtps2dq %zmm5, %zmm6{%k7} # AVX512F
vcvtps2dq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtps2dq {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2dq (%ecx), %zmm6 # AVX512F
vcvtps2dq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtps2dq (%eax){1to16}, %zmm6 # AVX512F
vcvtps2dq 8128(%edx), %zmm6 # AVX512F Disp8
vcvtps2dq 8192(%edx), %zmm6 # AVX512F
vcvtps2dq -8192(%edx), %zmm6 # AVX512F Disp8
vcvtps2dq -8256(%edx), %zmm6 # AVX512F
vcvtps2dq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2dq 512(%edx){1to16}, %zmm6 # AVX512F
vcvtps2dq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2dq -516(%edx){1to16}, %zmm6 # AVX512F
vcvtps2pd %ymm5, %zmm6{%k7} # AVX512F
vcvtps2pd %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtps2pd {sae}, %ymm5, %zmm6{%k7} # AVX512F
vcvtps2pd (%ecx), %zmm6{%k7} # AVX512F
vcvtps2pd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtps2pd (%eax){1to8}, %zmm6{%k7} # AVX512F
vcvtps2pd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtps2pd 4096(%edx), %zmm6{%k7} # AVX512F
vcvtps2pd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtps2pd -4128(%edx), %zmm6{%k7} # AVX512F
vcvtps2pd 508(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtps2pd 512(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtps2pd -512(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtps2pd -516(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtps2ph $0xab, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $0xab, %zmm5, %ymm6{%k7}{z} # AVX512F
vcvtps2ph $0xab, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $123, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2ph $123, {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvtps2udq %zmm5, %zmm6 # AVX512F
vcvtps2udq %zmm5, %zmm6{%k7} # AVX512F
vcvtps2udq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtps2udq {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtps2udq (%ecx), %zmm6 # AVX512F
vcvtps2udq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtps2udq (%eax){1to16}, %zmm6 # AVX512F
vcvtps2udq 8128(%edx), %zmm6 # AVX512F Disp8
vcvtps2udq 8192(%edx), %zmm6 # AVX512F
vcvtps2udq -8192(%edx), %zmm6 # AVX512F Disp8
vcvtps2udq -8256(%edx), %zmm6 # AVX512F
vcvtps2udq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2udq 512(%edx){1to16}, %zmm6 # AVX512F
vcvtps2udq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtps2udq -516(%edx){1to16}, %zmm6 # AVX512F
vcvtsd2si {rn-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {ru-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {rd-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {rz-sae}, %xmm6, %eax # AVX512F
vcvtsd2si {rn-sae}, %xmm6, %ebp # AVX512F
vcvtsd2si {ru-sae}, %xmm6, %ebp # AVX512F
vcvtsd2si {rd-sae}, %xmm6, %ebp # AVX512F
vcvtsd2si {rz-sae}, %xmm6, %ebp # AVX512F
vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vcvtsd2ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtsd2ss 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtsd2ss -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtsd2ss -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtsi2ssl %eax, {rn-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %eax, {ru-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %eax, {rd-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %eax, {rz-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {rn-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {ru-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {rd-sae}, %xmm5, %xmm6 # AVX512F
vcvtsi2ssl %ebp, {rz-sae}, %xmm5, %xmm6 # AVX512F
vcvtss2sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtss2sd 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2sd -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vcvtss2sd -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vcvtss2si {rn-sae}, %xmm6, %eax # AVX512F
vcvtss2si {ru-sae}, %xmm6, %eax # AVX512F
vcvtss2si {rd-sae}, %xmm6, %eax # AVX512F
vcvtss2si {rz-sae}, %xmm6, %eax # AVX512F
vcvtss2si {rn-sae}, %xmm6, %ebp # AVX512F
vcvtss2si {ru-sae}, %xmm6, %ebp # AVX512F
vcvtss2si {rd-sae}, %xmm6, %ebp # AVX512F
vcvtss2si {rz-sae}, %xmm6, %ebp # AVX512F
vcvttpd2dq %zmm5, %ymm6{%k7} # AVX512F
vcvttpd2dq %zmm5, %ymm6{%k7}{z} # AVX512F
vcvttpd2dq {sae}, %zmm5, %ymm6{%k7} # AVX512F
vcvttpd2dq (%ecx), %ymm6{%k7} # AVX512F
vcvttpd2dq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512F
vcvttpd2dq (%eax){1to8}, %ymm6{%k7} # AVX512F
vcvttpd2dq 8128(%edx), %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq 8192(%edx), %ymm6{%k7} # AVX512F
vcvttpd2dq -8192(%edx), %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq -8256(%edx), %ymm6{%k7} # AVX512F
vcvttpd2dq 1016(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq 1024(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvttpd2dq -1024(%edx){1to8}, %ymm6{%k7} # AVX512F Disp8
vcvttpd2dq -1032(%edx){1to8}, %ymm6{%k7} # AVX512F
vcvttps2dq %zmm5, %zmm6 # AVX512F
vcvttps2dq %zmm5, %zmm6{%k7} # AVX512F
vcvttps2dq %zmm5, %zmm6{%k7}{z} # AVX512F
vcvttps2dq {sae}, %zmm5, %zmm6 # AVX512F
vcvttps2dq (%ecx), %zmm6 # AVX512F
vcvttps2dq -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvttps2dq (%eax){1to16}, %zmm6 # AVX512F
vcvttps2dq 8128(%edx), %zmm6 # AVX512F Disp8
vcvttps2dq 8192(%edx), %zmm6 # AVX512F
vcvttps2dq -8192(%edx), %zmm6 # AVX512F Disp8
vcvttps2dq -8256(%edx), %zmm6 # AVX512F
vcvttps2dq 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvttps2dq 512(%edx){1to16}, %zmm6 # AVX512F
vcvttps2dq -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvttps2dq -516(%edx){1to16}, %zmm6 # AVX512F
vcvttsd2si {sae}, %xmm6, %eax # AVX512F
vcvttsd2si {sae}, %xmm6, %ebp # AVX512F
vcvttss2si {sae}, %xmm6, %eax # AVX512F
vcvttss2si {sae}, %xmm6, %ebp # AVX512F
vcvtudq2pd %ymm5, %zmm6{%k7} # AVX512F
vcvtudq2pd %ymm5, %zmm6{%k7}{z} # AVX512F
vcvtudq2pd (%ecx), %zmm6{%k7} # AVX512F
vcvtudq2pd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vcvtudq2pd (%eax){1to8}, %zmm6{%k7} # AVX512F
vcvtudq2pd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd 4096(%edx), %zmm6{%k7} # AVX512F
vcvtudq2pd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd -4128(%edx), %zmm6{%k7} # AVX512F
vcvtudq2pd 508(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd 512(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtudq2pd -512(%edx){1to8}, %zmm6{%k7} # AVX512F Disp8
vcvtudq2pd -516(%edx){1to8}, %zmm6{%k7} # AVX512F
vcvtudq2ps %zmm5, %zmm6 # AVX512F
vcvtudq2ps %zmm5, %zmm6{%k7} # AVX512F
vcvtudq2ps %zmm5, %zmm6{%k7}{z} # AVX512F
vcvtudq2ps {rn-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps {ru-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps {rd-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps {rz-sae}, %zmm5, %zmm6 # AVX512F
vcvtudq2ps (%ecx), %zmm6 # AVX512F
vcvtudq2ps -123456(%esp,%esi,8), %zmm6 # AVX512F
vcvtudq2ps (%eax){1to16}, %zmm6 # AVX512F
vcvtudq2ps 8128(%edx), %zmm6 # AVX512F Disp8
vcvtudq2ps 8192(%edx), %zmm6 # AVX512F
vcvtudq2ps -8192(%edx), %zmm6 # AVX512F Disp8
vcvtudq2ps -8256(%edx), %zmm6 # AVX512F
vcvtudq2ps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtudq2ps 512(%edx){1to16}, %zmm6 # AVX512F
vcvtudq2ps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vcvtudq2ps -516(%edx){1to16}, %zmm6 # AVX512F
vdivpd %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vdivpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vdivpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivpd (%ecx), %zmm5, %zmm6 # AVX512F
vdivpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vdivpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vdivpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vdivpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vdivpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vdivpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vdivpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vdivpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vdivps %zmm4, %zmm5, %zmm6 # AVX512F
vdivps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vdivps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vdivps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vdivps (%ecx), %zmm5, %zmm6 # AVX512F
vdivps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vdivps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vdivps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivps 8192(%edx), %zmm5, %zmm6 # AVX512F
vdivps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vdivps -8256(%edx), %zmm5, %zmm6 # AVX512F
vdivps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vdivps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vdivps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vdivps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vdivsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vdivsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vdivsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vdivsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vdivsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vdivss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vdivss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vdivss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vdivss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vdivss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vdivss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vdivss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vexpandpd (%ecx), %zmm6 # AVX512F
vexpandpd (%ecx), %zmm6{%k7} # AVX512F
vexpandpd (%ecx), %zmm6{%k7}{z} # AVX512F
vexpandpd -123456(%esp,%esi,8), %zmm6 # AVX512F
vexpandpd 1016(%edx), %zmm6 # AVX512F Disp8
vexpandpd 1024(%edx), %zmm6 # AVX512F
vexpandpd -1024(%edx), %zmm6 # AVX512F Disp8
vexpandpd -1032(%edx), %zmm6 # AVX512F
vexpandpd %zmm5, %zmm6 # AVX512F
vexpandpd %zmm5, %zmm6{%k7} # AVX512F
vexpandpd %zmm5, %zmm6{%k7}{z} # AVX512F
vexpandps (%ecx), %zmm6 # AVX512F
vexpandps (%ecx), %zmm6{%k7} # AVX512F
vexpandps (%ecx), %zmm6{%k7}{z} # AVX512F
vexpandps -123456(%esp,%esi,8), %zmm6 # AVX512F
vexpandps 508(%edx), %zmm6 # AVX512F Disp8
vexpandps 512(%edx), %zmm6 # AVX512F
vexpandps -512(%edx), %zmm6 # AVX512F Disp8
vexpandps -516(%edx), %zmm6 # AVX512F
vexpandps %zmm5, %zmm6 # AVX512F
vexpandps %zmm5, %zmm6{%k7} # AVX512F
vexpandps %zmm5, %zmm6{%k7}{z} # AVX512F
vextractf32x4 $0xab, %zmm5, %xmm6{%k7} # AVX512F
vextractf32x4 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512F
vextractf32x4 $123, %zmm5, %xmm6{%k7} # AVX512F
vextractf64x4 $0xab, %zmm5, %ymm6{%k7} # AVX512F
vextractf64x4 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512F
vextractf64x4 $123, %zmm5, %ymm6{%k7} # AVX512F
vextracti32x4 $0xab, %zmm5, %xmm6{%k7} # AVX512F
vextracti32x4 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512F
vextracti32x4 $123, %zmm5, %xmm6{%k7} # AVX512F
vextracti64x4 $0xab, %zmm5, %ymm6{%k7} # AVX512F
vextracti64x4 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512F
vextracti64x4 $123, %zmm5, %ymm6{%k7} # AVX512F
vfmadd132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmadd231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmadd231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmadd231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmadd231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmadd231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmadd231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmadd231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmadd231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmadd231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmadd231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmadd231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmadd231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmadd231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmaddsub132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmaddsub132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmaddsub132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmaddsub132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmaddsub132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmaddsub132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmaddsub132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmaddsub132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmaddsub132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmaddsub213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmaddsub213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmaddsub213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmaddsub213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmaddsub213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmaddsub213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmaddsub213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmaddsub213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmaddsub231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmaddsub231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmaddsub231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmaddsub231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmaddsub231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmaddsub231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmaddsub231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmaddsub231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmaddsub231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmaddsub231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmaddsub231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsub132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsub132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmsub132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsub132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsub132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsub132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsub132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsub132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmsub132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsub132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsub132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsub132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsub213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsub213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmsub213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsub213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsub213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsub213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsub213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsub213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmsub213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsub213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsub213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsub213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsub231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsub231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmsub231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsub231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsub231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsub231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsub231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsub231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsub231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsub231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmsub231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsub231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsub231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsub231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsub231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsub231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsub231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsub231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfmsub231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfmsubadd132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsubadd132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsubadd132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmsubadd132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsubadd132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsubadd132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsubadd132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmsubadd132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsubadd132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsubadd213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsubadd213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmsubadd213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsubadd213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsubadd213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsubadd213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmsubadd213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsubadd213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsubadd231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsubadd231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfmsubadd231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsubadd231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfmsubadd231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfmsubadd231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfmsubadd231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfmsubadd231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfmsubadd231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfmsubadd231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfmsubadd231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmadd132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmadd132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmadd132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmadd132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmadd132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmadd132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmadd132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmadd132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmadd213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmadd213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmadd213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmadd213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmadd213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmadd213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmadd213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmadd213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmadd231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmadd231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmadd231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmadd231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmadd231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmadd231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmadd231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmadd231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmadd231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmadd231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmadd231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmadd231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmadd231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmadd231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub132pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub132pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub132pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub132pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub132pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub132pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub132ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub132ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub132ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub132ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub132ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub132ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub132ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub132ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub132ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub132ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub132ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub132ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub213pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub213pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub213pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub213pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub213pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub213pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub213ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub213ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub213ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub213ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub213ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub213ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub213ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub213ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub213ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub213ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub213ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub213ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231pd %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub231pd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub231pd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231pd (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub231pd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub231pd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub231pd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231pd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231pd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub231pd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231pd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vfnmsub231ps %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vfnmsub231ps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vfnmsub231ps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vfnmsub231ps (%ecx), %zmm5, %zmm6 # AVX512F
vfnmsub231ps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vfnmsub231ps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub231ps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps 8192(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231ps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps -8256(%edx), %zmm5, %zmm6 # AVX512F
vfnmsub231ps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub231ps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vfnmsub231ps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vfnmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vfnmsub231ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vfnmsub231ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgatherdpd 123(%ebp,%ymm7,8), %zmm6{%k1} # AVX512F
vgatherdpd 123(%ebp,%ymm7,8), %zmm6{%k1} # AVX512F
vgatherdpd 256(%eax,%ymm7), %zmm6{%k1} # AVX512F
vgatherdpd 1024(%ecx,%ymm7,4), %zmm6{%k1} # AVX512F
vgatherdps 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherdps 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherdps 256(%eax,%zmm7), %zmm6{%k1} # AVX512F
vgatherdps 1024(%ecx,%zmm7,4), %zmm6{%k1} # AVX512F
vgatherqpd 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherqpd 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vgatherqpd 256(%eax,%zmm7), %zmm6{%k1} # AVX512F
vgatherqpd 1024(%ecx,%zmm7,4), %zmm6{%k1} # AVX512F
vgatherqps 123(%ebp,%zmm7,8), %ymm6{%k1} # AVX512F
vgatherqps 123(%ebp,%zmm7,8), %ymm6{%k1} # AVX512F
vgatherqps 256(%eax,%zmm7), %ymm6{%k1} # AVX512F
vgatherqps 1024(%ecx,%zmm7,4), %ymm6{%k1} # AVX512F
vgetexppd %zmm5, %zmm6 # AVX512F
vgetexppd %zmm5, %zmm6{%k7} # AVX512F
vgetexppd %zmm5, %zmm6{%k7}{z} # AVX512F
vgetexppd {sae}, %zmm5, %zmm6 # AVX512F
vgetexppd (%ecx), %zmm6 # AVX512F
vgetexppd -123456(%esp,%esi,8), %zmm6 # AVX512F
vgetexppd (%eax){1to8}, %zmm6 # AVX512F
vgetexppd 8128(%edx), %zmm6 # AVX512F Disp8
vgetexppd 8192(%edx), %zmm6 # AVX512F
vgetexppd -8192(%edx), %zmm6 # AVX512F Disp8
vgetexppd -8256(%edx), %zmm6 # AVX512F
vgetexppd 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vgetexppd 1024(%edx){1to8}, %zmm6 # AVX512F
vgetexppd -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vgetexppd -1032(%edx){1to8}, %zmm6 # AVX512F
vgetexpps %zmm5, %zmm6 # AVX512F
vgetexpps %zmm5, %zmm6{%k7} # AVX512F
vgetexpps %zmm5, %zmm6{%k7}{z} # AVX512F
vgetexpps {sae}, %zmm5, %zmm6 # AVX512F
vgetexpps (%ecx), %zmm6 # AVX512F
vgetexpps -123456(%esp,%esi,8), %zmm6 # AVX512F
vgetexpps (%eax){1to16}, %zmm6 # AVX512F
vgetexpps 8128(%edx), %zmm6 # AVX512F Disp8
vgetexpps 8192(%edx), %zmm6 # AVX512F
vgetexpps -8192(%edx), %zmm6 # AVX512F Disp8
vgetexpps -8256(%edx), %zmm6 # AVX512F
vgetexpps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vgetexpps 512(%edx){1to16}, %zmm6 # AVX512F
vgetexpps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vgetexpps -516(%edx){1to16}, %zmm6 # AVX512F
vgetexpsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetexpsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vgetexpsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vgetexpsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
# VGETEXPSS (scalar single): register, zero-masking, SAE, plain/SIB memory,
# and the +/- Disp8*4 compression boundaries (508/512, -512/-516).
	vgetexpss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetexpss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetexpss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetexpss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetexpss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vgetexpss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetexpss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vgetexpss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VGETMANTPD zmm: imm8 ($0xab/$123), masking/zeroing, SAE, memory, SIB,
# {1to8} broadcast, Disp8*64 (8128/8192) and Disp8*8 broadcast (1016/1024) edges.
	vgetmantpd $0xab, %zmm5, %zmm6	 # AVX512F
	vgetmantpd $0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vgetmantpd $0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vgetmantpd $0xab, {sae}, %zmm5, %zmm6	 # AVX512F
	vgetmantpd $123, %zmm5, %zmm6	 # AVX512F
	vgetmantpd $123, {sae}, %zmm5, %zmm6	 # AVX512F
	vgetmantpd $123, (%ecx), %zmm6	 # AVX512F
	vgetmantpd $123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vgetmantpd $123, (%eax){1to8}, %zmm6	 # AVX512F
	vgetmantpd $123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vgetmantpd $123, 8192(%edx), %zmm6	 # AVX512F
	vgetmantpd $123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vgetmantpd $123, -8256(%edx), %zmm6	 # AVX512F
	vgetmantpd $123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vgetmantpd $123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vgetmantpd $123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vgetmantpd $123, -1032(%edx){1to8}, %zmm6	 # AVX512F
# VGETMANTPS zmm: as above with {1to16} broadcast and Disp8*4 (508/512) edges.
	vgetmantps $0xab, %zmm5, %zmm6	 # AVX512F
	vgetmantps $0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vgetmantps $0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vgetmantps $0xab, {sae}, %zmm5, %zmm6	 # AVX512F
	vgetmantps $123, %zmm5, %zmm6	 # AVX512F
	vgetmantps $123, {sae}, %zmm5, %zmm6	 # AVX512F
	vgetmantps $123, (%ecx), %zmm6	 # AVX512F
	vgetmantps $123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vgetmantps $123, (%eax){1to16}, %zmm6	 # AVX512F
	vgetmantps $123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vgetmantps $123, 8192(%edx), %zmm6	 # AVX512F
	vgetmantps $123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vgetmantps $123, -8256(%edx), %zmm6	 # AVX512F
	vgetmantps $123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vgetmantps $123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vgetmantps $123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vgetmantps $123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vgetmantps $123, -516(%edx){1to16}, %zmm6	 # AVX512F
# VGETMANTSD (scalar double): imm8, masking, SAE, memory, Disp8*8 edges.
	vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $123, 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vgetmantsd $123, 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantsd $123, -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vgetmantsd $123, -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VGETMANTSS (scalar single): imm8, masking, SAE, memory, Disp8*4 edges.
	vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vgetmantss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $123, 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vgetmantss $123, 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vgetmantss $123, -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vgetmantss $123, -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VINSERTF32X4 (insert 128-bit lane): imm8, masking/zeroing, memory forms,
# and the +/- Disp8*16 compression boundaries (2032/2048, -2048/-2064).
	vinsertf32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vinsertf32x4 $123, %xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf32x4 $123, (%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf32x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf32x4 $123, 2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinsertf32x4 $123, 2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf32x4 $123, -2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinsertf32x4 $123, -2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
# VINSERTF64X4 (insert 256-bit lane): Disp8*32 boundaries (4064/4096, -4096/-4128).
	vinsertf64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vinsertf64x4 $123, %ymm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf64x4 $123, (%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf64x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf64x4 $123, 4064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinsertf64x4 $123, 4096(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinsertf64x4 $123, -4096(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinsertf64x4 $123, -4128(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
# VINSERTI32X4: integer counterpart of vinsertf32x4, same operand matrix.
	vinserti32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti32x4 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vinserti32x4 $123, %xmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti32x4 $123, (%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti32x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti32x4 $123, 2032(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinserti32x4 $123, 2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti32x4 $123, -2048(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinserti32x4 $123, -2064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
# VINSERTI64X4: integer counterpart of vinsertf64x4, same operand matrix.
	vinserti64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti64x4 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vinserti64x4 $123, %ymm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti64x4 $123, (%ecx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti64x4 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti64x4 $123, 4064(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinserti64x4 $123, 4096(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
	vinserti64x4 $123, -4096(%edx), %zmm5, %zmm6{%k7}	 # AVX512F Disp8
	vinserti64x4 $123, -4128(%edx), %zmm5, %zmm6{%k7}	 # AVX512F
# VMAXPD zmm: register, masking/zeroing, SAE, memory, SIB, {1to8} broadcast,
# Disp8*64 (8128/8192) and Disp8*8 broadcast (1016/1024) boundary cases.
	vmaxpd %zmm4, %zmm5, %zmm6	 # AVX512F
	vmaxpd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vmaxpd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmaxpd {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmaxpd (%ecx), %zmm5, %zmm6	 # AVX512F
	vmaxpd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vmaxpd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vmaxpd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmaxpd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vmaxpd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmaxpd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vmaxpd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vmaxpd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vmaxpd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vmaxpd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# VMAXPS zmm: as above with {1to16} broadcast and Disp8*4 (508/512) edges.
	vmaxps %zmm4, %zmm5, %zmm6	 # AVX512F
	vmaxps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vmaxps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmaxps {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmaxps (%ecx), %zmm5, %zmm6	 # AVX512F
	vmaxps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vmaxps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vmaxps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmaxps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vmaxps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmaxps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vmaxps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vmaxps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vmaxps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vmaxps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# VMAXSD (scalar double): masking, SAE, memory, Disp8*8 edges.
	vmaxsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmaxsd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmaxsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmaxsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VMAXSS (scalar single): masking, SAE, memory, Disp8*4 edges.
	vmaxss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmaxss {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmaxss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmaxss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmaxss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VMIN* groups mirror the VMAX* operand matrices above.
	vminpd %zmm4, %zmm5, %zmm6	 # AVX512F
	vminpd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vminpd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vminpd {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vminpd (%ecx), %zmm5, %zmm6	 # AVX512F
	vminpd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vminpd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vminpd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vminpd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vminpd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vminpd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vminpd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vminpd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vminpd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vminpd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vminps %zmm4, %zmm5, %zmm6	 # AVX512F
	vminps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vminps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vminps {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vminps (%ecx), %zmm5, %zmm6	 # AVX512F
	vminps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vminps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vminps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vminps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vminps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vminps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vminps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vminps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vminps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vminps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vminsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vminsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vminsd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vminsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vminsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vminsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vminsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vminsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vminsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vminss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vminss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vminss {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vminss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vminss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vminss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vminss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vminss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vminss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# Full-vector move groups (vmovapd/aps, vmovddup, vmovdqa32/64, vmovdqu32/64):
# each tests reg-reg, masking/zeroing, memory loads, SIB, and Disp8*64 edges.
	vmovapd %zmm5, %zmm6	 # AVX512F
	vmovapd %zmm5, %zmm6{%k7}	 # AVX512F
	vmovapd %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovapd (%ecx), %zmm6	 # AVX512F
	vmovapd -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovapd 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovapd 8192(%edx), %zmm6	 # AVX512F
	vmovapd -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovapd -8256(%edx), %zmm6	 # AVX512F
	vmovaps %zmm5, %zmm6	 # AVX512F
	vmovaps %zmm5, %zmm6{%k7}	 # AVX512F
	vmovaps %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovaps (%ecx), %zmm6	 # AVX512F
	vmovaps -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovaps 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovaps 8192(%edx), %zmm6	 # AVX512F
	vmovaps -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovaps -8256(%edx), %zmm6	 # AVX512F
	vmovddup %zmm5, %zmm6	 # AVX512F
	vmovddup %zmm5, %zmm6{%k7}	 # AVX512F
	vmovddup %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovddup (%ecx), %zmm6	 # AVX512F
	vmovddup -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovddup 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovddup 8192(%edx), %zmm6	 # AVX512F
	vmovddup -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovddup -8256(%edx), %zmm6	 # AVX512F
	vmovdqa32 %zmm5, %zmm6	 # AVX512F
	vmovdqa32 %zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqa32 %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqa32 (%ecx), %zmm6	 # AVX512F
	vmovdqa32 -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovdqa32 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovdqa32 8192(%edx), %zmm6	 # AVX512F
	vmovdqa32 -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovdqa32 -8256(%edx), %zmm6	 # AVX512F
	vmovdqa64 %zmm5, %zmm6	 # AVX512F
	vmovdqa64 %zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqa64 %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqa64 (%ecx), %zmm6	 # AVX512F
	vmovdqa64 -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovdqa64 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovdqa64 8192(%edx), %zmm6	 # AVX512F
	vmovdqa64 -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovdqa64 -8256(%edx), %zmm6	 # AVX512F
	vmovdqu32 %zmm5, %zmm6	 # AVX512F
	vmovdqu32 %zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqu32 %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqu32 (%ecx), %zmm6	 # AVX512F
	vmovdqu32 -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovdqu32 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovdqu32 8192(%edx), %zmm6	 # AVX512F
	vmovdqu32 -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovdqu32 -8256(%edx), %zmm6	 # AVX512F
	vmovdqu64 %zmm5, %zmm6	 # AVX512F
	vmovdqu64 %zmm5, %zmm6{%k7}	 # AVX512F
	vmovdqu64 %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovdqu64 (%ecx), %zmm6	 # AVX512F
	vmovdqu64 -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovdqu64 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovdqu64 8192(%edx), %zmm6	 # AVX512F
	vmovdqu64 -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovdqu64 -8256(%edx), %zmm6	 # AVX512F
# Non-temporal groups: vmovntdq/ntpd/ntps are store-only (reg -> mem),
# vmovntdqa is load-only (mem -> reg); no masking forms exist for these.
	vmovntdq %zmm6, (%ecx)	 # AVX512F
	vmovntdq %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vmovntdq %zmm6, 8128(%edx)	 # AVX512F Disp8
	vmovntdq %zmm6, 8192(%edx)	 # AVX512F
	vmovntdq %zmm6, -8192(%edx)	 # AVX512F Disp8
	vmovntdq %zmm6, -8256(%edx)	 # AVX512F
	vmovntdqa (%ecx), %zmm6	 # AVX512F
	vmovntdqa -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovntdqa 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovntdqa 8192(%edx), %zmm6	 # AVX512F
	vmovntdqa -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovntdqa -8256(%edx), %zmm6	 # AVX512F
	vmovntpd %zmm6, (%ecx)	 # AVX512F
	vmovntpd %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vmovntpd %zmm6, 8128(%edx)	 # AVX512F Disp8
	vmovntpd %zmm6, 8192(%edx)	 # AVX512F
	vmovntpd %zmm6, -8192(%edx)	 # AVX512F Disp8
	vmovntpd %zmm6, -8256(%edx)	 # AVX512F
	vmovntps %zmm6, (%ecx)	 # AVX512F
	vmovntps %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vmovntps %zmm6, 8128(%edx)	 # AVX512F Disp8
	vmovntps %zmm6, 8192(%edx)	 # AVX512F
	vmovntps %zmm6, -8192(%edx)	 # AVX512F Disp8
	vmovntps %zmm6, -8256(%edx)	 # AVX512F
# VMOVSD: masked loads (Disp8*8 edges), masked stores, and reg-reg merge forms.
	vmovsd (%ecx), %xmm6{%k7}	 # AVX512F
	vmovsd (%ecx), %xmm6{%k7}{z}	 # AVX512F
	vmovsd -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512F
	vmovsd 1016(%edx), %xmm6{%k7}	 # AVX512F Disp8
	vmovsd 1024(%edx), %xmm6{%k7}	 # AVX512F
	vmovsd -1024(%edx), %xmm6{%k7}	 # AVX512F Disp8
	vmovsd -1032(%edx), %xmm6{%k7}	 # AVX512F
	vmovsd %xmm6, (%ecx){%k7}	 # AVX512F
	vmovsd %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512F
	vmovsd %xmm6, 1016(%edx){%k7}	 # AVX512F Disp8
	vmovsd %xmm6, 1024(%edx){%k7}	 # AVX512F
	vmovsd %xmm6, -1024(%edx){%k7}	 # AVX512F Disp8
	vmovsd %xmm6, -1032(%edx){%k7}	 # AVX512F
	vmovsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmovsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
# VMOVSHDUP / VMOVSLDUP: same operand matrix as the full-vector moves above.
	vmovshdup %zmm5, %zmm6	 # AVX512F
	vmovshdup %zmm5, %zmm6{%k7}	 # AVX512F
	vmovshdup %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovshdup (%ecx), %zmm6	 # AVX512F
	vmovshdup -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovshdup 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovshdup 8192(%edx), %zmm6	 # AVX512F
	vmovshdup -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovshdup -8256(%edx), %zmm6	 # AVX512F
	vmovsldup %zmm5, %zmm6	 # AVX512F
	vmovsldup %zmm5, %zmm6{%k7}	 # AVX512F
	vmovsldup %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovsldup (%ecx), %zmm6	 # AVX512F
	vmovsldup -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovsldup 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovsldup 8192(%edx), %zmm6	 # AVX512F
	vmovsldup -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovsldup -8256(%edx), %zmm6	 # AVX512F
# VMOVSS: like VMOVSD but with Disp8*4 boundaries (508/512, -512/-516).
	vmovss (%ecx), %xmm6{%k7}	 # AVX512F
	vmovss (%ecx), %xmm6{%k7}{z}	 # AVX512F
	vmovss -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512F
	vmovss 508(%edx), %xmm6{%k7}	 # AVX512F Disp8
	vmovss 512(%edx), %xmm6{%k7}	 # AVX512F
	vmovss -512(%edx), %xmm6{%k7}	 # AVX512F Disp8
	vmovss -516(%edx), %xmm6{%k7}	 # AVX512F
	vmovss %xmm6, (%ecx){%k7}	 # AVX512F
	vmovss %xmm6, -123456(%esp,%esi,8){%k7}	 # AVX512F
	vmovss %xmm6, 508(%edx){%k7}	 # AVX512F Disp8
	vmovss %xmm6, 512(%edx){%k7}	 # AVX512F
	vmovss %xmm6, -512(%edx){%k7}	 # AVX512F Disp8
	vmovss %xmm6, -516(%edx){%k7}	 # AVX512F
	vmovss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmovss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
# VMOVUPD / VMOVUPS: unaligned full-vector moves, same matrix as vmovapd/aps.
	vmovupd %zmm5, %zmm6	 # AVX512F
	vmovupd %zmm5, %zmm6{%k7}	 # AVX512F
	vmovupd %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovupd (%ecx), %zmm6	 # AVX512F
	vmovupd -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovupd 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovupd 8192(%edx), %zmm6	 # AVX512F
	vmovupd -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovupd -8256(%edx), %zmm6	 # AVX512F
	vmovups %zmm5, %zmm6	 # AVX512F
	vmovups %zmm5, %zmm6{%k7}	 # AVX512F
	vmovups %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmovups (%ecx), %zmm6	 # AVX512F
	vmovups -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vmovups 8128(%edx), %zmm6	 # AVX512F Disp8
	vmovups 8192(%edx), %zmm6	 # AVX512F
	vmovups -8192(%edx), %zmm6	 # AVX512F Disp8
	vmovups -8256(%edx), %zmm6	 # AVX512F
# VMULPD zmm: register, masking/zeroing, all four embedded-rounding modes,
# memory, SIB, {1to8} broadcast, Disp8*64 and Disp8*8 broadcast edges.
	vmulpd %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulpd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vmulpd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmulpd {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulpd {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulpd {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulpd {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulpd (%ecx), %zmm5, %zmm6	 # AVX512F
	vmulpd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vmulpd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vmulpd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmulpd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vmulpd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmulpd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vmulpd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vmulpd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vmulpd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vmulpd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# VMULPS zmm: as above with {1to16} broadcast and Disp8*4 edges.
	vmulps %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vmulps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vmulps {rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulps {ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulps {rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulps {rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vmulps (%ecx), %zmm5, %zmm6	 # AVX512F
	vmulps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vmulps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vmulps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmulps 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vmulps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vmulps -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vmulps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vmulps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vmulps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vmulps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# VMULSD (scalar double): masking, rounding modes, memory, Disp8*8 edges.
	vmulsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmulsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmulsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmulsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmulsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VMULSS (scalar single): masking, rounding modes, memory, Disp8*4 edges.
	vmulss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vmulss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmulss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vmulss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vmulss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# VPABSD zmm: register, masking/zeroing, memory, SIB, {1to16} broadcast,
# Disp8*64 (8128/8192) and Disp8*4 broadcast (508/512) boundary cases.
	vpabsd %zmm5, %zmm6	 # AVX512F
	vpabsd %zmm5, %zmm6{%k7}	 # AVX512F
	vpabsd %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpabsd (%ecx), %zmm6	 # AVX512F
	vpabsd -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpabsd (%eax){1to16}, %zmm6	 # AVX512F
	vpabsd 8128(%edx), %zmm6	 # AVX512F Disp8
	vpabsd 8192(%edx), %zmm6	 # AVX512F
	vpabsd -8192(%edx), %zmm6	 # AVX512F Disp8
	vpabsd -8256(%edx), %zmm6	 # AVX512F
	vpabsd 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpabsd 512(%edx){1to16}, %zmm6	 # AVX512F
	vpabsd -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpabsd -516(%edx){1to16}, %zmm6	 # AVX512F
# VPABSQ zmm: as above with {1to8} broadcast and Disp8*8 (1016/1024) edges.
	vpabsq %zmm5, %zmm6	 # AVX512F
	vpabsq %zmm5, %zmm6{%k7}	 # AVX512F
	vpabsq %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpabsq (%ecx), %zmm6	 # AVX512F
	vpabsq -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpabsq (%eax){1to8}, %zmm6	 # AVX512F
	vpabsq 8128(%edx), %zmm6	 # AVX512F Disp8
	vpabsq 8192(%edx), %zmm6	 # AVX512F
	vpabsq -8192(%edx), %zmm6	 # AVX512F Disp8
	vpabsq -8256(%edx), %zmm6	 # AVX512F
	vpabsq 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpabsq 1024(%edx){1to8}, %zmm6	 # AVX512F
	vpabsq -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpabsq -1032(%edx){1to8}, %zmm6	 # AVX512F
# VPADDD / VPADDQ: three-operand integer adds, same operand matrices.
	vpaddd %zmm4, %zmm5, %zmm6	 # AVX512F
	vpaddd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpaddd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpaddd (%ecx), %zmm5, %zmm6	 # AVX512F
	vpaddd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpaddd (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpaddd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpaddd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpaddd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpaddd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpaddd 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpaddd 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpaddd -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpaddd -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpaddq %zmm4, %zmm5, %zmm6	 # AVX512F
	vpaddq %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpaddq %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpaddq (%ecx), %zmm5, %zmm6	 # AVX512F
	vpaddq -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpaddq (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpaddq 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpaddq 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpaddq -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpaddq -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpaddq 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpaddq 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpaddq -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpaddq -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# Bitwise AND / ANDN groups. The d-suffixed forms use {1to16} broadcast with
# Disp8*4 edges; the q-suffixed forms use {1to8} broadcast with Disp8*8 edges.
	vpandd %zmm4, %zmm5, %zmm6	 # AVX512F
	vpandd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpandd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpandd (%ecx), %zmm5, %zmm6	 # AVX512F
	vpandd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpandd (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpandd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpandd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpandd 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandd 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpandd -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandd -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpandnd %zmm4, %zmm5, %zmm6	 # AVX512F
	vpandnd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpandnd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpandnd (%ecx), %zmm5, %zmm6	 # AVX512F
	vpandnd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpandnd (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpandnd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandnd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpandnd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandnd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpandnd 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandnd 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpandnd -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandnd -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpandnq %zmm4, %zmm5, %zmm6	 # AVX512F
	vpandnq %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpandnq %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpandnq (%ecx), %zmm5, %zmm6	 # AVX512F
	vpandnq -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpandnq (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpandnq 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandnq 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpandnq -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandnq -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpandnq 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandnq 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpandnq -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandnq -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpandq %zmm4, %zmm5, %zmm6	 # AVX512F
	vpandq %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpandq %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpandq (%ecx), %zmm5, %zmm6	 # AVX512F
	vpandq -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpandq (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpandq 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandq 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpandq -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpandq -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpandq 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandq 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpandq -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpandq -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# VPBLENDMD: mask-driven blend; register, masking/zeroing, memory, SIB,
# {1to16} broadcast, Disp8*64 and Disp8*4 broadcast boundary cases.
	vpblendmd %zmm4, %zmm5, %zmm6	 # AVX512F
	vpblendmd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpblendmd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpblendmd (%ecx), %zmm5, %zmm6	 # AVX512F
	vpblendmd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpblendmd (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpblendmd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpblendmd 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpblendmd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpblendmd -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpblendmd 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpblendmd 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpblendmd -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpblendmd -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
# VPBROADCASTD: memory source (Disp8*4 edges), xmm source, and GPR sources
# (%eax and %ebp — %ebp exercises the no-base-encoding ModRM special case).
	vpbroadcastd (%ecx), %zmm6	 # AVX512F
	vpbroadcastd (%ecx), %zmm6{%k7}	 # AVX512F
	vpbroadcastd (%ecx), %zmm6{%k7}{z}	 # AVX512F
	vpbroadcastd -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpbroadcastd 508(%edx), %zmm6	 # AVX512F Disp8
	vpbroadcastd 512(%edx), %zmm6	 # AVX512F
	vpbroadcastd -512(%edx), %zmm6	 # AVX512F Disp8
	vpbroadcastd -516(%edx), %zmm6	 # AVX512F
	vpbroadcastd %xmm5, %zmm6{%k7}	 # AVX512F
	vpbroadcastd %xmm5, %zmm6{%k7}{z}	 # AVX512F
	vpbroadcastd %eax, %zmm6	 # AVX512F
	vpbroadcastd %eax, %zmm6{%k7}	 # AVX512F
	vpbroadcastd %eax, %zmm6{%k7}{z}	 # AVX512F
	vpbroadcastd %ebp, %zmm6	 # AVX512F
# VPBROADCASTQ: memory (Disp8*8 edges) and xmm sources only here — no GPR
# form tested, presumably because a 64-bit GPR is unavailable in 32-bit mode.
	vpbroadcastq (%ecx), %zmm6	 # AVX512F
	vpbroadcastq (%ecx), %zmm6{%k7}	 # AVX512F
	vpbroadcastq (%ecx), %zmm6{%k7}{z}	 # AVX512F
	vpbroadcastq -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpbroadcastq 1016(%edx), %zmm6	 # AVX512F Disp8
	vpbroadcastq 1024(%edx), %zmm6	 # AVX512F
	vpbroadcastq -1024(%edx), %zmm6	 # AVX512F Disp8
	vpbroadcastq -1032(%edx), %zmm6	 # AVX512F
	vpbroadcastq %xmm5, %zmm6{%k7}	 # AVX512F
	vpbroadcastq %xmm5, %zmm6{%k7}{z}	 # AVX512F
# Integer compares into mask registers. vpcmpd/vpcmpq take an explicit imm8
# predicate; the named forms (lt/le/neq/nlt/nle/eq/gt) are assembler aliases.
# d forms use {1to16} broadcast with Disp8*4 edges; q forms use {1to8} with
# Disp8*8 edges; full-vector memory operands use Disp8*64 edges (8128/8192).
	vpcmpd $0xab, %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpd $0xab, %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpd $123, %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpd $123, (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpd $123, -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpd $123, (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpd $123, 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpd $123, 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpd $123, -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpd $123, -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpd $123, 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpd $123, 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpd $123, -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpd $123, -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpltd %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpltd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpltd (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpltd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpltd (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpltd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpltd 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpltd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpltd -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpltd 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpltd 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpltd -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpltd -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpled %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpled %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpled (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpled -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpled (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpled 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpled 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpled -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpled -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpled 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpled 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpled -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpled -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpneqd %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpneqd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpneqd (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpneqd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpneqd (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpneqd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqd 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpneqd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqd -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpneqd 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqd 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpneqd -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqd -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpnltd %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpnltd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpnltd (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpnltd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpnltd (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpnltd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltd 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnltd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltd -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnltd 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltd 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpnltd -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltd -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpnled %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpnled %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpnled (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpnled -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpnled (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpnled 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnled 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnled -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnled -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnled 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnled 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpnled -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnled -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
# vpcmpeqd/vpcmpgtd (and the q forms) have dedicated opcodes rather than
# being imm8 aliases of vpcmpd/vpcmpq.
	vpcmpeqd %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpeqd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpeqd (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpeqd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpeqd (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpeqd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqd 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpeqd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqd -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpeqd 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqd 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpeqd -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqd -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpeqq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpeqq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpeqq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpeqq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpeqq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpeqq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpeqq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpeqq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpeqq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpeqq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpgtd %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpgtd %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpgtd (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpgtd -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpgtd (%eax){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpgtd 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtd 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpgtd -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtd -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpgtd 508(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtd 512(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpgtd -512(%edx){1to16}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtd -516(%edx){1to16}, %zmm6, %k5	 # AVX512F
	vpcmpgtq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpgtq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpgtq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpgtq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpgtq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpgtq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpgtq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpgtq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpgtq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpgtq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpq $0xab, %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpq $0xab, %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpq $123, %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpq $123, (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpq $123, -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpq $123, (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpq $123, 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpq $123, 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpq $123, -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpq $123, -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpq $123, 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpq $123, 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpq $123, -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpq $123, -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpltq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpltq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpltq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpltq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpltq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpltq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpltq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpltq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpltq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpltq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpltq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpltq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpltq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpleq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpleq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpleq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpleq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpleq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpleq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpleq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpleq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpleq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpleq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpleq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpleq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpleq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpneqq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpneqq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpneqq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpneqq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpneqq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpneqq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpneqq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpneqq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpneqq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpneqq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpnltq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpnltq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpnltq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpnltq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpnltq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpnltq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnltq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnltq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpnltq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnltq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpnleq %zmm5, %zmm6, %k5	 # AVX512F
	vpcmpnleq %zmm5, %zmm6, %k5{%k7}	 # AVX512F
	vpcmpnleq (%ecx), %zmm6, %k5	 # AVX512F
	vpcmpnleq -123456(%esp,%esi,8), %zmm6, %k5	 # AVX512F
	vpcmpnleq (%eax){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpnleq 8128(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnleq 8192(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnleq -8192(%edx), %zmm6, %k5	 # AVX512F Disp8
	vpcmpnleq -8256(%edx), %zmm6, %k5	 # AVX512F
	vpcmpnleq 1016(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnleq 1024(%edx){1to8}, %zmm6, %k5	 # AVX512F
	vpcmpnleq -1024(%edx){1to8}, %zmm6, %k5	 # AVX512F Disp8
	vpcmpnleq -1032(%edx){1to8}, %zmm6, %k5	 # AVX512F
vpcmpud $0xab, %zmm5, %zmm6, %k5 # AVX512F
vpcmpud $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpud $123, %zmm5, %zmm6, %k5 # AVX512F
vpcmpud $123, (%ecx), %zmm6, %k5 # AVX512F
vpcmpud $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpud $123, (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpud $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpud $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpud $123, 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpud $123, -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpud $123, -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpequd %zmm5, %zmm6, %k5 # AVX512F
vpcmpequd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpequd (%ecx), %zmm6, %k5 # AVX512F
vpcmpequd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpequd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpequd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpequd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpequd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpequd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpequd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpequd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpltud %zmm5, %zmm6, %k5 # AVX512F
vpcmpltud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpltud (%ecx), %zmm6, %k5 # AVX512F
vpcmpltud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpltud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpltud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpltud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpltud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpltud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpltud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpltud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpleud %zmm5, %zmm6, %k5 # AVX512F
vpcmpleud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpleud (%ecx), %zmm6, %k5 # AVX512F
vpcmpleud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpleud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpleud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpleud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpleud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpleud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpleud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpleud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnequd %zmm5, %zmm6, %k5 # AVX512F
vpcmpnequd %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnequd (%ecx), %zmm6, %k5 # AVX512F
vpcmpnequd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnequd (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnequd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequd 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnequd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequd -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnequd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnequd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltud %zmm5, %zmm6, %k5 # AVX512F
vpcmpnltud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnltud (%ecx), %zmm6, %k5 # AVX512F
vpcmpnltud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnltud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnltud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnltud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnltud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnleud %zmm5, %zmm6, %k5 # AVX512F
vpcmpnleud %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnleud (%ecx), %zmm6, %k5 # AVX512F
vpcmpnleud -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnleud (%eax){1to16}, %zmm6, %k5 # AVX512F
vpcmpnleud 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleud 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnleud -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleud -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnleud 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleud 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpnleud -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleud -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vpcmpuq $0xab, %zmm5, %zmm6, %k5 # AVX512F
vpcmpuq $0xab, %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpuq $123, %zmm5, %zmm6, %k5 # AVX512F
vpcmpuq $123, (%ecx), %zmm6, %k5 # AVX512F
vpcmpuq $123, -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpuq $123, (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpuq $123, 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpuq $123, -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpuq $123, 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpuq $123, -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpuq $123, -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpequq %zmm5, %zmm6, %k5 # AVX512F
vpcmpequq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpequq (%ecx), %zmm6, %k5 # AVX512F
vpcmpequq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpequq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpequq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpequq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpequq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpequq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpequq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpequq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpequq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpltuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpltuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpltuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpltuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpltuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpltuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpltuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpltuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpltuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpltuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpltuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpltuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpleuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpleuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpleuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpleuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpleuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpleuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpleuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpleuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpleuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpleuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpleuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpleuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnequq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnequq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnequq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnequq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnequq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnequq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnequq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnequq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnequq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnequq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnequq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnltuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnltuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnltuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnltuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnltuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnltuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnltuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnltuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleuq %zmm5, %zmm6, %k5 # AVX512F
vpcmpnleuq %zmm5, %zmm6, %k5{%k7} # AVX512F
vpcmpnleuq (%ecx), %zmm6, %k5 # AVX512F
vpcmpnleuq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vpcmpnleuq (%eax){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleuq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq 8192(%edx), %zmm6, %k5 # AVX512F
vpcmpnleuq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq -8256(%edx), %zmm6, %k5 # AVX512F
vpcmpnleuq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vpcmpnleuq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vpcmpnleuq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
vpblendmq %zmm4, %zmm5, %zmm6 # AVX512F
vpblendmq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpblendmq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpblendmq (%ecx), %zmm5, %zmm6 # AVX512F
vpblendmq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpblendmq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpblendmq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpblendmq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpblendmq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpblendmq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpblendmq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpblendmq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpblendmq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpblendmq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpcompressd %zmm6, (%ecx) # AVX512F
vpcompressd %zmm6, (%ecx){%k7} # AVX512F
vpcompressd %zmm6, -123456(%esp,%esi,8) # AVX512F
vpcompressd %zmm6, 508(%edx) # AVX512F Disp8
vpcompressd %zmm6, 512(%edx) # AVX512F
vpcompressd %zmm6, -512(%edx) # AVX512F Disp8
vpcompressd %zmm6, -516(%edx) # AVX512F
vpcompressd %zmm5, %zmm6 # AVX512F
vpcompressd %zmm5, %zmm6{%k7} # AVX512F
vpcompressd %zmm5, %zmm6{%k7}{z} # AVX512F
vpermd %zmm4, %zmm5, %zmm6 # AVX512F
vpermd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermd (%ecx), %zmm5, %zmm6 # AVX512F
vpermd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermilpd $0xab, %zmm5, %zmm6 # AVX512F
vpermilpd $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpermilpd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilpd $123, %zmm5, %zmm6 # AVX512F
vpermilpd $123, (%ecx), %zmm6 # AVX512F
vpermilpd $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpermilpd $123, (%eax){1to8}, %zmm6 # AVX512F
vpermilpd $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpermilpd $123, 8192(%edx), %zmm6 # AVX512F
vpermilpd $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpermilpd $123, -8256(%edx), %zmm6 # AVX512F
vpermilpd $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermilpd $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpermilpd $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermilpd $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vpermilpd %zmm4, %zmm5, %zmm6 # AVX512F
vpermilpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermilpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilpd (%ecx), %zmm5, %zmm6 # AVX512F
vpermilpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermilpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpermilpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermilpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermilpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermilpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermilpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermilpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermilpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpermilpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpermilps $0xab, %zmm5, %zmm6 # AVX512F
vpermilps $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpermilps $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilps $123, %zmm5, %zmm6 # AVX512F
vpermilps $123, (%ecx), %zmm6 # AVX512F
vpermilps $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpermilps $123, (%eax){1to16}, %zmm6 # AVX512F
vpermilps $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpermilps $123, 8192(%edx), %zmm6 # AVX512F
vpermilps $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpermilps $123, -8256(%edx), %zmm6 # AVX512F
vpermilps $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vpermilps $123, 512(%edx){1to16}, %zmm6 # AVX512F
vpermilps $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vpermilps $123, -516(%edx){1to16}, %zmm6 # AVX512F
vpermilps %zmm4, %zmm5, %zmm6 # AVX512F
vpermilps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermilps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermilps (%ecx), %zmm5, %zmm6 # AVX512F
vpermilps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermilps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermilps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermilps 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermilps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermilps -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermilps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermilps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermilps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermilps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermpd $0xab, %zmm5, %zmm6 # AVX512F
vpermpd $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpermpd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermpd $123, %zmm5, %zmm6 # AVX512F
vpermpd $123, (%ecx), %zmm6 # AVX512F
vpermpd $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpermpd $123, (%eax){1to8}, %zmm6 # AVX512F
vpermpd $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpermpd $123, 8192(%edx), %zmm6 # AVX512F
vpermpd $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpermpd $123, -8256(%edx), %zmm6 # AVX512F
vpermpd $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermpd $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpermpd $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermpd $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vpermps %zmm4, %zmm5, %zmm6 # AVX512F
vpermps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpermps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermps (%ecx), %zmm5, %zmm6 # AVX512F
vpermps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpermps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpermps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermps 8192(%edx), %zmm5, %zmm6 # AVX512F
vpermps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpermps -8256(%edx), %zmm5, %zmm6 # AVX512F
vpermps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpermps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpermq $0xab, %zmm5, %zmm6 # AVX512F
vpermq $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpermq $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpermq $123, %zmm5, %zmm6 # AVX512F
vpermq $123, (%ecx), %zmm6 # AVX512F
vpermq $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpermq $123, (%eax){1to8}, %zmm6 # AVX512F
vpermq $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpermq $123, 8192(%edx), %zmm6 # AVX512F
vpermq $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpermq $123, -8256(%edx), %zmm6 # AVX512F
vpermq $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermq $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpermq $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpermq $123, -1032(%edx){1to8}, %zmm6 # AVX512F
vpexpandd (%ecx), %zmm6 # AVX512F
vpexpandd (%ecx), %zmm6{%k7} # AVX512F
vpexpandd (%ecx), %zmm6{%k7}{z} # AVX512F
vpexpandd -123456(%esp,%esi,8), %zmm6 # AVX512F
vpexpandd 508(%edx), %zmm6 # AVX512F Disp8
vpexpandd 512(%edx), %zmm6 # AVX512F
vpexpandd -512(%edx), %zmm6 # AVX512F Disp8
vpexpandd -516(%edx), %zmm6 # AVX512F
vpexpandd %zmm5, %zmm6 # AVX512F
vpexpandd %zmm5, %zmm6{%k7} # AVX512F
vpexpandd %zmm5, %zmm6{%k7}{z} # AVX512F
vpexpandq (%ecx), %zmm6 # AVX512F
vpexpandq (%ecx), %zmm6{%k7} # AVX512F
vpexpandq (%ecx), %zmm6{%k7}{z} # AVX512F
vpexpandq -123456(%esp,%esi,8), %zmm6 # AVX512F
vpexpandq 1016(%edx), %zmm6 # AVX512F Disp8
vpexpandq 1024(%edx), %zmm6 # AVX512F
vpexpandq -1024(%edx), %zmm6 # AVX512F Disp8
vpexpandq -1032(%edx), %zmm6 # AVX512F
vpexpandq %zmm5, %zmm6 # AVX512F
vpexpandq %zmm5, %zmm6{%k7} # AVX512F
vpexpandq %zmm5, %zmm6{%k7}{z} # AVX512F
vpgatherdd 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vpgatherdd 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vpgatherdd 256(%eax,%zmm7), %zmm6{%k1} # AVX512F
vpgatherdd 1024(%ecx,%zmm7,4), %zmm6{%k1} # AVX512F
vpgatherdq 123(%ebp,%ymm7,8), %zmm6{%k1} # AVX512F
vpgatherdq 123(%ebp,%ymm7,8), %zmm6{%k1} # AVX512F
vpgatherdq 256(%eax,%ymm7), %zmm6{%k1} # AVX512F
vpgatherdq 1024(%ecx,%ymm7,4), %zmm6{%k1} # AVX512F
vpgatherqd 123(%ebp,%zmm7,8), %ymm6{%k1} # AVX512F
vpgatherqd 123(%ebp,%zmm7,8), %ymm6{%k1} # AVX512F
vpgatherqd 256(%eax,%zmm7), %ymm6{%k1} # AVX512F
vpgatherqd 1024(%ecx,%zmm7,4), %ymm6{%k1} # AVX512F
vpgatherqq 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vpgatherqq 123(%ebp,%zmm7,8), %zmm6{%k1} # AVX512F
vpgatherqq 256(%eax,%zmm7), %zmm6{%k1} # AVX512F
vpgatherqq 1024(%ecx,%zmm7,4), %zmm6{%k1} # AVX512F
vpmaxsd %zmm4, %zmm5, %zmm6 # AVX512F
vpmaxsd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmaxsd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmaxsd (%ecx), %zmm5, %zmm6 # AVX512F
vpmaxsd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmaxsd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpmaxsd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxsd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmaxsd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxsd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmaxsd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxsd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpmaxsd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxsd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpmaxsq %zmm4, %zmm5, %zmm6 # AVX512F
vpmaxsq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmaxsq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmaxsq (%ecx), %zmm5, %zmm6 # AVX512F
vpmaxsq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmaxsq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpmaxsq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxsq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmaxsq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxsq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmaxsq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxsq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmaxsq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxsq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmaxud %zmm4, %zmm5, %zmm6 # AVX512F
vpmaxud %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmaxud %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmaxud (%ecx), %zmm5, %zmm6 # AVX512F
vpmaxud -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmaxud (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpmaxud 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxud 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmaxud -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxud -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmaxud 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxud 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpmaxud -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxud -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpmaxuq %zmm4, %zmm5, %zmm6 # AVX512F
vpmaxuq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmaxuq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmaxuq (%ecx), %zmm5, %zmm6 # AVX512F
vpmaxuq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmaxuq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpmaxuq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxuq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmaxuq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmaxuq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmaxuq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxuq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmaxuq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmaxuq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpminsd %zmm4, %zmm5, %zmm6 # AVX512F
vpminsd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpminsd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpminsd (%ecx), %zmm5, %zmm6 # AVX512F
vpminsd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpminsd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpminsd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminsd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpminsd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminsd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpminsd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpminsd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpminsd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpminsd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpminsq %zmm4, %zmm5, %zmm6 # AVX512F
vpminsq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpminsq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpminsq (%ecx), %zmm5, %zmm6 # AVX512F
vpminsq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpminsq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpminsq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminsq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpminsq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminsq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpminsq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpminsq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpminsq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpminsq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpminud %zmm4, %zmm5, %zmm6 # AVX512F
vpminud %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpminud %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpminud (%ecx), %zmm5, %zmm6 # AVX512F
vpminud -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpminud (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpminud 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminud 8192(%edx), %zmm5, %zmm6 # AVX512F
vpminud -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminud -8256(%edx), %zmm5, %zmm6 # AVX512F
vpminud 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpminud 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpminud -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpminud -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpminuq %zmm4, %zmm5, %zmm6 # AVX512F
vpminuq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpminuq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpminuq (%ecx), %zmm5, %zmm6 # AVX512F
vpminuq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpminuq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpminuq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminuq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpminuq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpminuq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpminuq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpminuq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpminuq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpminuq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmovsxbd %xmm5, %zmm6{%k7} # AVX512F
vpmovsxbd %xmm5, %zmm6{%k7}{z} # AVX512F
vpmovsxbd (%ecx), %zmm6{%k7} # AVX512F
vpmovsxbd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovsxbd 2032(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxbd 2048(%edx), %zmm6{%k7} # AVX512F
vpmovsxbd -2048(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxbd -2064(%edx), %zmm6{%k7} # AVX512F
vpmovsxbq %xmm5, %zmm6{%k7} # AVX512F
vpmovsxbq %xmm5, %zmm6{%k7}{z} # AVX512F
vpmovsxbq (%ecx), %zmm6{%k7} # AVX512F
vpmovsxbq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovsxbq 1016(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxbq 1024(%edx), %zmm6{%k7} # AVX512F
vpmovsxbq -1024(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxbq -1032(%edx), %zmm6{%k7} # AVX512F
vpmovsxdq %ymm5, %zmm6{%k7} # AVX512F
vpmovsxdq %ymm5, %zmm6{%k7}{z} # AVX512F
vpmovsxdq (%ecx), %zmm6{%k7} # AVX512F
vpmovsxdq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovsxdq 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxdq 4096(%edx), %zmm6{%k7} # AVX512F
vpmovsxdq -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxdq -4128(%edx), %zmm6{%k7} # AVX512F
vpmovsxwd %ymm5, %zmm6{%k7} # AVX512F
vpmovsxwd %ymm5, %zmm6{%k7}{z} # AVX512F
vpmovsxwd (%ecx), %zmm6{%k7} # AVX512F
vpmovsxwd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovsxwd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxwd 4096(%edx), %zmm6{%k7} # AVX512F
vpmovsxwd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxwd -4128(%edx), %zmm6{%k7} # AVX512F
vpmovsxwq %xmm5, %zmm6{%k7} # AVX512F
vpmovsxwq %xmm5, %zmm6{%k7}{z} # AVX512F
vpmovsxwq (%ecx), %zmm6{%k7} # AVX512F
vpmovsxwq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovsxwq 2032(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxwq 2048(%edx), %zmm6{%k7} # AVX512F
vpmovsxwq -2048(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovsxwq -2064(%edx), %zmm6{%k7} # AVX512F
vpmovzxbd %xmm5, %zmm6{%k7} # AVX512F
vpmovzxbd %xmm5, %zmm6{%k7}{z} # AVX512F
vpmovzxbd (%ecx), %zmm6{%k7} # AVX512F
vpmovzxbd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovzxbd 2032(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxbd 2048(%edx), %zmm6{%k7} # AVX512F
vpmovzxbd -2048(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxbd -2064(%edx), %zmm6{%k7} # AVX512F
vpmovzxbq %xmm5, %zmm6{%k7} # AVX512F
vpmovzxbq %xmm5, %zmm6{%k7}{z} # AVX512F
vpmovzxbq (%ecx), %zmm6{%k7} # AVX512F
vpmovzxbq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovzxbq 1016(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxbq 1024(%edx), %zmm6{%k7} # AVX512F
vpmovzxbq -1024(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxbq -1032(%edx), %zmm6{%k7} # AVX512F
vpmovzxdq %ymm5, %zmm6{%k7} # AVX512F
vpmovzxdq %ymm5, %zmm6{%k7}{z} # AVX512F
vpmovzxdq (%ecx), %zmm6{%k7} # AVX512F
vpmovzxdq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovzxdq 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxdq 4096(%edx), %zmm6{%k7} # AVX512F
vpmovzxdq -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxdq -4128(%edx), %zmm6{%k7} # AVX512F
vpmovzxwd %ymm5, %zmm6{%k7} # AVX512F
vpmovzxwd %ymm5, %zmm6{%k7}{z} # AVX512F
vpmovzxwd (%ecx), %zmm6{%k7} # AVX512F
vpmovzxwd -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovzxwd 4064(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxwd 4096(%edx), %zmm6{%k7} # AVX512F
vpmovzxwd -4096(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxwd -4128(%edx), %zmm6{%k7} # AVX512F
vpmovzxwq %xmm5, %zmm6{%k7} # AVX512F
vpmovzxwq %xmm5, %zmm6{%k7}{z} # AVX512F
vpmovzxwq (%ecx), %zmm6{%k7} # AVX512F
vpmovzxwq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512F
vpmovzxwq 2032(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxwq 2048(%edx), %zmm6{%k7} # AVX512F
vpmovzxwq -2048(%edx), %zmm6{%k7} # AVX512F Disp8
vpmovzxwq -2064(%edx), %zmm6{%k7} # AVX512F
vpmuldq %zmm4, %zmm5, %zmm6 # AVX512F
vpmuldq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmuldq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmuldq (%ecx), %zmm5, %zmm6 # AVX512F
vpmuldq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmuldq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpmuldq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmuldq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmuldq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmuldq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmuldq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmuldq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmuldq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmuldq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmulld %zmm4, %zmm5, %zmm6 # AVX512F
vpmulld %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmulld %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmulld (%ecx), %zmm5, %zmm6 # AVX512F
vpmulld -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmulld (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpmulld 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmulld 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmulld -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmulld -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmulld 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpmulld 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpmulld -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpmulld -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpmuludq %zmm4, %zmm5, %zmm6 # AVX512F
vpmuludq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpmuludq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpmuludq (%ecx), %zmm5, %zmm6 # AVX512F
vpmuludq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpmuludq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpmuludq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmuludq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpmuludq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpmuludq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpmuludq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmuludq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpmuludq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpmuludq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpord %zmm4, %zmm5, %zmm6 # AVX512F
vpord %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpord %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpord (%ecx), %zmm5, %zmm6 # AVX512F
vpord -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpord (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpord 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpord 8192(%edx), %zmm5, %zmm6 # AVX512F
vpord -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpord -8256(%edx), %zmm5, %zmm6 # AVX512F
vpord 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpord 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpord -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpord -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vporq %zmm4, %zmm5, %zmm6 # AVX512F
vporq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vporq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vporq (%ecx), %zmm5, %zmm6 # AVX512F
vporq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vporq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vporq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vporq 8192(%edx), %zmm5, %zmm6 # AVX512F
vporq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vporq -8256(%edx), %zmm5, %zmm6 # AVX512F
vporq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vporq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vporq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vporq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpscatterdd %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vpscatterdd %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vpscatterdd %zmm6, 256(%eax,%zmm7){%k1} # AVX512F
vpscatterdd %zmm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vpscatterdq %zmm6, 123(%ebp,%ymm7,8){%k1} # AVX512F
vpscatterdq %zmm6, 123(%ebp,%ymm7,8){%k1} # AVX512F
vpscatterdq %zmm6, 256(%eax,%ymm7){%k1} # AVX512F
vpscatterdq %zmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512F
vpscatterqd %ymm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vpscatterqd %ymm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vpscatterqd %ymm6, 256(%eax,%zmm7){%k1} # AVX512F
vpscatterqd %ymm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vpscatterqq %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vpscatterqq %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vpscatterqq %zmm6, 256(%eax,%zmm7){%k1} # AVX512F
vpscatterqq %zmm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
# vpshufd: immediate shuffle of dword elements.  $0xab probes the full 8-bit
# immediate range; $123 is the memory-form sweep value.  Mask {%k7} and
# zero-masking {%k7}{z} variants test EVEX masking encodings.
vpshufd $0xab, %zmm5, %zmm6 # AVX512F
vpshufd $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpshufd $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpshufd $123, %zmm5, %zmm6 # AVX512F
vpshufd $123, (%ecx), %zmm6 # AVX512F
vpshufd $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpshufd $123, (%eax){1to16}, %zmm6 # AVX512F
vpshufd $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpshufd $123, 8192(%edx), %zmm6 # AVX512F
vpshufd $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpshufd $123, -8256(%edx), %zmm6 # AVX512F
vpshufd $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vpshufd $123, 512(%edx){1to16}, %zmm6 # AVX512F
vpshufd $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vpshufd $123, -516(%edx){1to16}, %zmm6 # AVX512F
# vpslld/vpsllq with the shift count in xmm4 or a 128-bit memory operand;
# the memory form uses a 16-byte tuple, so the Disp8 boundary is 2032 = 127*16.
vpslld %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vpslld %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpslld (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vpslld -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vpslld 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpslld 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpslld -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpslld -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsllq %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsllq %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsllq (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vpsllq -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vpsllq 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsllq 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsllq -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsllq -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
# Variable (per-element) shifts: full zmm source plus broadcast forms.
vpsllvd %zmm4, %zmm5, %zmm6 # AVX512F
vpsllvd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsllvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsllvd (%ecx), %zmm5, %zmm6 # AVX512F
vpsllvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsllvd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpsllvd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsllvd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsllvd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsllvd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsllvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsllvd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsllvd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsllvd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsllvq %zmm4, %zmm5, %zmm6 # AVX512F
vpsllvq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsllvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsllvq (%ecx), %zmm5, %zmm6 # AVX512F
vpsllvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsllvq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpsllvq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsllvq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsllvq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsllvq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsllvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsllvq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpsllvq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsllvq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# Arithmetic right shifts with xmm/mem128 count (Disp8 boundary 2032 = 127*16).
vpsrad %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsrad %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrad (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vpsrad -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vpsrad 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsrad 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsrad -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsrad -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsraq %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsraq %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsraq (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vpsraq -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vpsraq 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsraq 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsraq -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsraq -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
# Variable arithmetic right shifts (per-element count in a full zmm / memory /
# broadcast source), with mask and zero-mask destination variants.
vpsravd %zmm4, %zmm5, %zmm6 # AVX512F
vpsravd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsravd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsravd (%ecx), %zmm5, %zmm6 # AVX512F
vpsravd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsravd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpsravd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsravd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsravd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsravd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsravd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsravd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsravd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsravd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsravq %zmm4, %zmm5, %zmm6 # AVX512F
vpsravq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsravq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsravq (%ecx), %zmm5, %zmm6 # AVX512F
vpsravq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsravq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpsravq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsravq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsravq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsravq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsravq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsravq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpsravq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsravq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# Logical right shifts with xmm/mem128 count (Disp8 boundary 2032 = 127*16).
vpsrld %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsrld %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrld (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vpsrld -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vpsrld 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsrld 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsrld -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsrld -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsrlq %xmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsrlq %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrlq (%ecx), %zmm5, %zmm6{%k7} # AVX512F
vpsrlq -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512F
vpsrlq 2032(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsrlq 2048(%edx), %zmm5, %zmm6{%k7} # AVX512F
vpsrlq -2048(%edx), %zmm5, %zmm6{%k7} # AVX512F Disp8
vpsrlq -2064(%edx), %zmm5, %zmm6{%k7} # AVX512F
# Variable logical right shifts, zmm/memory/broadcast count source.
vpsrlvd %zmm4, %zmm5, %zmm6 # AVX512F
vpsrlvd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsrlvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrlvd (%ecx), %zmm5, %zmm6 # AVX512F
vpsrlvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsrlvd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpsrlvd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsrlvd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsrlvd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsrlvd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsrlvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsrlvd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsrlvd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsrlvd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsrlvq %zmm4, %zmm5, %zmm6 # AVX512F
vpsrlvq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsrlvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrlvq (%ecx), %zmm5, %zmm6 # AVX512F
vpsrlvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsrlvq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpsrlvq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsrlvq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsrlvq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsrlvq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsrlvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsrlvq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpsrlvq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsrlvq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# Immediate-count logical right shifts; the memory operand is the shifted
# source, so full zmm (8128 = 127*64) and broadcast boundaries apply.
vpsrld $0xab, %zmm5, %zmm6 # AVX512F
vpsrld $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpsrld $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrld $123, %zmm5, %zmm6 # AVX512F
vpsrld $123, (%ecx), %zmm6 # AVX512F
vpsrld $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpsrld $123, (%eax){1to16}, %zmm6 # AVX512F
vpsrld $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpsrld $123, 8192(%edx), %zmm6 # AVX512F
vpsrld $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpsrld $123, -8256(%edx), %zmm6 # AVX512F
vpsrld $123, 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vpsrld $123, 512(%edx){1to16}, %zmm6 # AVX512F
vpsrld $123, -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vpsrld $123, -516(%edx){1to16}, %zmm6 # AVX512F
vpsrlq $0xab, %zmm5, %zmm6 # AVX512F
vpsrlq $0xab, %zmm5, %zmm6{%k7} # AVX512F
vpsrlq $0xab, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsrlq $123, %zmm5, %zmm6 # AVX512F
vpsrlq $123, (%ecx), %zmm6 # AVX512F
vpsrlq $123, -123456(%esp,%esi,8), %zmm6 # AVX512F
vpsrlq $123, (%eax){1to8}, %zmm6 # AVX512F
vpsrlq $123, 8128(%edx), %zmm6 # AVX512F Disp8
vpsrlq $123, 8192(%edx), %zmm6 # AVX512F
vpsrlq $123, -8192(%edx), %zmm6 # AVX512F Disp8
vpsrlq $123, -8256(%edx), %zmm6 # AVX512F
vpsrlq $123, 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vpsrlq $123, 1024(%edx){1to8}, %zmm6 # AVX512F
vpsrlq $123, -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vpsrlq $123, -1032(%edx){1to8}, %zmm6 # AVX512F
# Packed integer subtract: register, masked, zero-masked, memory, broadcast,
# and Disp8-boundary forms for both dword and qword element sizes.
vpsubd %zmm4, %zmm5, %zmm6 # AVX512F
vpsubd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsubd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsubd (%ecx), %zmm5, %zmm6 # AVX512F
vpsubd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsubd (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpsubd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsubd 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsubd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsubd -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsubd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsubd 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsubd -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpsubd -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpsubq %zmm4, %zmm5, %zmm6 # AVX512F
vpsubq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpsubq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpsubq (%ecx), %zmm5, %zmm6 # AVX512F
vpsubq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpsubq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpsubq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsubq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpsubq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpsubq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpsubq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsubq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpsubq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpsubq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# vptestm*: result goes to a mask register (%k5), optionally masked by %k7;
# note there is no {z} form for a mask-register destination.
vptestmd %zmm5, %zmm6, %k5 # AVX512F
vptestmd %zmm5, %zmm6, %k5{%k7} # AVX512F
vptestmd (%ecx), %zmm6, %k5 # AVX512F
vptestmd -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vptestmd (%eax){1to16}, %zmm6, %k5 # AVX512F
vptestmd 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vptestmd 8192(%edx), %zmm6, %k5 # AVX512F
vptestmd -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vptestmd -8256(%edx), %zmm6, %k5 # AVX512F
vptestmd 508(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vptestmd 512(%edx){1to16}, %zmm6, %k5 # AVX512F
vptestmd -512(%edx){1to16}, %zmm6, %k5 # AVX512F Disp8
vptestmd -516(%edx){1to16}, %zmm6, %k5 # AVX512F
vptestmq %zmm5, %zmm6, %k5 # AVX512F
vptestmq %zmm5, %zmm6, %k5{%k7} # AVX512F
vptestmq (%ecx), %zmm6, %k5 # AVX512F
vptestmq -123456(%esp,%esi,8), %zmm6, %k5 # AVX512F
vptestmq (%eax){1to8}, %zmm6, %k5 # AVX512F
vptestmq 8128(%edx), %zmm6, %k5 # AVX512F Disp8
vptestmq 8192(%edx), %zmm6, %k5 # AVX512F
vptestmq -8192(%edx), %zmm6, %k5 # AVX512F Disp8
vptestmq -8256(%edx), %zmm6, %k5 # AVX512F
vptestmq 1016(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vptestmq 1024(%edx){1to8}, %zmm6, %k5 # AVX512F
vptestmq -1024(%edx){1to8}, %zmm6, %k5 # AVX512F Disp8
vptestmq -1032(%edx){1to8}, %zmm6, %k5 # AVX512F
# Integer unpack (interleave) high/low, dword and qword granularity, with the
# standard operand-form sweep (register, masks, memory, broadcast, Disp8).
vpunpckhdq %zmm4, %zmm5, %zmm6 # AVX512F
vpunpckhdq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpunpckhdq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpunpckhdq (%ecx), %zmm5, %zmm6 # AVX512F
vpunpckhdq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpunpckhdq (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpunpckhdq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpckhdq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpunpckhdq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpckhdq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpunpckhdq 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpunpckhdq 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpunpckhdq -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpunpckhdq -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpunpckhqdq %zmm4, %zmm5, %zmm6 # AVX512F
vpunpckhqdq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpunpckhqdq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpunpckhqdq (%ecx), %zmm5, %zmm6 # AVX512F
vpunpckhqdq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpunpckhqdq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpunpckhqdq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpckhqdq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpunpckhqdq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpckhqdq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpunpckhqdq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpunpckhqdq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpunpckhqdq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpunpckhqdq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpunpckldq %zmm4, %zmm5, %zmm6 # AVX512F
vpunpckldq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpunpckldq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpunpckldq (%ecx), %zmm5, %zmm6 # AVX512F
vpunpckldq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpunpckldq (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpunpckldq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpckldq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpunpckldq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpckldq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpunpckldq 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpunpckldq 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpunpckldq -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpunpckldq -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpunpcklqdq %zmm4, %zmm5, %zmm6 # AVX512F
vpunpcklqdq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpunpcklqdq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpunpcklqdq (%ecx), %zmm5, %zmm6 # AVX512F
vpunpcklqdq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpunpcklqdq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpunpcklqdq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpcklqdq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpunpcklqdq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpunpcklqdq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpunpcklqdq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpunpcklqdq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpunpcklqdq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpunpcklqdq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# Bitwise XOR, dword/qword element forms (element size matters only for
# masking and broadcast granularity).
vpxord %zmm4, %zmm5, %zmm6 # AVX512F
vpxord %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpxord %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpxord (%ecx), %zmm5, %zmm6 # AVX512F
vpxord -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpxord (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpxord 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpxord 8192(%edx), %zmm5, %zmm6 # AVX512F
vpxord -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpxord -8256(%edx), %zmm5, %zmm6 # AVX512F
vpxord 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpxord 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpxord -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpxord -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpxorq %zmm4, %zmm5, %zmm6 # AVX512F
vpxorq %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpxorq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpxorq (%ecx), %zmm5, %zmm6 # AVX512F
vpxorq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpxorq (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpxorq 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpxorq 8192(%edx), %zmm5, %zmm6 # AVX512F
vpxorq -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpxorq -8256(%edx), %zmm5, %zmm6 # AVX512F
vpxorq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpxorq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpxorq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpxorq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# Approximate reciprocal (vrcp14*) and reciprocal square root (vrsqrt14*).
# Packed forms sweep the full zmm operand set; scalar sd/ss forms use an
# 8-byte (1016 = 127*8) or 4-byte (508 = 127*4) Disp8 tuple respectively.
vrcp14pd %zmm5, %zmm6 # AVX512F
vrcp14pd %zmm5, %zmm6{%k7} # AVX512F
vrcp14pd %zmm5, %zmm6{%k7}{z} # AVX512F
vrcp14pd (%ecx), %zmm6 # AVX512F
vrcp14pd -123456(%esp,%esi,8), %zmm6 # AVX512F
vrcp14pd (%eax){1to8}, %zmm6 # AVX512F
vrcp14pd 8128(%edx), %zmm6 # AVX512F Disp8
vrcp14pd 8192(%edx), %zmm6 # AVX512F
vrcp14pd -8192(%edx), %zmm6 # AVX512F Disp8
vrcp14pd -8256(%edx), %zmm6 # AVX512F
vrcp14pd 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vrcp14pd 1024(%edx){1to8}, %zmm6 # AVX512F
vrcp14pd -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vrcp14pd -1032(%edx){1to8}, %zmm6 # AVX512F
vrcp14ps %zmm5, %zmm6 # AVX512F
vrcp14ps %zmm5, %zmm6{%k7} # AVX512F
vrcp14ps %zmm5, %zmm6{%k7}{z} # AVX512F
vrcp14ps (%ecx), %zmm6 # AVX512F
vrcp14ps -123456(%esp,%esi,8), %zmm6 # AVX512F
vrcp14ps (%eax){1to16}, %zmm6 # AVX512F
vrcp14ps 8128(%edx), %zmm6 # AVX512F Disp8
vrcp14ps 8192(%edx), %zmm6 # AVX512F
vrcp14ps -8192(%edx), %zmm6 # AVX512F Disp8
vrcp14ps -8256(%edx), %zmm6 # AVX512F
vrcp14ps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vrcp14ps 512(%edx){1to16}, %zmm6 # AVX512F
vrcp14ps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vrcp14ps -516(%edx){1to16}, %zmm6 # AVX512F
vrcp14sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrcp14sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrcp14sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrcp14sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrcp14sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrcp14sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrcp14sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrcp14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrcp14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrcp14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrcp14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrcp14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrcp14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrcp14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrcp14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrcp14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14pd %zmm5, %zmm6 # AVX512F
vrsqrt14pd %zmm5, %zmm6{%k7} # AVX512F
vrsqrt14pd %zmm5, %zmm6{%k7}{z} # AVX512F
vrsqrt14pd (%ecx), %zmm6 # AVX512F
vrsqrt14pd -123456(%esp,%esi,8), %zmm6 # AVX512F
vrsqrt14pd (%eax){1to8}, %zmm6 # AVX512F
vrsqrt14pd 8128(%edx), %zmm6 # AVX512F Disp8
vrsqrt14pd 8192(%edx), %zmm6 # AVX512F
vrsqrt14pd -8192(%edx), %zmm6 # AVX512F Disp8
vrsqrt14pd -8256(%edx), %zmm6 # AVX512F
vrsqrt14pd 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vrsqrt14pd 1024(%edx){1to8}, %zmm6 # AVX512F
vrsqrt14pd -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vrsqrt14pd -1032(%edx){1to8}, %zmm6 # AVX512F
vrsqrt14ps %zmm5, %zmm6 # AVX512F
vrsqrt14ps %zmm5, %zmm6{%k7} # AVX512F
vrsqrt14ps %zmm5, %zmm6{%k7}{z} # AVX512F
vrsqrt14ps (%ecx), %zmm6 # AVX512F
vrsqrt14ps -123456(%esp,%esi,8), %zmm6 # AVX512F
vrsqrt14ps (%eax){1to16}, %zmm6 # AVX512F
vrsqrt14ps 8128(%edx), %zmm6 # AVX512F Disp8
vrsqrt14ps 8192(%edx), %zmm6 # AVX512F
vrsqrt14ps -8192(%edx), %zmm6 # AVX512F Disp8
vrsqrt14ps -8256(%edx), %zmm6 # AVX512F
vrsqrt14ps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vrsqrt14ps 512(%edx){1to16}, %zmm6 # AVX512F
vrsqrt14ps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vrsqrt14ps -516(%edx){1to16}, %zmm6 # AVX512F
vrsqrt14sd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrsqrt14sd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrsqrt14sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrsqrt14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vrsqrt14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrsqrt14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vrsqrt14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vrsqrt14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
# Floating-point scatters (VSIB, mandatory {%k1}).  As with the integer
# scatters, the index register width tracks the index element type (ymm for
# 8 dword indices with qword data; zmm otherwise), and the first addressing
# form is deliberately duplicated.
vscatterdpd %zmm6, 123(%ebp,%ymm7,8){%k1} # AVX512F
vscatterdpd %zmm6, 123(%ebp,%ymm7,8){%k1} # AVX512F
vscatterdpd %zmm6, 256(%eax,%ymm7){%k1} # AVX512F
vscatterdpd %zmm6, 1024(%ecx,%ymm7,4){%k1} # AVX512F
vscatterdps %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterdps %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterdps %zmm6, 256(%eax,%zmm7){%k1} # AVX512F
vscatterdps %zmm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vscatterqpd %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqpd %zmm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqpd %zmm6, 256(%eax,%zmm7){%k1} # AVX512F
vscatterqpd %zmm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
vscatterqps %ymm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqps %ymm6, 123(%ebp,%zmm7,8){%k1} # AVX512F
vscatterqps %ymm6, 256(%eax,%zmm7){%k1} # AVX512F
vscatterqps %ymm6, 1024(%ecx,%zmm7,4){%k1} # AVX512F
# Immediate shuffles of double/single elements with the standard sweep.
vshufpd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshufpd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshufpd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshufpd $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshufpd $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshufpd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshufpd $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vshufpd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshufpd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshufpd $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshufpd $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vshufpd $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vshufps $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vshufps $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vshufps $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vshufps $123, %zmm4, %zmm5, %zmm6 # AVX512F
vshufps $123, (%ecx), %zmm5, %zmm6 # AVX512F
vshufps $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vshufps $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vshufps $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vshufps $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vshufps $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vshufps $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vshufps $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
# Square root and subtract, packed and scalar, including the EVEX static
# rounding-control forms {rn-sae}/{ru-sae}/{rd-sae}/{rz-sae} (packed forms
# round, scalar forms round under the mask), plus vucomis* with {sae}.
vsqrtpd %zmm5, %zmm6 # AVX512F
vsqrtpd %zmm5, %zmm6{%k7} # AVX512F
vsqrtpd %zmm5, %zmm6{%k7}{z} # AVX512F
vsqrtpd {rn-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd {ru-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd {rd-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd {rz-sae}, %zmm5, %zmm6 # AVX512F
vsqrtpd (%ecx), %zmm6 # AVX512F
vsqrtpd -123456(%esp,%esi,8), %zmm6 # AVX512F
vsqrtpd (%eax){1to8}, %zmm6 # AVX512F
vsqrtpd 8128(%edx), %zmm6 # AVX512F Disp8
vsqrtpd 8192(%edx), %zmm6 # AVX512F
vsqrtpd -8192(%edx), %zmm6 # AVX512F Disp8
vsqrtpd -8256(%edx), %zmm6 # AVX512F
vsqrtpd 1016(%edx){1to8}, %zmm6 # AVX512F Disp8
vsqrtpd 1024(%edx){1to8}, %zmm6 # AVX512F
vsqrtpd -1024(%edx){1to8}, %zmm6 # AVX512F Disp8
vsqrtpd -1032(%edx){1to8}, %zmm6 # AVX512F
vsqrtps %zmm5, %zmm6 # AVX512F
vsqrtps %zmm5, %zmm6{%k7} # AVX512F
vsqrtps %zmm5, %zmm6{%k7}{z} # AVX512F
vsqrtps {rn-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps {ru-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps {rd-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps {rz-sae}, %zmm5, %zmm6 # AVX512F
vsqrtps (%ecx), %zmm6 # AVX512F
vsqrtps -123456(%esp,%esi,8), %zmm6 # AVX512F
vsqrtps (%eax){1to16}, %zmm6 # AVX512F
vsqrtps 8128(%edx), %zmm6 # AVX512F Disp8
vsqrtps 8192(%edx), %zmm6 # AVX512F
vsqrtps -8192(%edx), %zmm6 # AVX512F Disp8
vsqrtps -8256(%edx), %zmm6 # AVX512F
vsqrtps 508(%edx){1to16}, %zmm6 # AVX512F Disp8
vsqrtps 512(%edx){1to16}, %zmm6 # AVX512F
vsqrtps -512(%edx){1to16}, %zmm6 # AVX512F Disp8
vsqrtps -516(%edx){1to16}, %zmm6 # AVX512F
vsqrtsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsqrtsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsqrtss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsqrtss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsqrtss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsqrtss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubpd %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vsubpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vsubpd {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubpd (%ecx), %zmm5, %zmm6 # AVX512F
vsubpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vsubpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vsubpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vsubpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vsubpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vsubpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vsubpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vsubpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vsubps %zmm4, %zmm5, %zmm6 # AVX512F
vsubps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vsubps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vsubps {rn-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps {ru-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps {rd-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps {rz-sae}, %zmm4, %zmm5, %zmm6 # AVX512F
vsubps (%ecx), %zmm5, %zmm6 # AVX512F
vsubps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vsubps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vsubps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubps 8192(%edx), %zmm5, %zmm6 # AVX512F
vsubps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vsubps -8256(%edx), %zmm5, %zmm6 # AVX512F
vsubps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vsubps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vsubps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vsubps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vsubsd %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsubsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubsd (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsubsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsubsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubss %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512F
vsubss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512F
vsubss (%ecx), %xmm5, %xmm6{%k7} # AVX512F
vsubss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512F
vsubss 508(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubss 512(%edx), %xmm5, %xmm6{%k7} # AVX512F
vsubss -512(%edx), %xmm5, %xmm6{%k7} # AVX512F Disp8
vsubss -516(%edx), %xmm5, %xmm6{%k7} # AVX512F
# Ordered scalar compares with exception suppression ({sae}); no rounding
# control — compares do not round.
vucomisd {sae}, %xmm5, %xmm6 # AVX512F
vucomiss {sae}, %xmm5, %xmm6 # AVX512F
# Floating-point unpack high/low (pd: qword granularity, {1to8} broadcast;
# ps: dword granularity, {1to16} broadcast) with the standard operand sweep.
vunpckhpd %zmm4, %zmm5, %zmm6 # AVX512F
vunpckhpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpckhpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpckhpd (%ecx), %zmm5, %zmm6 # AVX512F
vunpckhpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpckhpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vunpckhpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpckhpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpckhpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpckhpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpckhps %zmm4, %zmm5, %zmm6 # AVX512F
vunpckhps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpckhps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpckhps (%ecx), %zmm5, %zmm6 # AVX512F
vunpckhps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpckhps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vunpckhps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhps 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpckhps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpckhps -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpckhps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vunpckhps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpckhps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vunpcklpd %zmm4, %zmm5, %zmm6 # AVX512F
vunpcklpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpcklpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpcklpd (%ecx), %zmm5, %zmm6 # AVX512F
vunpcklpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpcklpd (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vunpcklpd 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpcklpd -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpcklpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpcklpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vunpcklps %zmm4, %zmm5, %zmm6 # AVX512F
vunpcklps %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vunpcklps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vunpcklps (%ecx), %zmm5, %zmm6 # AVX512F
vunpcklps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vunpcklps (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vunpcklps 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklps 8192(%edx), %zmm5, %zmm6 # AVX512F
vunpcklps -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vunpcklps -8256(%edx), %zmm5, %zmm6 # AVX512F
vunpcklps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vunpcklps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vunpcklps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
# Ternary logic with an 8-bit truth-table immediate; the destination is also
# the third logical source.
vpternlogd $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpternlogd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpternlogd $123, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogd $123, (%ecx), %zmm5, %zmm6 # AVX512F
vpternlogd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpternlogd $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogd $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vpternlogd $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vpternlogd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogd $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogd $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512F
vpternlogq $0xab, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vpternlogq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F
vpternlogq $123, %zmm4, %zmm5, %zmm6 # AVX512F
vpternlogq $123, (%ecx), %zmm5, %zmm6 # AVX512F
vpternlogq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F
vpternlogq $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512F
vpternlogq $123, 8128(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, 8192(%edx), %zmm5, %zmm6 # AVX512F
vpternlogq $123, -8192(%edx), %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, -8256(%edx), %zmm5, %zmm6 # AVX512F
vpternlogq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F
vpternlogq $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512F Disp8
vpternlogq $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512F
# vpmov{,s,us}{qb,qw,qd,db,dw}: element down-conversion (truncate /
# signed-saturate / unsigned-saturate) from ZMM to the narrower XMM/YMM
# destination, each exercised with merging ({%k7}) and zeroing
# ({%k7}{z}) masking.
	vpmovqb	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovqb	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovsqb	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovsqb	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovusqb	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovusqb	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovqw	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovqw	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovsqw	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovsqw	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovusqw	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovusqw	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovqd	%zmm5, %ymm6{%k7}	 # AVX512F
	vpmovqd	%zmm5, %ymm6{%k7}{z}	 # AVX512F
	vpmovsqd	%zmm5, %ymm6{%k7}	 # AVX512F
	vpmovsqd	%zmm5, %ymm6{%k7}{z}	 # AVX512F
	vpmovusqd	%zmm5, %ymm6{%k7}	 # AVX512F
	vpmovusqd	%zmm5, %ymm6{%k7}{z}	 # AVX512F
	vpmovdb	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovdb	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovsdb	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovsdb	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovusdb	%zmm5, %xmm6{%k7}	 # AVX512F
	vpmovusdb	%zmm5, %xmm6{%k7}{z}	 # AVX512F
	vpmovdw	%zmm5, %ymm6{%k7}	 # AVX512F
	vpmovdw	%zmm5, %ymm6{%k7}{z}	 # AVX512F
	vpmovsdw	%zmm5, %ymm6{%k7}	 # AVX512F
	vpmovsdw	%zmm5, %ymm6{%k7}{z}	 # AVX512F
	vpmovusdw	%zmm5, %ymm6{%k7}	 # AVX512F
	vpmovusdw	%zmm5, %ymm6{%k7}{z}	 # AVX512F
# vshuff32x4 / vshuff64x2 / vshufi32x4 / vshufi64x2: 128-bit-lane
# shuffles with an immediate selector.  Each mnemonic runs the standard
# operand matrix: reg/reg (with/without masking and zeroing), plain
# memory, SIB memory, broadcast ({1to16} for 32-bit, {1to8} for 64-bit
# elements), and displacements on either side of the Disp8*N boundary.
	vshuff32x4	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vshuff32x4	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vshuff32x4	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshuff32x4	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshuff32x4	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vshuff32x4	$123, 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vshuff32x4	$123, -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vshuff32x4	$123, -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vshuff64x2	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vshuff64x2	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshuff64x2	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshuff64x2	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vshuff64x2	$123, 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vshuff64x2	$123, -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vshuff64x2	$123, -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vshufi32x4	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vshufi32x4	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshufi32x4	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshufi32x4	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vshufi32x4	$123, 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vshufi32x4	$123, -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vshufi32x4	$123, -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vshufi64x2	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vshufi64x2	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshufi64x2	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vshufi64x2	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vshufi64x2	$123, 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vshufi64x2	$123, -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vshufi64x2	$123, -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vpermq / vpermpd (variable permute, 64-bit elements) and
# vpermt2{d,q,ps,pd} (two-table permute overwriting the first table).
# Same operand matrix as above: reg/reg with masking variants, plain
# and SIB memory, broadcast, and Disp8*N boundary displacements.
	vpermq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermpd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermpd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermpd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermpd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermpd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermpd	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermpd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermpd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermpd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermpd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermpd	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermpd	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermpd	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermpd	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermt2d	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermt2d	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermt2d	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermt2d	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermt2d	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermt2d	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermt2d	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2d	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2d	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2d	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2d	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2d	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermt2d	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2d	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermt2q	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermt2q	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermt2q	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermt2q	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermt2q	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermt2q	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermt2q	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2q	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2q	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2q	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2q	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2q	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermt2q	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2q	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermt2ps	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermt2ps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermt2ps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermt2ps	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermt2ps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermt2ps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermt2ps	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2ps	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2ps	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2ps	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2ps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2ps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermt2ps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2ps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vpermt2pd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vpermt2pd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vpermt2pd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpermt2pd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vpermt2pd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vpermt2pd	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermt2pd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2pd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2pd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2pd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vpermt2pd	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2pd	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vpermt2pd	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vpermt2pd	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# valignq: qword-granular concatenate-and-shift-right by immediate,
# exercised across the full reg/mem/broadcast/masking operand matrix.
	valignq	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	valignq	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	valignq	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	valignq	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	valignq	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	valignq	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	valignq	$123, (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	valignq	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	valignq	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	valignq	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	valignq	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	valignq	$123, 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	valignq	$123, 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	valignq	$123, -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	valignq	$123, -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# Scalar unsigned-integer conversions:
#   vcvtsd2usi / vcvtss2usi  — FP scalar -> unsigned 32-bit GPR, with
#     explicit static-rounding forms {rn,ru,rd,rz-sae} and memory
#     sources (1016/508 are the Disp8*8 / Disp8*4 boundary values);
#   vcvtusi2sdl / vcvtusi2ssl — unsigned 32-bit GPR/memory -> FP scalar
#     (the "l" suffix selects the 32-bit source form; only the ss
#     variant takes rounding-control operands here).
	vcvtsd2usi	%xmm6, %eax	 # AVX512F
	vcvtsd2usi	{rn-sae}, %xmm6, %eax	 # AVX512F
	vcvtsd2usi	{ru-sae}, %xmm6, %eax	 # AVX512F
	vcvtsd2usi	{rd-sae}, %xmm6, %eax	 # AVX512F
	vcvtsd2usi	{rz-sae}, %xmm6, %eax	 # AVX512F
	vcvtsd2usi	(%ecx), %eax	 # AVX512F
	vcvtsd2usi	-123456(%esp,%esi,8), %eax	 # AVX512F
	vcvtsd2usi	1016(%edx), %eax	 # AVX512F Disp8
	vcvtsd2usi	1024(%edx), %eax	 # AVX512F
	vcvtsd2usi	-1024(%edx), %eax	 # AVX512F Disp8
	vcvtsd2usi	-1032(%edx), %eax	 # AVX512F
	vcvtsd2usi	%xmm6, %ebp	 # AVX512F
	vcvtsd2usi	{rn-sae}, %xmm6, %ebp	 # AVX512F
	vcvtsd2usi	{ru-sae}, %xmm6, %ebp	 # AVX512F
	vcvtsd2usi	{rd-sae}, %xmm6, %ebp	 # AVX512F
	vcvtsd2usi	{rz-sae}, %xmm6, %ebp	 # AVX512F
	vcvtsd2usi	(%ecx), %ebp	 # AVX512F
	vcvtsd2usi	-123456(%esp,%esi,8), %ebp	 # AVX512F
	vcvtsd2usi	1016(%edx), %ebp	 # AVX512F Disp8
	vcvtsd2usi	1024(%edx), %ebp	 # AVX512F
	vcvtsd2usi	-1024(%edx), %ebp	 # AVX512F Disp8
	vcvtsd2usi	-1032(%edx), %ebp	 # AVX512F
	vcvtss2usi	%xmm6, %eax	 # AVX512F
	vcvtss2usi	{rn-sae}, %xmm6, %eax	 # AVX512F
	vcvtss2usi	{ru-sae}, %xmm6, %eax	 # AVX512F
	vcvtss2usi	{rd-sae}, %xmm6, %eax	 # AVX512F
	vcvtss2usi	{rz-sae}, %xmm6, %eax	 # AVX512F
	vcvtss2usi	(%ecx), %eax	 # AVX512F
	vcvtss2usi	-123456(%esp,%esi,8), %eax	 # AVX512F
	vcvtss2usi	508(%edx), %eax	 # AVX512F Disp8
	vcvtss2usi	512(%edx), %eax	 # AVX512F
	vcvtss2usi	-512(%edx), %eax	 # AVX512F Disp8
	vcvtss2usi	-516(%edx), %eax	 # AVX512F
	vcvtss2usi	%xmm6, %ebp	 # AVX512F
	vcvtss2usi	{rn-sae}, %xmm6, %ebp	 # AVX512F
	vcvtss2usi	{ru-sae}, %xmm6, %ebp	 # AVX512F
	vcvtss2usi	{rd-sae}, %xmm6, %ebp	 # AVX512F
	vcvtss2usi	{rz-sae}, %xmm6, %ebp	 # AVX512F
	vcvtss2usi	(%ecx), %ebp	 # AVX512F
	vcvtss2usi	-123456(%esp,%esi,8), %ebp	 # AVX512F
	vcvtss2usi	508(%edx), %ebp	 # AVX512F Disp8
	vcvtss2usi	512(%edx), %ebp	 # AVX512F
	vcvtss2usi	-512(%edx), %ebp	 # AVX512F Disp8
	vcvtss2usi	-516(%edx), %ebp	 # AVX512F
	vcvtusi2sdl	%eax, %xmm5, %xmm6	 # AVX512F
	vcvtusi2sdl	%ebp, %xmm5, %xmm6	 # AVX512F
	vcvtusi2sdl	(%ecx), %xmm5, %xmm6	 # AVX512F
	vcvtusi2sdl	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512F
	vcvtusi2sdl	508(%edx), %xmm5, %xmm6	 # AVX512F Disp8
	vcvtusi2sdl	512(%edx), %xmm5, %xmm6	 # AVX512F
	vcvtusi2sdl	-512(%edx), %xmm5, %xmm6	 # AVX512F Disp8
	vcvtusi2sdl	-516(%edx), %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%eax, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%eax, {rn-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%eax, {ru-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%eax, {rd-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%eax, {rz-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%ebp, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%ebp, {rn-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%ebp, {ru-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%ebp, {rd-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	%ebp, {rz-sae}, %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	(%ecx), %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	508(%edx), %xmm5, %xmm6	 # AVX512F Disp8
	vcvtusi2ssl	512(%edx), %xmm5, %xmm6	 # AVX512F
	vcvtusi2ssl	-512(%edx), %xmm5, %xmm6	 # AVX512F Disp8
	vcvtusi2ssl	-516(%edx), %xmm5, %xmm6	 # AVX512F
# vscalef{pd,ps}: packed scale-by-power-of-two with masking, static
# rounding, broadcast, and Disp8*N boundary memory forms.
# vscalef{sd,ss}: scalar forms — note the scalar variants are written
# only with a mask ({%k7}); scalar memory operands use element-size
# Disp8 boundaries (1016 for sd, 508 for ss).
	vscalefpd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefpd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vscalefpd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vscalefpd	{rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefpd	{ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefpd	{rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefpd	{rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefpd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vscalefpd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vscalefpd	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vscalefpd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vscalefpd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vscalefpd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vscalefpd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vscalefpd	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vscalefpd	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vscalefpd	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vscalefpd	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vscalefps	%zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vscalefps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vscalefps	{rn-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefps	{ru-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefps	{rd-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefps	{rz-sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vscalefps	(%ecx), %zmm5, %zmm6	 # AVX512F
	vscalefps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vscalefps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vscalefps	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vscalefps	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vscalefps	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vscalefps	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vscalefps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vscalefps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vscalefps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vscalefps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vscalefsd	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vscalefsd	{rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	{ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	{rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	{rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vscalefsd	1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefsd	-1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vscalefsd	-1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	%xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	%xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vscalefss	{rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	{ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	{rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	{rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	(%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	-123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vscalefss	512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vscalefss	-512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vscalefss	-516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vfixupimm{ps,pd}: packed special-value fix-up with immediate table
# selector, including {sae} (suppress-all-exceptions) forms.
# vfixupimm{ss,sd}: the scalar counterparts, written with a mask only.
	vfixupimmps	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfixupimmps	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfixupimmps	$0xab, {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmps	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmps	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmps	$123, 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfixupimmps	$123, -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmps	$123, -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$0xab, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$0xab, %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vfixupimmpd	$0xab, %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vfixupimmpd	$0xab, {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, {sae}, %zmm4, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, (%ecx), %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmpd	$123, 8192(%edx), %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmpd	$123, -8256(%edx), %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmpd	$123, 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfixupimmpd	$123, -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vfixupimmpd	$123, -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vfixupimmss	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfixupimmss	$0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$123, 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfixupimmss	$123, 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmss	$123, -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfixupimmss	$123, -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vfixupimmsd	$0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$123, 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfixupimmsd	$123, 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vfixupimmsd	$123, -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vfixupimmsd	$123, -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# Immediate shifts: vpslld/vpsllq (logical left) and vpsrad/vpsraq
# (arithmetic right).  The memory-source forms shift data loaded from
# memory into the destination, including broadcast sources.
	vpslld	$0xab, %zmm5, %zmm6	 # AVX512F
	vpslld	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpslld	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpslld	$123, %zmm5, %zmm6	 # AVX512F
	vpslld	$123, (%ecx), %zmm6	 # AVX512F
	vpslld	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpslld	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vpslld	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpslld	$123, 8192(%edx), %zmm6	 # AVX512F
	vpslld	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpslld	$123, -8256(%edx), %zmm6	 # AVX512F
	vpslld	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpslld	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vpslld	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpslld	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vpsllq	$0xab, %zmm5, %zmm6	 # AVX512F
	vpsllq	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsllq	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsllq	$123, %zmm5, %zmm6	 # AVX512F
	vpsllq	$123, (%ecx), %zmm6	 # AVX512F
	vpsllq	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpsllq	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vpsllq	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpsllq	$123, 8192(%edx), %zmm6	 # AVX512F
	vpsllq	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpsllq	$123, -8256(%edx), %zmm6	 # AVX512F
	vpsllq	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpsllq	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vpsllq	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpsllq	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
	vpsrad	$0xab, %zmm5, %zmm6	 # AVX512F
	vpsrad	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsrad	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsrad	$123, %zmm5, %zmm6	 # AVX512F
	vpsrad	$123, (%ecx), %zmm6	 # AVX512F
	vpsrad	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpsrad	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vpsrad	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpsrad	$123, 8192(%edx), %zmm6	 # AVX512F
	vpsrad	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpsrad	$123, -8256(%edx), %zmm6	 # AVX512F
	vpsrad	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpsrad	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vpsrad	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vpsrad	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vpsraq	$0xab, %zmm5, %zmm6	 # AVX512F
	vpsraq	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vpsraq	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vpsraq	$123, %zmm5, %zmm6	 # AVX512F
	vpsraq	$123, (%ecx), %zmm6	 # AVX512F
	vpsraq	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vpsraq	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vpsraq	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vpsraq	$123, 8192(%edx), %zmm6	 # AVX512F
	vpsraq	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vpsraq	$123, -8256(%edx), %zmm6	 # AVX512F
	vpsraq	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpsraq	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vpsraq	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vpsraq	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
# Rotates: vprolv{d,q}/vprorv{d,q} (per-element variable count) and
# vprol{d,q}/vpror{d,q} (immediate count).  Same operand-form matrix
# as the shift group above.
	vprolvd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vprolvd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vprolvd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprolvd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vprolvd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vprolvd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vprolvd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprolvd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vprolvd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprolvd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vprolvd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vprolvd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vprolvd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vprolvd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vprold	$0xab, %zmm5, %zmm6	 # AVX512F
	vprold	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vprold	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprold	$123, %zmm5, %zmm6	 # AVX512F
	vprold	$123, (%ecx), %zmm6	 # AVX512F
	vprold	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vprold	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vprold	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vprold	$123, 8192(%edx), %zmm6	 # AVX512F
	vprold	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vprold	$123, -8256(%edx), %zmm6	 # AVX512F
	vprold	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vprold	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vprold	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vprold	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vprolvq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vprolvq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vprolvq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprolvq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vprolvq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vprolvq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vprolvq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprolvq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vprolvq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprolvq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vprolvq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vprolvq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vprolvq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vprolvq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vprolq	$0xab, %zmm5, %zmm6	 # AVX512F
	vprolq	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vprolq	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprolq	$123, %zmm5, %zmm6	 # AVX512F
	vprolq	$123, (%ecx), %zmm6	 # AVX512F
	vprolq	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vprolq	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vprolq	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vprolq	$123, 8192(%edx), %zmm6	 # AVX512F
	vprolq	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vprolq	$123, -8256(%edx), %zmm6	 # AVX512F
	vprolq	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vprolq	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vprolq	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vprolq	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
	vprorvd	%zmm4, %zmm5, %zmm6	 # AVX512F
	vprorvd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vprorvd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprorvd	(%ecx), %zmm5, %zmm6	 # AVX512F
	vprorvd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vprorvd	(%eax){1to16}, %zmm5, %zmm6	 # AVX512F
	vprorvd	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprorvd	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vprorvd	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprorvd	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vprorvd	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vprorvd	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vprorvd	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
	vprorvd	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
	vprord	$0xab, %zmm5, %zmm6	 # AVX512F
	vprord	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vprord	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprord	$123, %zmm5, %zmm6	 # AVX512F
	vprord	$123, (%ecx), %zmm6	 # AVX512F
	vprord	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vprord	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vprord	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vprord	$123, 8192(%edx), %zmm6	 # AVX512F
	vprord	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vprord	$123, -8256(%edx), %zmm6	 # AVX512F
	vprord	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vprord	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vprord	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vprord	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vprorvq	%zmm4, %zmm5, %zmm6	 # AVX512F
	vprorvq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
	vprorvq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprorvq	(%ecx), %zmm5, %zmm6	 # AVX512F
	vprorvq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
	vprorvq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512F
	vprorvq	8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprorvq	8192(%edx), %zmm5, %zmm6	 # AVX512F
	vprorvq	-8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
	vprorvq	-8256(%edx), %zmm5, %zmm6	 # AVX512F
	vprorvq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vprorvq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vprorvq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
	vprorvq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
	vprorq	$0xab, %zmm5, %zmm6	 # AVX512F
	vprorq	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vprorq	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vprorq	$123, %zmm5, %zmm6	 # AVX512F
	vprorq	$123, (%ecx), %zmm6	 # AVX512F
	vprorq	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vprorq	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vprorq	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vprorq	$123, 8192(%edx), %zmm6	 # AVX512F
	vprorq	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vprorq	$123, -8256(%edx), %zmm6	 # AVX512F
	vprorq	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vprorq	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vprorq	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vprorq	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
# vrndscale{pd,ps}: packed round-to-scaled-integral with immediate
# control, including {sae} forms; vrndscale{sd,ss}: scalar forms
# (mask-only destinations, scalar element-size Disp8 boundaries).
	vrndscalepd	$0xab, %zmm5, %zmm6	 # AVX512F
	vrndscalepd	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vrndscalepd	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vrndscalepd	$0xab, {sae}, %zmm5, %zmm6	 # AVX512F
	vrndscalepd	$123, %zmm5, %zmm6	 # AVX512F
	vrndscalepd	$123, {sae}, %zmm5, %zmm6	 # AVX512F
	vrndscalepd	$123, (%ecx), %zmm6	 # AVX512F
	vrndscalepd	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vrndscalepd	$123, (%eax){1to8}, %zmm6	 # AVX512F
	vrndscalepd	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vrndscalepd	$123, 8192(%edx), %zmm6	 # AVX512F
	vrndscalepd	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vrndscalepd	$123, -8256(%edx), %zmm6	 # AVX512F
	vrndscalepd	$123, 1016(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vrndscalepd	$123, 1024(%edx){1to8}, %zmm6	 # AVX512F
	vrndscalepd	$123, -1024(%edx){1to8}, %zmm6	 # AVX512F Disp8
	vrndscalepd	$123, -1032(%edx){1to8}, %zmm6	 # AVX512F
	vrndscaleps	$0xab, %zmm5, %zmm6	 # AVX512F
	vrndscaleps	$0xab, %zmm5, %zmm6{%k7}	 # AVX512F
	vrndscaleps	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512F
	vrndscaleps	$0xab, {sae}, %zmm5, %zmm6	 # AVX512F
	vrndscaleps	$123, %zmm5, %zmm6	 # AVX512F
	vrndscaleps	$123, {sae}, %zmm5, %zmm6	 # AVX512F
	vrndscaleps	$123, (%ecx), %zmm6	 # AVX512F
	vrndscaleps	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512F
	vrndscaleps	$123, (%eax){1to16}, %zmm6	 # AVX512F
	vrndscaleps	$123, 8128(%edx), %zmm6	 # AVX512F Disp8
	vrndscaleps	$123, 8192(%edx), %zmm6	 # AVX512F
	vrndscaleps	$123, -8192(%edx), %zmm6	 # AVX512F Disp8
	vrndscaleps	$123, -8256(%edx), %zmm6	 # AVX512F
	vrndscaleps	$123, 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vrndscaleps	$123, 512(%edx){1to16}, %zmm6	 # AVX512F
	vrndscaleps	$123, -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
	vrndscaleps	$123, -516(%edx){1to16}, %zmm6	 # AVX512F
	vrndscalesd	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vrndscalesd	$0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$123, 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrndscalesd	$123, 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscalesd	$123, -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrndscalesd	$123, -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512F
	vrndscaless	$0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$123, 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrndscaless	$123, 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
	vrndscaless	$123, -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512F Disp8
	vrndscaless	$123, -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512F
# vpcompressq: store/compress active qword elements, both to memory
# (with optional mask, Disp8 boundary at 1016 = 127*8) and
# register-to-register with merging/zeroing masks.
	vpcompressq	%zmm6, (%ecx)	 # AVX512F
	vpcompressq	%zmm6, (%ecx){%k7}	 # AVX512F
	vpcompressq	%zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vpcompressq	%zmm6, 1016(%edx)	 # AVX512F Disp8
	vpcompressq	%zmm6, 1024(%edx)	 # AVX512F
	vpcompressq	%zmm6, -1024(%edx)	 # AVX512F Disp8
	vpcompressq	%zmm6, -1032(%edx)	 # AVX512F
	vpcompressq	%zmm5, %zmm6	 # AVX512F
	vpcompressq	%zmm5, %zmm6{%k7}	 # AVX512F
	vpcompressq	%zmm5, %zmm6{%k7}{z}	 # AVX512F
# Opmask-register (k0-k7) instructions introduced with AVX512F:
# logic (kandw/kandnw/korw/kxnorw/kxorw/knotw), flag test (kortestw),
# immediate shifts (kshiftrw/kshiftlw), moves between mask registers,
# memory, and 32-bit GPRs (kmovw), and byte unpack (kunpckbw).
	kandw	%k7, %k6, %k5	 # AVX512F
	kandnw	%k7, %k6, %k5	 # AVX512F
	korw	%k7, %k6, %k5	 # AVX512F
	kxnorw	%k7, %k6, %k5	 # AVX512F
	kxorw	%k7, %k6, %k5	 # AVX512F
	knotw	%k6, %k5	 # AVX512F
	kortestw	%k6, %k5	 # AVX512F
	kshiftrw	$0xab, %k6, %k5	 # AVX512F
	kshiftrw	$123, %k6, %k5	 # AVX512F
	kshiftlw	$0xab, %k6, %k5	 # AVX512F
	kshiftlw	$123, %k6, %k5	 # AVX512F
	kmovw	%k6, %k5	 # AVX512F
	kmovw	(%ecx), %k5	 # AVX512F
	kmovw	-123456(%esp,%esi,8), %k5	 # AVX512F
	kmovw	%k5, (%ecx)	 # AVX512F
	kmovw	%k5, -123456(%esp,%esi,8)	 # AVX512F
	kmovw	%eax, %k5	 # AVX512F
	kmovw	%ebp, %k5	 # AVX512F
	kmovw	%k5, %eax	 # AVX512F
	kmovw	%k5, %ebp	 # AVX512F
	kunpckbw	%k7, %k6, %k5	 # AVX512F
# Memory-destination forms: vcvtps2ph (FP32 -> FP16 down-convert) and
# vextract{f,i}{32x4,64x4} (extract a 128/256-bit piece of a ZMM).
# Store width fixes the Disp8*N factor: 2032 = 127*16 for 128-bit
# extracts, 4064 = 127*32 for 256-bit stores.
	vcvtps2ph	$0xab, %zmm6, (%ecx)	 # AVX512F
	vcvtps2ph	$0xab, %zmm6, (%ecx){%k7}	 # AVX512F
	vcvtps2ph	$123, %zmm6, (%ecx)	 # AVX512F
	vcvtps2ph	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vcvtps2ph	$123, %zmm6, 4064(%edx)	 # AVX512F Disp8
	vcvtps2ph	$123, %zmm6, 4096(%edx)	 # AVX512F
	vcvtps2ph	$123, %zmm6, -4096(%edx)	 # AVX512F Disp8
	vcvtps2ph	$123, %zmm6, -4128(%edx)	 # AVX512F
	vextractf32x4	$0xab, %zmm6, (%ecx)	 # AVX512F
	vextractf32x4	$0xab, %zmm6, (%ecx){%k7}	 # AVX512F
	vextractf32x4	$123, %zmm6, (%ecx)	 # AVX512F
	vextractf32x4	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vextractf32x4	$123, %zmm6, 2032(%edx)	 # AVX512F Disp8
	vextractf32x4	$123, %zmm6, 2048(%edx)	 # AVX512F
	vextractf32x4	$123, %zmm6, -2048(%edx)	 # AVX512F Disp8
	vextractf32x4	$123, %zmm6, -2064(%edx)	 # AVX512F
	vextractf64x4	$0xab, %zmm6, (%ecx)	 # AVX512F
	vextractf64x4	$0xab, %zmm6, (%ecx){%k7}	 # AVX512F
	vextractf64x4	$123, %zmm6, (%ecx)	 # AVX512F
	vextractf64x4	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vextractf64x4	$123, %zmm6, 4064(%edx)	 # AVX512F Disp8
	vextractf64x4	$123, %zmm6, 4096(%edx)	 # AVX512F
	vextractf64x4	$123, %zmm6, -4096(%edx)	 # AVX512F Disp8
	vextractf64x4	$123, %zmm6, -4128(%edx)	 # AVX512F
	vextracti32x4	$0xab, %zmm6, (%ecx)	 # AVX512F
	vextracti32x4	$0xab, %zmm6, (%ecx){%k7}	 # AVX512F
	vextracti32x4	$123, %zmm6, (%ecx)	 # AVX512F
	vextracti32x4	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vextracti32x4	$123, %zmm6, 2032(%edx)	 # AVX512F Disp8
	vextracti32x4	$123, %zmm6, 2048(%edx)	 # AVX512F
	vextracti32x4	$123, %zmm6, -2048(%edx)	 # AVX512F Disp8
	vextracti32x4	$123, %zmm6, -2064(%edx)	 # AVX512F
	vextracti64x4	$0xab, %zmm6, (%ecx)	 # AVX512F
	vextracti64x4	$0xab, %zmm6, (%ecx){%k7}	 # AVX512F
	vextracti64x4	$123, %zmm6, (%ecx)	 # AVX512F
	vextracti64x4	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512F
	vextracti64x4	$123, %zmm6, 4064(%edx)	 # AVX512F Disp8
	vextracti64x4	$123, %zmm6, 4096(%edx)	 # AVX512F
	vextracti64x4	$123, %zmm6, -4096(%edx)	 # AVX512F Disp8
	vextracti64x4	$123, %zmm6, -4128(%edx)	 # AVX512F
# Full 512-bit register-to-memory moves (aligned, dqa/dqu, unaligned forms),
# each with masking and Disp8 boundary coverage (N = 64: 8128 = 64*127).
vmovapd %zmm6, (%ecx)	 # AVX512F
vmovapd %zmm6, (%ecx){%k7}	 # AVX512F
vmovapd %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovapd %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovapd %zmm6, 8192(%edx)	 # AVX512F
vmovapd %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovapd %zmm6, -8256(%edx)	 # AVX512F
vmovaps %zmm6, (%ecx)	 # AVX512F
vmovaps %zmm6, (%ecx){%k7}	 # AVX512F
vmovaps %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovaps %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovaps %zmm6, 8192(%edx)	 # AVX512F
vmovaps %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovaps %zmm6, -8256(%edx)	 # AVX512F
vmovdqa32 %zmm6, (%ecx)	 # AVX512F
vmovdqa32 %zmm6, (%ecx){%k7}	 # AVX512F
vmovdqa32 %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovdqa32 %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovdqa32 %zmm6, 8192(%edx)	 # AVX512F
vmovdqa32 %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovdqa32 %zmm6, -8256(%edx)	 # AVX512F
vmovdqa64 %zmm6, (%ecx)	 # AVX512F
vmovdqa64 %zmm6, (%ecx){%k7}	 # AVX512F
vmovdqa64 %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovdqa64 %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovdqa64 %zmm6, 8192(%edx)	 # AVX512F
vmovdqa64 %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovdqa64 %zmm6, -8256(%edx)	 # AVX512F
vmovdqu32 %zmm6, (%ecx)	 # AVX512F
vmovdqu32 %zmm6, (%ecx){%k7}	 # AVX512F
vmovdqu32 %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovdqu32 %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovdqu32 %zmm6, 8192(%edx)	 # AVX512F
vmovdqu32 %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovdqu32 %zmm6, -8256(%edx)	 # AVX512F
vmovdqu64 %zmm6, (%ecx)	 # AVX512F
vmovdqu64 %zmm6, (%ecx){%k7}	 # AVX512F
vmovdqu64 %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovdqu64 %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovdqu64 %zmm6, 8192(%edx)	 # AVX512F
vmovdqu64 %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovdqu64 %zmm6, -8256(%edx)	 # AVX512F
vmovupd %zmm6, (%ecx)	 # AVX512F
vmovupd %zmm6, (%ecx){%k7}	 # AVX512F
vmovupd %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovupd %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovupd %zmm6, 8192(%edx)	 # AVX512F
vmovupd %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovupd %zmm6, -8256(%edx)	 # AVX512F
vmovups %zmm6, (%ecx)	 # AVX512F
vmovups %zmm6, (%ecx){%k7}	 # AVX512F
vmovups %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vmovups %zmm6, 8128(%edx)	 # AVX512F Disp8
vmovups %zmm6, 8192(%edx)	 # AVX512F
vmovups %zmm6, -8192(%edx)	 # AVX512F Disp8
vmovups %zmm6, -8256(%edx)	 # AVX512F
# Down-converting integer stores (truncate / signed-saturate / unsigned-saturate).
# The Disp8 scale tracks the narrowed store width: q->b N=8 (1016 = 8*127),
# q->w and d->b N=16 (2032), q->d and d->w N=32 (4064).
vpmovqb %zmm6, (%ecx)	 # AVX512F
vpmovqb %zmm6, (%ecx){%k7}	 # AVX512F
vpmovqb %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovqb %zmm6, 1016(%edx)	 # AVX512F Disp8
vpmovqb %zmm6, 1024(%edx)	 # AVX512F
vpmovqb %zmm6, -1024(%edx)	 # AVX512F Disp8
vpmovqb %zmm6, -1032(%edx)	 # AVX512F
vpmovsqb %zmm6, (%ecx)	 # AVX512F
vpmovsqb %zmm6, (%ecx){%k7}	 # AVX512F
vpmovsqb %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovsqb %zmm6, 1016(%edx)	 # AVX512F Disp8
vpmovsqb %zmm6, 1024(%edx)	 # AVX512F
vpmovsqb %zmm6, -1024(%edx)	 # AVX512F Disp8
vpmovsqb %zmm6, -1032(%edx)	 # AVX512F
vpmovusqb %zmm6, (%ecx)	 # AVX512F
vpmovusqb %zmm6, (%ecx){%k7}	 # AVX512F
vpmovusqb %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovusqb %zmm6, 1016(%edx)	 # AVX512F Disp8
vpmovusqb %zmm6, 1024(%edx)	 # AVX512F
vpmovusqb %zmm6, -1024(%edx)	 # AVX512F Disp8
vpmovusqb %zmm6, -1032(%edx)	 # AVX512F
vpmovqw %zmm6, (%ecx)	 # AVX512F
vpmovqw %zmm6, (%ecx){%k7}	 # AVX512F
vpmovqw %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovqw %zmm6, 2032(%edx)	 # AVX512F Disp8
vpmovqw %zmm6, 2048(%edx)	 # AVX512F
vpmovqw %zmm6, -2048(%edx)	 # AVX512F Disp8
vpmovqw %zmm6, -2064(%edx)	 # AVX512F
vpmovsqw %zmm6, (%ecx)	 # AVX512F
vpmovsqw %zmm6, (%ecx){%k7}	 # AVX512F
vpmovsqw %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovsqw %zmm6, 2032(%edx)	 # AVX512F Disp8
vpmovsqw %zmm6, 2048(%edx)	 # AVX512F
vpmovsqw %zmm6, -2048(%edx)	 # AVX512F Disp8
vpmovsqw %zmm6, -2064(%edx)	 # AVX512F
vpmovusqw %zmm6, (%ecx)	 # AVX512F
vpmovusqw %zmm6, (%ecx){%k7}	 # AVX512F
vpmovusqw %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovusqw %zmm6, 2032(%edx)	 # AVX512F Disp8
vpmovusqw %zmm6, 2048(%edx)	 # AVX512F
vpmovusqw %zmm6, -2048(%edx)	 # AVX512F Disp8
vpmovusqw %zmm6, -2064(%edx)	 # AVX512F
vpmovqd %zmm6, (%ecx)	 # AVX512F
vpmovqd %zmm6, (%ecx){%k7}	 # AVX512F
vpmovqd %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovqd %zmm6, 4064(%edx)	 # AVX512F Disp8
vpmovqd %zmm6, 4096(%edx)	 # AVX512F
vpmovqd %zmm6, -4096(%edx)	 # AVX512F Disp8
vpmovqd %zmm6, -4128(%edx)	 # AVX512F
vpmovsqd %zmm6, (%ecx)	 # AVX512F
vpmovsqd %zmm6, (%ecx){%k7}	 # AVX512F
vpmovsqd %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovsqd %zmm6, 4064(%edx)	 # AVX512F Disp8
vpmovsqd %zmm6, 4096(%edx)	 # AVX512F
vpmovsqd %zmm6, -4096(%edx)	 # AVX512F Disp8
vpmovsqd %zmm6, -4128(%edx)	 # AVX512F
vpmovusqd %zmm6, (%ecx)	 # AVX512F
vpmovusqd %zmm6, (%ecx){%k7}	 # AVX512F
vpmovusqd %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovusqd %zmm6, 4064(%edx)	 # AVX512F Disp8
vpmovusqd %zmm6, 4096(%edx)	 # AVX512F
vpmovusqd %zmm6, -4096(%edx)	 # AVX512F Disp8
vpmovusqd %zmm6, -4128(%edx)	 # AVX512F
vpmovdb %zmm6, (%ecx)	 # AVX512F
vpmovdb %zmm6, (%ecx){%k7}	 # AVX512F
vpmovdb %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovdb %zmm6, 2032(%edx)	 # AVX512F Disp8
vpmovdb %zmm6, 2048(%edx)	 # AVX512F
vpmovdb %zmm6, -2048(%edx)	 # AVX512F Disp8
vpmovdb %zmm6, -2064(%edx)	 # AVX512F
vpmovsdb %zmm6, (%ecx)	 # AVX512F
vpmovsdb %zmm6, (%ecx){%k7}	 # AVX512F
vpmovsdb %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovsdb %zmm6, 2032(%edx)	 # AVX512F Disp8
vpmovsdb %zmm6, 2048(%edx)	 # AVX512F
vpmovsdb %zmm6, -2048(%edx)	 # AVX512F Disp8
vpmovsdb %zmm6, -2064(%edx)	 # AVX512F
vpmovusdb %zmm6, (%ecx)	 # AVX512F
vpmovusdb %zmm6, (%ecx){%k7}	 # AVX512F
vpmovusdb %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovusdb %zmm6, 2032(%edx)	 # AVX512F Disp8
vpmovusdb %zmm6, 2048(%edx)	 # AVX512F
vpmovusdb %zmm6, -2048(%edx)	 # AVX512F Disp8
vpmovusdb %zmm6, -2064(%edx)	 # AVX512F
vpmovdw %zmm6, (%ecx)	 # AVX512F
vpmovdw %zmm6, (%ecx){%k7}	 # AVX512F
vpmovdw %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovdw %zmm6, 4064(%edx)	 # AVX512F Disp8
vpmovdw %zmm6, 4096(%edx)	 # AVX512F
vpmovdw %zmm6, -4096(%edx)	 # AVX512F Disp8
vpmovdw %zmm6, -4128(%edx)	 # AVX512F
vpmovsdw %zmm6, (%ecx)	 # AVX512F
vpmovsdw %zmm6, (%ecx){%k7}	 # AVX512F
vpmovsdw %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovsdw %zmm6, 4064(%edx)	 # AVX512F Disp8
vpmovsdw %zmm6, 4096(%edx)	 # AVX512F
vpmovsdw %zmm6, -4096(%edx)	 # AVX512F Disp8
vpmovsdw %zmm6, -4128(%edx)	 # AVX512F
vpmovusdw %zmm6, (%ecx)	 # AVX512F
vpmovusdw %zmm6, (%ecx){%k7}	 # AVX512F
vpmovusdw %zmm6, -123456(%esp,%esi,8)	 # AVX512F
vpmovusdw %zmm6, 4064(%edx)	 # AVX512F Disp8
vpmovusdw %zmm6, 4096(%edx)	 # AVX512F
vpmovusdw %zmm6, -4096(%edx)	 # AVX512F Disp8
vpmovusdw %zmm6, -4128(%edx)	 # AVX512F
# Truncating conversions to unsigned integers: packed pd->udq / ps->udq with
# {sae}, {1toN} broadcast and Disp8 boundaries, then scalar sd/ss->usi into
# 32-bit GPRs (eax and ebp exercise different ModRM base encodings).
vcvttpd2udq %zmm5, %ymm6{%k7}	 # AVX512F
vcvttpd2udq %zmm5, %ymm6{%k7}{z}	 # AVX512F
vcvttpd2udq {sae}, %zmm5, %ymm6{%k7}	 # AVX512F
vcvttpd2udq (%ecx), %ymm6{%k7}	 # AVX512F
vcvttpd2udq -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512F
vcvttpd2udq (%eax){1to8}, %ymm6{%k7}	 # AVX512F
vcvttpd2udq 8128(%edx), %ymm6{%k7}	 # AVX512F Disp8
vcvttpd2udq 8192(%edx), %ymm6{%k7}	 # AVX512F
vcvttpd2udq -8192(%edx), %ymm6{%k7}	 # AVX512F Disp8
vcvttpd2udq -8256(%edx), %ymm6{%k7}	 # AVX512F
vcvttpd2udq 1016(%edx){1to8}, %ymm6{%k7}	 # AVX512F Disp8
vcvttpd2udq 1024(%edx){1to8}, %ymm6{%k7}	 # AVX512F
vcvttpd2udq -1024(%edx){1to8}, %ymm6{%k7}	 # AVX512F Disp8
vcvttpd2udq -1032(%edx){1to8}, %ymm6{%k7}	 # AVX512F
vcvttps2udq %zmm5, %zmm6	 # AVX512F
vcvttps2udq %zmm5, %zmm6{%k7}	 # AVX512F
vcvttps2udq %zmm5, %zmm6{%k7}{z}	 # AVX512F
vcvttps2udq {sae}, %zmm5, %zmm6	 # AVX512F
vcvttps2udq (%ecx), %zmm6	 # AVX512F
vcvttps2udq -123456(%esp,%esi,8), %zmm6	 # AVX512F
vcvttps2udq (%eax){1to16}, %zmm6	 # AVX512F
vcvttps2udq 8128(%edx), %zmm6	 # AVX512F Disp8
vcvttps2udq 8192(%edx), %zmm6	 # AVX512F
vcvttps2udq -8192(%edx), %zmm6	 # AVX512F Disp8
vcvttps2udq -8256(%edx), %zmm6	 # AVX512F
vcvttps2udq 508(%edx){1to16}, %zmm6	 # AVX512F Disp8
vcvttps2udq 512(%edx){1to16}, %zmm6	 # AVX512F
vcvttps2udq -512(%edx){1to16}, %zmm6	 # AVX512F Disp8
vcvttps2udq -516(%edx){1to16}, %zmm6	 # AVX512F
vcvttsd2usi %xmm6, %eax	 # AVX512F
vcvttsd2usi {sae}, %xmm6, %eax	 # AVX512F
vcvttsd2usi (%ecx), %eax	 # AVX512F
vcvttsd2usi -123456(%esp,%esi,8), %eax	 # AVX512F
vcvttsd2usi 1016(%edx), %eax	 # AVX512F Disp8
vcvttsd2usi 1024(%edx), %eax	 # AVX512F
vcvttsd2usi -1024(%edx), %eax	 # AVX512F Disp8
vcvttsd2usi -1032(%edx), %eax	 # AVX512F
vcvttsd2usi %xmm6, %ebp	 # AVX512F
vcvttsd2usi {sae}, %xmm6, %ebp	 # AVX512F
vcvttsd2usi (%ecx), %ebp	 # AVX512F
vcvttsd2usi -123456(%esp,%esi,8), %ebp	 # AVX512F
vcvttsd2usi 1016(%edx), %ebp	 # AVX512F Disp8
vcvttsd2usi 1024(%edx), %ebp	 # AVX512F
vcvttsd2usi -1024(%edx), %ebp	 # AVX512F Disp8
vcvttsd2usi -1032(%edx), %ebp	 # AVX512F
vcvttss2usi %xmm6, %eax	 # AVX512F
vcvttss2usi {sae}, %xmm6, %eax	 # AVX512F
vcvttss2usi (%ecx), %eax	 # AVX512F
vcvttss2usi -123456(%esp,%esi,8), %eax	 # AVX512F
vcvttss2usi 508(%edx), %eax	 # AVX512F Disp8
vcvttss2usi 512(%edx), %eax	 # AVX512F
vcvttss2usi -512(%edx), %eax	 # AVX512F Disp8
vcvttss2usi -516(%edx), %eax	 # AVX512F
vcvttss2usi %xmm6, %ebp	 # AVX512F
vcvttss2usi {sae}, %xmm6, %ebp	 # AVX512F
vcvttss2usi (%ecx), %ebp	 # AVX512F
vcvttss2usi -123456(%esp,%esi,8), %ebp	 # AVX512F
vcvttss2usi 508(%edx), %ebp	 # AVX512F Disp8
vcvttss2usi 512(%edx), %ebp	 # AVX512F
vcvttss2usi -512(%edx), %ebp	 # AVX512F Disp8
vcvttss2usi -516(%edx), %ebp	 # AVX512F
# Two-source full permutes (index operand overwritten). Dword forms use
# {1to16}/N=4 broadcast disp8 boundaries (508 = 4*127), qword forms {1to8}/N=8.
vpermi2d %zmm4, %zmm5, %zmm6	 # AVX512F
vpermi2d %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
vpermi2d %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
vpermi2d (%ecx), %zmm5, %zmm6	 # AVX512F
vpermi2d -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
vpermi2d (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
vpermi2d 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2d 8192(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2d -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2d -8256(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2d 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2d 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
vpermi2d -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2d -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
vpermi2q %zmm4, %zmm5, %zmm6	 # AVX512F
vpermi2q %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
vpermi2q %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
vpermi2q (%ecx), %zmm5, %zmm6	 # AVX512F
vpermi2q -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
vpermi2q (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
vpermi2q 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2q 8192(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2q -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2q -8256(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2q 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2q 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
vpermi2q -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2q -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
vpermi2ps %zmm4, %zmm5, %zmm6	 # AVX512F
vpermi2ps %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
vpermi2ps %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
vpermi2ps (%ecx), %zmm5, %zmm6	 # AVX512F
vpermi2ps -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
vpermi2ps (%eax){1to16}, %zmm5, %zmm6	 # AVX512F
vpermi2ps 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2ps 8192(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2ps -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2ps -8256(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2ps 508(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2ps 512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
vpermi2ps -512(%edx){1to16}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2ps -516(%edx){1to16}, %zmm5, %zmm6	 # AVX512F
vpermi2pd %zmm4, %zmm5, %zmm6	 # AVX512F
vpermi2pd %zmm4, %zmm5, %zmm6{%k7}	 # AVX512F
vpermi2pd %zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512F
vpermi2pd (%ecx), %zmm5, %zmm6	 # AVX512F
vpermi2pd -123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F
vpermi2pd (%eax){1to8}, %zmm5, %zmm6	 # AVX512F
vpermi2pd 8128(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2pd 8192(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2pd -8192(%edx), %zmm5, %zmm6	 # AVX512F Disp8
vpermi2pd -8256(%edx), %zmm5, %zmm6	 # AVX512F
vpermi2pd 1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2pd 1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
vpermi2pd -1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512F Disp8
vpermi2pd -1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512F
# vptestnm writes a mask register, so no {z} zero-masking form exists here.
vptestnmd %zmm4, %zmm5, %k5	 # AVX512F
vptestnmd %zmm4, %zmm5, %k5{%k7}	 # AVX512F
vptestnmd (%ecx), %zmm5, %k5	 # AVX512F
vptestnmd -123456(%esp,%esi,8), %zmm5, %k5	 # AVX512F
vptestnmd (%eax){1to16}, %zmm5, %k5	 # AVX512F
vptestnmd 8128(%edx), %zmm5, %k5	 # AVX512F Disp8
vptestnmd 8192(%edx), %zmm5, %k5	 # AVX512F
vptestnmd -8192(%edx), %zmm5, %k5	 # AVX512F Disp8
vptestnmd -8256(%edx), %zmm5, %k5	 # AVX512F
vptestnmd 508(%edx){1to16}, %zmm5, %k5	 # AVX512F Disp8
vptestnmd 512(%edx){1to16}, %zmm5, %k5	 # AVX512F
vptestnmd -512(%edx){1to16}, %zmm5, %k5	 # AVX512F Disp8
vptestnmd -516(%edx){1to16}, %zmm5, %k5	 # AVX512F
vptestnmq %zmm4, %zmm5, %k5	 # AVX512F
vptestnmq %zmm4, %zmm5, %k5{%k7}	 # AVX512F
vptestnmq (%ecx), %zmm5, %k5	 # AVX512F
vptestnmq -123456(%esp,%esi,8), %zmm5, %k5	 # AVX512F
vptestnmq (%eax){1to8}, %zmm5, %k5	 # AVX512F
vptestnmq 8128(%edx), %zmm5, %k5	 # AVX512F Disp8
vptestnmq 8192(%edx), %zmm5, %k5	 # AVX512F
vptestnmq -8192(%edx), %zmm5, %k5	 # AVX512F Disp8
vptestnmq -8256(%edx), %zmm5, %k5	 # AVX512F
vptestnmq 1016(%edx){1to8}, %zmm5, %k5	 # AVX512F Disp8
vptestnmq 1024(%edx){1to8}, %zmm5, %k5	 # AVX512F
vptestnmq -1024(%edx){1to8}, %zmm5, %k5	 # AVX512F Disp8
vptestnmq -1032(%edx){1to8}, %zmm5, %k5	 # AVX512F
# EVEX memory operands with a 16-bit base register (%bx) — exercises the
# assembler's displacement handling under 16-bit addressing.
vaddps (%bx), %zmm0, %zmm0
vaddps 0x40(%bx), %zmm0, %zmm0
vaddps 0x1234(%bx), %zmm0, %zmm0
# From here on the same instruction forms are re-tested in Intel syntax:
# masks attach to the destination ({k7}/{k7}{z}), broadcasts are spelled
# "qword/dword bcst [...]", and rounding/sae modifiers follow the last operand.
.intel_syntax noprefix
vaddpd zmm6, zmm5, zmm4	 # AVX512F
vaddpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vaddpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vaddpd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vaddpd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vaddpd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vaddpd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vaddpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vaddpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vaddpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vaddpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vaddpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vaddpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vaddpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vaddpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vaddps zmm6, zmm5, zmm4	 # AVX512F
vaddps zmm6{k7}, zmm5, zmm4	 # AVX512F
vaddps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vaddps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vaddps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vaddps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vaddps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vaddps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vaddps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vaddps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vaddps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vaddps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vaddps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vaddps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vaddps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vaddsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vaddsd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vaddss xmm6{k7}, xmm5, xmm4	 # AVX512F
vaddss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vaddss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# Intel-syntax valignd (imm8 variants), vblendm*, and the broadcast loads.
# vbroadcastf/i 32x4/64x4 take 128-/256-bit memory sources (N=16/32);
# vbroadcastsd/ss also have register-source forms (masked only here).
valignd zmm6, zmm5, zmm4, 0xab	 # AVX512F
valignd zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
valignd zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
valignd zmm6, zmm5, zmm4, 123	 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
valignd zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
valignd zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
valignd zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
valignd zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
valignd zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
valignd zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
valignd zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
valignd zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
vblendmpd zmm6, zmm5, zmm4	 # AVX512F
vblendmpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vblendmpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vblendmpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vblendmpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vblendmpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vblendmpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vblendmpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vblendmpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vblendmps zmm6, zmm5, zmm4	 # AVX512F
vblendmps zmm6{k7}, zmm5, zmm4	 # AVX512F
vblendmps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vblendmps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vblendmps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vblendmps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vblendmps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vblendmps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vblendmps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vblendmps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vblendmps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [ecx]	 # AVX512F
vbroadcastf32x4 zmm6{k7}, XMMWORD PTR [ecx]	 # AVX512F
vbroadcastf32x4 zmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [edx+2032]	 # AVX512F Disp8
vbroadcastf32x4 zmm6, XMMWORD PTR [edx+2048]	 # AVX512F
vbroadcastf32x4 zmm6, XMMWORD PTR [edx-2048]	 # AVX512F Disp8
vbroadcastf32x4 zmm6, XMMWORD PTR [edx-2064]	 # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [ecx]	 # AVX512F
vbroadcastf64x4 zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512F
vbroadcastf64x4 zmm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [edx+4064]	 # AVX512F Disp8
vbroadcastf64x4 zmm6, YMMWORD PTR [edx+4096]	 # AVX512F
vbroadcastf64x4 zmm6, YMMWORD PTR [edx-4096]	 # AVX512F Disp8
vbroadcastf64x4 zmm6, YMMWORD PTR [edx-4128]	 # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [ecx]	 # AVX512F
vbroadcasti32x4 zmm6{k7}, XMMWORD PTR [ecx]	 # AVX512F
vbroadcasti32x4 zmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [edx+2032]	 # AVX512F Disp8
vbroadcasti32x4 zmm6, XMMWORD PTR [edx+2048]	 # AVX512F
vbroadcasti32x4 zmm6, XMMWORD PTR [edx-2048]	 # AVX512F Disp8
vbroadcasti32x4 zmm6, XMMWORD PTR [edx-2064]	 # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [ecx]	 # AVX512F
vbroadcasti64x4 zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512F
vbroadcasti64x4 zmm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [edx+4064]	 # AVX512F Disp8
vbroadcasti64x4 zmm6, YMMWORD PTR [edx+4096]	 # AVX512F
vbroadcasti64x4 zmm6, YMMWORD PTR [edx-4096]	 # AVX512F Disp8
vbroadcasti64x4 zmm6, YMMWORD PTR [edx-4128]	 # AVX512F
vbroadcastsd zmm6, QWORD PTR [ecx]	 # AVX512F
vbroadcastsd zmm6{k7}, QWORD PTR [ecx]	 # AVX512F
vbroadcastsd zmm6{k7}{z}, QWORD PTR [ecx]	 # AVX512F
vbroadcastsd zmm6, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vbroadcastsd zmm6, QWORD PTR [edx+1016]	 # AVX512F Disp8
vbroadcastsd zmm6, QWORD PTR [edx+1024]	 # AVX512F
vbroadcastsd zmm6, QWORD PTR [edx-1024]	 # AVX512F Disp8
vbroadcastsd zmm6, QWORD PTR [edx-1032]	 # AVX512F
vbroadcastsd zmm6{k7}, xmm5	 # AVX512F
vbroadcastsd zmm6{k7}{z}, xmm5	 # AVX512F
vbroadcastss zmm6, DWORD PTR [ecx]	 # AVX512F
vbroadcastss zmm6{k7}, DWORD PTR [ecx]	 # AVX512F
vbroadcastss zmm6{k7}{z}, DWORD PTR [ecx]	 # AVX512F
vbroadcastss zmm6, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vbroadcastss zmm6, DWORD PTR [edx+508]	 # AVX512F Disp8
vbroadcastss zmm6, DWORD PTR [edx+512]	 # AVX512F
vbroadcastss zmm6, DWORD PTR [edx-512]	 # AVX512F Disp8
vbroadcastss zmm6, DWORD PTR [edx-516]	 # AVX512F
vbroadcastss zmm6{k7}, xmm5	 # AVX512F
vbroadcastss zmm6{k7}{z}, xmm5	 # AVX512F
vcmppd k5, zmm6, zmm5, 0xab # AVX512F
vcmppd k5{k7}, zmm6, zmm5, 0xab # AVX512F
vcmppd k5, zmm6, zmm5{sae}, 0xab # AVX512F
vcmppd k5, zmm6, zmm5, 123 # AVX512F
vcmppd k5, zmm6, zmm5{sae}, 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vcmppd k5, zmm6, qword bcst [eax], 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vcmppd k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vcmppd k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vcmppd k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vcmppd k5, zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vcmppd k5, zmm6, qword bcst [edx+1024], 123 # AVX512F
vcmppd k5, zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vcmppd k5, zmm6, qword bcst [edx-1032], 123 # AVX512F
vcmpeq_oqpd k5, zmm6, zmm5 # AVX512F
vcmpeq_oqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_oqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_oqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_oqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpeq_oqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpeq_oqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpeqpd k5, zmm6, zmm5 # AVX512F
vcmpeqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpeqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpeqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpeqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpeqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmplt_ospd k5, zmm6, zmm5 # AVX512F
vcmplt_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmplt_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmplt_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmplt_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmplt_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmplt_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmplt_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpltpd k5, zmm6, zmm5 # AVX512F
vcmpltpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpltpd k5, zmm6, zmm5{sae} # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpltpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpltpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpltpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpltpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpltpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpltpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpltpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpltpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmple_ospd k5, zmm6, zmm5 # AVX512F
vcmple_ospd k5{k7}, zmm6, zmm5 # AVX512F
vcmple_ospd k5, zmm6, zmm5{sae} # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_ospd k5, zmm6, qword bcst [eax] # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmple_ospd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmple_ospd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmple_ospd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmple_ospd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmple_ospd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmplepd k5, zmm6, zmm5 # AVX512F
vcmplepd k5{k7}, zmm6, zmm5 # AVX512F
vcmplepd k5, zmm6, zmm5{sae} # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmplepd k5, zmm6, qword bcst [eax] # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmplepd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmplepd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmplepd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmplepd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmplepd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmplepd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmplepd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpunord_qpd k5, zmm6, zmm5 # AVX512F
vcmpunord_qpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpunord_qpd k5, zmm6, zmm5{sae} # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_qpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpunord_qpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpunord_qpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpunord_qpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpunordpd k5, zmm6, zmm5 # AVX512F
vcmpunordpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpunordpd k5, zmm6, zmm5{sae} # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunordpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpunordpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpunordpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpunordpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpunordpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpunordpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpneq_uqpd k5, zmm6, zmm5 # AVX512F
vcmpneq_uqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_uqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_uqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_uqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpneq_uqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpneq_uqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpneqpd k5, zmm6, zmm5 # AVX512F
vcmpneqpd k5{k7}, zmm6, zmm5 # AVX512F
vcmpneqpd k5, zmm6, zmm5{sae} # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneqpd k5, zmm6, qword bcst [eax] # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneqpd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneqpd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpneqpd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpneqpd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpneqpd k5, zmm6, qword bcst [edx-1032] # AVX512F
vcmpnlt_uspd k5, zmm6, zmm5 # AVX512F
vcmpnlt_uspd k5{k7}, zmm6, zmm5 # AVX512F
vcmpnlt_uspd k5, zmm6, zmm5{sae} # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uspd k5, zmm6, qword bcst [eax] # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnlt_uspd k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, qword bcst [edx+1024] # AVX512F
vcmpnlt_uspd k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vcmpnlt_uspd k5, zmm6, qword bcst [edx-1032] # AVX512F
# Each group in this section exercises one CMPPD comparison pseudo-op with:
# a plain register source, a {k7} write-mask, {sae}, direct and scaled-index
# memory operands, a qword embedded-broadcast source, and displacements just
# inside/outside the EVEX Disp8*N compression range (lines marked "Disp8"
# expect the compressed 8-bit displacement encoding; their neighbours the
# full 32-bit form).

# vcmpnltpd -- reg, masking, SAE, memory, qword bcst, Disp8*N boundaries
vcmpnltpd k5, zmm6, zmm5	 # AVX512F
vcmpnltpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnltpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnltpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnltpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnltpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnltpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnltpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnltpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpnle_uspd -- reg, masking, SAE, memory, qword bcst, Disp8*N boundaries
vcmpnle_uspd k5, zmm6, zmm5	 # AVX512F
vcmpnle_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnle_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnle_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnle_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnle_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnle_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnle_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnle_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpnlepd -- reg, masking, SAE, memory, qword bcst, Disp8*N boundaries
vcmpnlepd k5, zmm6, zmm5	 # AVX512F
vcmpnlepd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnlepd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnlepd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnlepd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnlepd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnlepd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnlepd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnlepd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpord_qpd -- reg, masking, SAE, memory, qword bcst, Disp8*N boundaries
vcmpord_qpd k5, zmm6, zmm5	 # AVX512F
vcmpord_qpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpord_qpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpord_qpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpord_qpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpord_qpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpord_qpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpord_qpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpord_qpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpordpd -- reg, masking, SAE, memory, qword bcst, Disp8*N boundaries
vcmpordpd k5, zmm6, zmm5	 # AVX512F
vcmpordpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpordpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpordpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpordpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpordpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpordpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpordpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpordpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpordpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpordpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpeq_uqpd -- reg, masking, SAE, memory, qword bcst, Disp8*N boundaries
vcmpeq_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpeq_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpeq_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpeq_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# CMPPD comparison pseudo-ops, continued: same operand matrix per group
# (reg, {k7} mask, {sae}, direct/indexed memory, qword broadcast, and
# displacements straddling the EVEX Disp8*N compression limits).

# vcmpnge_uspd
vcmpnge_uspd k5, zmm6, zmm5	 # AVX512F
vcmpnge_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnge_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnge_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnge_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnge_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnge_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnge_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnge_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpngepd
vcmpngepd k5, zmm6, zmm5	 # AVX512F
vcmpngepd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngepd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngepd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngepd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngepd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngepd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngepd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpngepd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpngepd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpngepd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpngt_uspd
vcmpngt_uspd k5, zmm6, zmm5	 # AVX512F
vcmpngt_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngt_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngt_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngt_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngt_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpngt_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpngt_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpngt_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpngtpd
vcmpngtpd k5, zmm6, zmm5	 # AVX512F
vcmpngtpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngtpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngtpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngtpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngtpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpngtpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpngtpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpngtpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpfalse_oqpd
vcmpfalse_oqpd k5, zmm6, zmm5	 # AVX512F
vcmpfalse_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpfalse_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalse_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpfalse_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpfalse_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpfalse_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpfalsepd
vcmpfalsepd k5, zmm6, zmm5	 # AVX512F
vcmpfalsepd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpfalsepd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalsepd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpfalsepd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpfalsepd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpfalsepd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpfalsepd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpfalsepd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# CMPPD comparison pseudo-ops, continued: same operand matrix per group
# (reg, {k7} mask, {sae}, direct/indexed memory, qword broadcast, and
# displacements straddling the EVEX Disp8*N compression limits).

# vcmpneq_oqpd
vcmpneq_oqpd k5, zmm6, zmm5	 # AVX512F
vcmpneq_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpneq_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpneq_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpge_ospd
vcmpge_ospd k5, zmm6, zmm5	 # AVX512F
vcmpge_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpge_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpge_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpge_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpge_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpge_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpge_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpge_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpgepd
vcmpgepd k5, zmm6, zmm5	 # AVX512F
vcmpgepd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgepd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgepd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgepd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgepd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgepd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgepd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpgepd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpgepd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpgepd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpgt_ospd
vcmpgt_ospd k5, zmm6, zmm5	 # AVX512F
vcmpgt_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgt_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgt_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgt_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgt_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpgt_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpgt_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpgt_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpgtpd
vcmpgtpd k5, zmm6, zmm5	 # AVX512F
vcmpgtpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgtpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgtpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgtpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgtpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpgtpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpgtpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpgtpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmptrue_uqpd
vcmptrue_uqpd k5, zmm6, zmm5	 # AVX512F
vcmptrue_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmptrue_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptrue_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmptrue_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmptrue_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmptrue_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# CMPPD comparison pseudo-ops, continued: same operand matrix per group
# (reg, {k7} mask, {sae}, direct/indexed memory, qword broadcast, and
# displacements straddling the EVEX Disp8*N compression limits).

# vcmptruepd
vcmptruepd k5, zmm6, zmm5	 # AVX512F
vcmptruepd k5{k7}, zmm6, zmm5	 # AVX512F
vcmptruepd k5, zmm6, zmm5{sae}	 # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptruepd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmptruepd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmptruepd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmptruepd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmptruepd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmptruepd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmptruepd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmptruepd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpeq_ospd
vcmpeq_ospd k5, zmm6, zmm5	 # AVX512F
vcmpeq_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpeq_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpeq_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpeq_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmplt_oqpd
vcmplt_oqpd k5, zmm6, zmm5	 # AVX512F
vcmplt_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmplt_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmplt_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmplt_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmplt_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmplt_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmplt_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmplt_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmple_oqpd
vcmple_oqpd k5, zmm6, zmm5	 # AVX512F
vcmple_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmple_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmple_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmple_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmple_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmple_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmple_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmple_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpunord_spd
vcmpunord_spd k5, zmm6, zmm5	 # AVX512F
vcmpunord_spd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpunord_spd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunord_spd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunord_spd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpunord_spd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpunord_spd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpneq_uspd
vcmpneq_uspd k5, zmm6, zmm5	 # AVX512F
vcmpneq_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpneq_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpneq_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# CMPPD comparison pseudo-ops, continued: same operand matrix per group
# (reg, {k7} mask, {sae}, direct/indexed memory, qword broadcast, and
# displacements straddling the EVEX Disp8*N compression limits).

# vcmpnlt_uqpd
vcmpnlt_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpnlt_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnlt_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnlt_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnlt_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpnle_uqpd
vcmpnle_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpnle_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnle_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnle_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnle_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnle_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnle_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpord_spd
vcmpord_spd k5, zmm6, zmm5	 # AVX512F
vcmpord_spd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpord_spd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpord_spd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpord_spd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpord_spd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpord_spd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpeq_uspd
vcmpeq_uspd k5, zmm6, zmm5	 # AVX512F
vcmpeq_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpeq_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpeq_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpnge_uqpd
vcmpnge_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpnge_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnge_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnge_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnge_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpnge_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpnge_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpngt_uqpd
vcmpngt_uqpd k5, zmm6, zmm5	 # AVX512F
vcmpngt_uqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpngt_uqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngt_uqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpngt_uqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpngt_uqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpngt_uqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# Final CMPPD comparison pseudo-op groups: same operand matrix per group
# (reg, {k7} mask, {sae}, direct/indexed memory, qword broadcast, and
# displacements straddling the EVEX Disp8*N compression limits).

# vcmpfalse_ospd
vcmpfalse_ospd k5, zmm6, zmm5	 # AVX512F
vcmpfalse_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpfalse_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalse_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpfalse_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpfalse_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpfalse_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpneq_ospd
vcmpneq_ospd k5, zmm6, zmm5	 # AVX512F
vcmpneq_ospd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_ospd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_ospd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_ospd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpneq_ospd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpneq_ospd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpge_oqpd
vcmpge_oqpd k5, zmm6, zmm5	 # AVX512F
vcmpge_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpge_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpge_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpge_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpge_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpge_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpgt_oqpd
vcmpgt_oqpd k5, zmm6, zmm5	 # AVX512F
vcmpgt_oqpd k5{k7}, zmm6, zmm5	 # AVX512F
vcmpgt_oqpd k5, zmm6, zmm5{sae}	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgt_oqpd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpgt_oqpd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmpgt_oqpd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmpgt_oqpd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmptrue_uspd
vcmptrue_uspd k5, zmm6, zmm5	 # AVX512F
vcmptrue_uspd k5{k7}, zmm6, zmm5	 # AVX512F
vcmptrue_uspd k5, zmm6, zmm5{sae}	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptrue_uspd k5, zmm6, qword bcst [eax]	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmptrue_uspd k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, qword bcst [edx+1024]	 # AVX512F
vcmptrue_uspd k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vcmptrue_uspd k5, zmm6, qword bcst [edx-1032]	 # AVX512F
# vcmpps with an explicit imm8 predicate (0xab, then 123) instead of a
# pseudo-op suffix; PS forms use dword broadcast, so the broadcast Disp8*4
# compression boundary moves to +/-508/512 (vs +/-1016/1024 for PD).
vcmpps k5, zmm6, zmm5, 0xab	 # AVX512F
vcmpps k5{k7}, zmm6, zmm5, 0xab	 # AVX512F
vcmpps k5, zmm6, zmm5{sae}, 0xab	 # AVX512F
vcmpps k5, zmm6, zmm5, 123	 # AVX512F
vcmpps k5, zmm6, zmm5{sae}, 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vcmpps k5, zmm6, dword bcst [eax], 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vcmpps k5, zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vcmpps k5, zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vcmpps k5, zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vcmpps k5, zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vcmpps k5, zmm6, dword bcst [edx+512], 123	 # AVX512F
vcmpps k5, zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vcmpps k5, zmm6, dword bcst [edx-516], 123	 # AVX512F
# CMPPS comparison pseudo-ops: same operand matrix as the PD groups, but
# with dword broadcast sources, so broadcast Disp8*4 boundaries sit at
# +508/+512 and -512/-516.

# vcmpeq_oqps
vcmpeq_oqps k5, zmm6, zmm5	 # AVX512F
vcmpeq_oqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeq_oqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_oqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeq_oqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpeq_oqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpeq_oqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpeqps
vcmpeqps k5, zmm6, zmm5	 # AVX512F
vcmpeqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpeqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpeqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpeqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpeqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpeqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpeqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpeqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpeqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmplt_osps
vcmplt_osps k5, zmm6, zmm5	 # AVX512F
vcmplt_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmplt_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmplt_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmplt_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmplt_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmplt_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpltps
vcmpltps k5, zmm6, zmm5	 # AVX512F
vcmpltps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpltps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpltps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpltps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpltps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpltps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpltps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpltps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpltps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpltps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmple_osps
vcmple_osps k5, zmm6, zmm5	 # AVX512F
vcmple_osps k5{k7}, zmm6, zmm5	 # AVX512F
vcmple_osps k5, zmm6, zmm5{sae}	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmple_osps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmple_osps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmple_osps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmple_osps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmple_osps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmple_osps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmple_osps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmple_osps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpleps
vcmpleps k5, zmm6, zmm5	 # AVX512F
vcmpleps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpleps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpleps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpleps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpleps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpleps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpleps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpleps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpleps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpleps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# CMPPS comparison pseudo-ops, continued: same operand matrix per group
# (reg, {k7} mask, {sae}, direct/indexed memory, dword broadcast, and
# displacements straddling the EVEX Disp8*N compression limits).

# vcmpunord_qps
vcmpunord_qps k5, zmm6, zmm5	 # AVX512F
vcmpunord_qps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpunord_qps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunord_qps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunord_qps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpunord_qps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpunord_qps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpunordps
vcmpunordps k5, zmm6, zmm5	 # AVX512F
vcmpunordps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpunordps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunordps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpunordps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpunordps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpunordps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpunordps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpunordps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpunordps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpunordps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpneq_uqps
vcmpneq_uqps k5, zmm6, zmm5	 # AVX512F
vcmpneq_uqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneq_uqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_uqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneq_uqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpneq_uqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpneq_uqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpneqps
vcmpneqps k5, zmm6, zmm5	 # AVX512F
vcmpneqps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpneqps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneqps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpneqps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpneqps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpneqps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpneqps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpneqps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpneqps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpneqps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpnlt_usps
vcmpnlt_usps k5, zmm6, zmm5	 # AVX512F
vcmpnlt_usps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnlt_usps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnlt_usps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnlt_usps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnlt_usps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnlt_usps k5, zmm6, dword bcst [edx-516]	 # AVX512F
# vcmpnltps
vcmpnltps k5, zmm6, zmm5	 # AVX512F
vcmpnltps k5{k7}, zmm6, zmm5	 # AVX512F
vcmpnltps k5, zmm6, zmm5{sae}	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnltps k5, zmm6, dword bcst [eax]	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcmpnltps k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcmpnltps k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcmpnltps k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcmpnltps k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcmpnltps k5, zmm6, dword bcst [edx+512]	 # AVX512F
vcmpnltps k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcmpnltps k5, zmm6, dword bcst [edx-516]	 # AVX512F
vcmpnle_usps k5, zmm6, zmm5 # AVX512F
vcmpnle_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnle_usps k5, zmm6, zmm5{sae} # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnle_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnle_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnle_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnle_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnle_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnleps k5, zmm6, zmm5 # AVX512F
vcmpnleps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnleps k5, zmm6, zmm5{sae} # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnleps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnleps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnleps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnleps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnleps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnleps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnleps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnleps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpord_qps k5, zmm6, zmm5 # AVX512F
vcmpord_qps k5{k7}, zmm6, zmm5 # AVX512F
vcmpord_qps k5, zmm6, zmm5{sae} # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_qps k5, zmm6, dword bcst [eax] # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpord_qps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpord_qps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpord_qps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpord_qps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpord_qps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpordps k5, zmm6, zmm5 # AVX512F
vcmpordps k5{k7}, zmm6, zmm5 # AVX512F
vcmpordps k5, zmm6, zmm5{sae} # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpordps k5, zmm6, dword bcst [eax] # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpordps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpordps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpordps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpordps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpordps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpordps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpordps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpeq_uqps k5, zmm6, zmm5 # AVX512F
vcmpeq_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpeq_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpeq_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpeq_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnge_usps k5, zmm6, zmm5 # AVX512F
vcmpnge_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnge_usps k5, zmm6, zmm5{sae} # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnge_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnge_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnge_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnge_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnge_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpngeps k5, zmm6, zmm5 # AVX512F
vcmpngeps k5{k7}, zmm6, zmm5 # AVX512F
vcmpngeps k5, zmm6, zmm5{sae} # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngeps k5, zmm6, dword bcst [eax] # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngeps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngeps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngeps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngeps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpngeps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpngeps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpngeps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpngt_usps k5, zmm6, zmm5 # AVX512F
vcmpngt_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmpngt_usps k5, zmm6, zmm5{sae} # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngt_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngt_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpngt_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpngt_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpngt_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpngtps k5, zmm6, zmm5 # AVX512F
vcmpngtps k5{k7}, zmm6, zmm5 # AVX512F
vcmpngtps k5, zmm6, zmm5{sae} # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngtps k5, zmm6, dword bcst [eax] # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngtps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngtps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngtps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngtps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpngtps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpngtps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpngtps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpfalse_oqps k5, zmm6, zmm5 # AVX512F
vcmpfalse_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpfalse_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpfalse_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpfalse_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpfalse_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpfalseps k5, zmm6, zmm5 # AVX512F
vcmpfalseps k5{k7}, zmm6, zmm5 # AVX512F
vcmpfalseps k5, zmm6, zmm5{sae} # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalseps k5, zmm6, dword bcst [eax] # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpfalseps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpfalseps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpfalseps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpfalseps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpfalseps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpneq_oqps k5, zmm6, zmm5 # AVX512F
vcmpneq_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpneq_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpneq_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpneq_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpge_osps k5, zmm6, zmm5 # AVX512F
vcmpge_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpge_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpge_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpge_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpge_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpge_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpge_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpgeps k5, zmm6, zmm5 # AVX512F
vcmpgeps k5{k7}, zmm6, zmm5 # AVX512F
vcmpgeps k5, zmm6, zmm5{sae} # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgeps k5, zmm6, dword bcst [eax] # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgeps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgeps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgeps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgeps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpgeps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpgeps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpgeps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpgt_osps k5, zmm6, zmm5 # AVX512F
vcmpgt_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpgt_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgt_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgt_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpgt_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpgt_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpgt_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpgtps k5, zmm6, zmm5 # AVX512F
vcmpgtps k5{k7}, zmm6, zmm5 # AVX512F
vcmpgtps k5, zmm6, zmm5{sae} # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgtps k5, zmm6, dword bcst [eax] # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgtps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgtps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgtps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgtps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpgtps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpgtps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpgtps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmptrue_uqps k5, zmm6, zmm5 # AVX512F
vcmptrue_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmptrue_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmptrue_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmptrue_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmptrue_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmptrue_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmptrue_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmptrueps k5, zmm6, zmm5 # AVX512F
vcmptrueps k5{k7}, zmm6, zmm5 # AVX512F
vcmptrueps k5, zmm6, zmm5{sae} # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrueps k5, zmm6, dword bcst [eax] # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmptrueps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmptrueps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmptrueps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmptrueps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmptrueps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmptrueps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmptrueps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpeq_osps k5, zmm6, zmm5 # AVX512F
vcmpeq_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpeq_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpeq_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpeq_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmplt_oqps k5, zmm6, zmm5 # AVX512F
vcmplt_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmplt_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmplt_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmplt_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmplt_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmplt_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmplt_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmple_oqps k5, zmm6, zmm5 # AVX512F
vcmple_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmple_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmple_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmple_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmple_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmple_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmple_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpunord_sps k5, zmm6, zmm5 # AVX512F
vcmpunord_sps k5{k7}, zmm6, zmm5 # AVX512F
vcmpunord_sps k5, zmm6, zmm5{sae} # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_sps k5, zmm6, dword bcst [eax] # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpunord_sps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpunord_sps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpunord_sps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpunord_sps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpunord_sps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpneq_usps k5, zmm6, zmm5 # AVX512F
vcmpneq_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_usps k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpneq_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpneq_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpneq_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnlt_uqps k5, zmm6, zmm5 # AVX512F
vcmpnlt_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnlt_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnlt_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnlt_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnlt_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnle_uqps k5, zmm6, zmm5 # AVX512F
vcmpnle_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnle_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnle_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnle_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnle_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpord_sps k5, zmm6, zmm5 # AVX512F
vcmpord_sps k5{k7}, zmm6, zmm5 # AVX512F
vcmpord_sps k5, zmm6, zmm5{sae} # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_sps k5, zmm6, dword bcst [eax] # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpord_sps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpord_sps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpord_sps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpord_sps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpord_sps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpeq_usps k5, zmm6, zmm5 # AVX512F
vcmpeq_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmpeq_usps k5, zmm6, zmm5{sae} # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpeq_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpeq_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpeq_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpeq_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpeq_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpnge_uqps k5, zmm6, zmm5 # AVX512F
vcmpnge_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpnge_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpnge_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpnge_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpnge_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpngt_uqps k5, zmm6, zmm5 # AVX512F
vcmpngt_uqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpngt_uqps k5, zmm6, zmm5{sae} # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_uqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpngt_uqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpngt_uqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpngt_uqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpfalse_osps k5, zmm6, zmm5 # AVX512F
vcmpfalse_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpfalse_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpfalse_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpfalse_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpfalse_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpneq_osps k5, zmm6, zmm5 # AVX512F
vcmpneq_osps k5{k7}, zmm6, zmm5 # AVX512F
vcmpneq_osps k5, zmm6, zmm5{sae} # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_osps k5, zmm6, dword bcst [eax] # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpneq_osps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpneq_osps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpneq_osps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpneq_osps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpneq_osps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpge_oqps k5, zmm6, zmm5 # AVX512F
vcmpge_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpge_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpge_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpge_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpge_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpge_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpge_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpgt_oqps k5, zmm6, zmm5 # AVX512F
vcmpgt_oqps k5{k7}, zmm6, zmm5 # AVX512F
vcmpgt_oqps k5, zmm6, zmm5{sae} # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_oqps k5, zmm6, dword bcst [eax] # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmpgt_oqps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmpgt_oqps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmpgt_oqps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmptrue_usps k5, zmm6, zmm5 # AVX512F
vcmptrue_usps k5{k7}, zmm6, zmm5 # AVX512F
vcmptrue_usps k5, zmm6, zmm5{sae} # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_usps k5, zmm6, dword bcst [eax] # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcmptrue_usps k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcmptrue_usps k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vcmptrue_usps k5, zmm6, dword bcst [edx+512] # AVX512F
vcmptrue_usps k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vcmptrue_usps k5, zmm6, dword bcst [edx-516] # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, 0xab # AVX512F
vcmpsd k5{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vcmpsd k5{k7}, xmm5, xmm4, 123 # AVX512F
vcmpsd k5{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512F Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512F
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512F Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmplt_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpltsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpltsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmple_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmple_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmplesd k5{k7}, xmm5, xmm4 # AVX512F
vcmplesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_qsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpunordsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpunordsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnltsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnltsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnlesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpord_qsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_qsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpordsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpordsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngtsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngtsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpfalsesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalsesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpge_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgesd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgtsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgtsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmptruesd k5{k7}, xmm5, xmm4 # AVX512F
vcmptruesd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmple_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmple_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_ssd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpord_ssd k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_ssd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_ossd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_ussd k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vcmpss k5{k7}, xmm5, xmm4, 0xab # AVX512F
vcmpss k5{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vcmpss k5{k7}, xmm5, xmm4, 123 # AVX512F
vcmpss k5{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512F Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512F
vcmpss k5{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512F Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512F
vcmpeq_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmplt_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpltss k5{k7}, xmm5, xmm4 # AVX512F
vcmpltss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmple_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmple_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpless k5{k7}, xmm5, xmm4 # AVX512F
vcmpless k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpless k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpunord_qss k5{k7}, xmm5, xmm4 # AVX512F
vcmpunord_qss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpunordss k5{k7}, xmm5, xmm4 # AVX512F
vcmpunordss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnlt_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnltss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnltss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnle_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnle_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnless k5{k7}, xmm5, xmm4 # AVX512F
vcmpnless k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpord_qss k5{k7}, xmm5, xmm4 # AVX512F
vcmpord_qss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpordss k5{k7}, xmm5, xmm4 # AVX512F
vcmpordss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpnge_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpnge_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngess k5{k7}, xmm5, xmm4 # AVX512F
vcmpngess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngt_usss k5{k7}, xmm5, xmm4 # AVX512F
vcmpngt_usss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpngtss k5{k7}, xmm5, xmm4 # AVX512F
vcmpngtss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpfalsess k5{k7}, xmm5, xmm4 # AVX512F
vcmpfalsess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmpneq_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpge_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpge_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgess k5{k7}, xmm5, xmm4 # AVX512F
vcmpgess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgt_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpgt_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpgtss k5{k7}, xmm5, xmm4 # AVX512F
vcmpgtss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, xmm4 # AVX512F
vcmptrue_uqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmptruess k5{k7}, xmm5, xmm4 # AVX512F
vcmptruess k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmpeq_osss k5{k7}, xmm5, xmm4 # AVX512F
vcmpeq_osss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vcmplt_oqss k5{k7}, xmm5, xmm4 # AVX512F
vcmplt_oqss k5{k7}, xmm5, xmm4{sae} # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
# ---------------------------------------------------------------------------
# AVX-512F test vectors, Intel-syntax section (chunk).
# Each line is assembled and its encoding checked against the matching .d
# dump file, so the instruction text must not be altered.
# Lines tagged "Disp8" use a displacement that is an exact multiple of the
# operand's disp8*N granularity and within the 8-bit compressed-displacement
# range (e.g. 508 = 127*4 for dword, 1016 = 127*8 for qword, 8128 = 127*64
# for a full ZMM memory operand); the adjacent untagged lines fall just
# outside that range and are expected to encode with a 32-bit displacement.
# "{kN}" = merging-masked, "{kN}{z}" = zeroing-masked, "{sae}" = suppress
# all exceptions, "{rn/ru/rd/rz-sae}" = embedded static rounding,
# "bcst" = embedded memory broadcast.
# ---------------------------------------------------------------------------
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmple_oqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmple_oqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpunord_sss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpunord_sss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpneq_usss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpneq_usss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpord_sss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpord_sss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpeq_usss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpeq_usss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpneq_osss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpneq_osss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpge_oqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpge_oqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, xmm4	 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcmptrue_usss k5{k7}, xmm5, xmm4	 # AVX512F
vcmptrue_usss k5{k7}, xmm5, xmm4{sae}	 # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# --- Scalar compares with SAE, and compress/expand (element-granular disp8) -
vcomisd xmm6, xmm5{sae}	 # AVX512F
vcomiss xmm6, xmm5{sae}	 # AVX512F
vcompresspd ZMMWORD PTR [ecx], zmm6	 # AVX512F
vcompresspd ZMMWORD PTR [ecx]{k7}, zmm6	 # AVX512F
vcompresspd ZMMWORD PTR [esp+esi*8-123456], zmm6	 # AVX512F
vcompresspd ZMMWORD PTR [edx+1016], zmm6	 # AVX512F Disp8
vcompresspd ZMMWORD PTR [edx+1024], zmm6	 # AVX512F
vcompresspd ZMMWORD PTR [edx-1024], zmm6	 # AVX512F Disp8
vcompresspd ZMMWORD PTR [edx-1032], zmm6	 # AVX512F
vcompresspd zmm6, zmm5	 # AVX512F
vcompresspd zmm6{k7}, zmm5	 # AVX512F
vcompresspd zmm6{k7}{z}, zmm5	 # AVX512F
vcompressps ZMMWORD PTR [ecx], zmm6	 # AVX512F
vcompressps ZMMWORD PTR [ecx]{k7}, zmm6	 # AVX512F
vcompressps ZMMWORD PTR [esp+esi*8-123456], zmm6	 # AVX512F
vcompressps ZMMWORD PTR [edx+508], zmm6	 # AVX512F Disp8
vcompressps ZMMWORD PTR [edx+512], zmm6	 # AVX512F
vcompressps ZMMWORD PTR [edx-512], zmm6	 # AVX512F Disp8
vcompressps ZMMWORD PTR [edx-516], zmm6	 # AVX512F
vcompressps zmm6, zmm5	 # AVX512F
vcompressps zmm6{k7}, zmm5	 # AVX512F
vcompressps zmm6{k7}{z}, zmm5	 # AVX512F
# --- Conversions: masking, embedded rounding/SAE, and memory broadcast ------
vcvtdq2pd zmm6{k7}, ymm5	 # AVX512F
vcvtdq2pd zmm6{k7}{z}, ymm5	 # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtdq2pd zmm6{k7}, dword bcst [eax]	 # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512F Disp8
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512F
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512F Disp8
vcvtdq2pd zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512F
vcvtdq2pd zmm6{k7}, dword bcst [edx+508]	 # AVX512F Disp8
vcvtdq2pd zmm6{k7}, dword bcst [edx+512]	 # AVX512F
vcvtdq2pd zmm6{k7}, dword bcst [edx-512]	 # AVX512F Disp8
vcvtdq2pd zmm6{k7}, dword bcst [edx-516]	 # AVX512F
vcvtdq2ps zmm6, zmm5	 # AVX512F
vcvtdq2ps zmm6{k7}, zmm5	 # AVX512F
vcvtdq2ps zmm6{k7}{z}, zmm5	 # AVX512F
vcvtdq2ps zmm6, zmm5{rn-sae}	 # AVX512F
vcvtdq2ps zmm6, zmm5{ru-sae}	 # AVX512F
vcvtdq2ps zmm6, zmm5{rd-sae}	 # AVX512F
vcvtdq2ps zmm6, zmm5{rz-sae}	 # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtdq2ps zmm6, dword bcst [eax]	 # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtdq2ps zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtdq2ps zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtdq2ps zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtdq2ps zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcvtdq2ps zmm6, dword bcst [edx+512]	 # AVX512F
vcvtdq2ps zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcvtdq2ps zmm6, dword bcst [edx-516]	 # AVX512F
vcvtpd2dq ymm6{k7}, zmm5	 # AVX512F
vcvtpd2dq ymm6{k7}{z}, zmm5	 # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{rn-sae}	 # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{ru-sae}	 # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{rd-sae}	 # AVX512F
vcvtpd2dq ymm6{k7}, zmm5{rz-sae}	 # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtpd2dq ymm6{k7}, qword bcst [eax]	 # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtpd2dq ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtpd2dq ymm6{k7}, qword bcst [edx+1016]	 # AVX512F Disp8
vcvtpd2dq ymm6{k7}, qword bcst [edx+1024]	 # AVX512F
vcvtpd2dq ymm6{k7}, qword bcst [edx-1024]	 # AVX512F Disp8
vcvtpd2dq ymm6{k7}, qword bcst [edx-1032]	 # AVX512F
vcvtpd2ps ymm6{k7}, zmm5	 # AVX512F
vcvtpd2ps ymm6{k7}{z}, zmm5	 # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{rn-sae}	 # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{ru-sae}	 # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{rd-sae}	 # AVX512F
vcvtpd2ps ymm6{k7}, zmm5{rz-sae}	 # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtpd2ps ymm6{k7}, qword bcst [eax]	 # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtpd2ps ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtpd2ps ymm6{k7}, qword bcst [edx+1016]	 # AVX512F Disp8
vcvtpd2ps ymm6{k7}, qword bcst [edx+1024]	 # AVX512F
vcvtpd2ps ymm6{k7}, qword bcst [edx-1024]	 # AVX512F Disp8
vcvtpd2ps ymm6{k7}, qword bcst [edx-1032]	 # AVX512F
vcvtpd2udq ymm6{k7}, zmm5	 # AVX512F
vcvtpd2udq ymm6{k7}{z}, zmm5	 # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{rn-sae}	 # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{ru-sae}	 # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{rd-sae}	 # AVX512F
vcvtpd2udq ymm6{k7}, zmm5{rz-sae}	 # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtpd2udq ymm6{k7}, qword bcst [eax]	 # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtpd2udq ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtpd2udq ymm6{k7}, qword bcst [edx+1016]	 # AVX512F Disp8
vcvtpd2udq ymm6{k7}, qword bcst [edx+1024]	 # AVX512F
vcvtpd2udq ymm6{k7}, qword bcst [edx-1024]	 # AVX512F Disp8
vcvtpd2udq ymm6{k7}, qword bcst [edx-1032]	 # AVX512F
vcvtph2ps zmm6{k7}, ymm5	 # AVX512F
vcvtph2ps zmm6{k7}{z}, ymm5	 # AVX512F
vcvtph2ps zmm6{k7}, ymm5{sae}	 # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512F Disp8
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512F
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512F Disp8
vcvtph2ps zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512F
vcvtps2dq zmm6, zmm5	 # AVX512F
vcvtps2dq zmm6{k7}, zmm5	 # AVX512F
vcvtps2dq zmm6{k7}{z}, zmm5	 # AVX512F
vcvtps2dq zmm6, zmm5{rn-sae}	 # AVX512F
vcvtps2dq zmm6, zmm5{ru-sae}	 # AVX512F
vcvtps2dq zmm6, zmm5{rd-sae}	 # AVX512F
vcvtps2dq zmm6, zmm5{rz-sae}	 # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtps2dq zmm6, dword bcst [eax]	 # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtps2dq zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtps2dq zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtps2dq zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtps2dq zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcvtps2dq zmm6, dword bcst [edx+512]	 # AVX512F
vcvtps2dq zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcvtps2dq zmm6, dword bcst [edx-516]	 # AVX512F
vcvtps2pd zmm6{k7}, ymm5	 # AVX512F
vcvtps2pd zmm6{k7}{z}, ymm5	 # AVX512F
vcvtps2pd zmm6{k7}, ymm5{sae}	 # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtps2pd zmm6{k7}, dword bcst [eax]	 # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512F Disp8
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512F
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512F Disp8
vcvtps2pd zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512F
vcvtps2pd zmm6{k7}, dword bcst [edx+508]	 # AVX512F Disp8
vcvtps2pd zmm6{k7}, dword bcst [edx+512]	 # AVX512F
vcvtps2pd zmm6{k7}, dword bcst [edx-512]	 # AVX512F Disp8
vcvtps2pd zmm6{k7}, dword bcst [edx-516]	 # AVX512F
vcvtps2ph ymm6{k7}, zmm5, 0xab	 # AVX512F
vcvtps2ph ymm6{k7}{z}, zmm5, 0xab	 # AVX512F
vcvtps2ph ymm6{k7}, zmm5{sae}, 0xab	 # AVX512F
vcvtps2ph ymm6{k7}, zmm5, 123	 # AVX512F
vcvtps2ph ymm6{k7}, zmm5{sae}, 123	 # AVX512F
vcvtps2udq zmm6, zmm5	 # AVX512F
vcvtps2udq zmm6{k7}, zmm5	 # AVX512F
vcvtps2udq zmm6{k7}{z}, zmm5	 # AVX512F
vcvtps2udq zmm6, zmm5{rn-sae}	 # AVX512F
vcvtps2udq zmm6, zmm5{ru-sae}	 # AVX512F
vcvtps2udq zmm6, zmm5{rd-sae}	 # AVX512F
vcvtps2udq zmm6, zmm5{rz-sae}	 # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtps2udq zmm6, dword bcst [eax]	 # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtps2udq zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtps2udq zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtps2udq zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtps2udq zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcvtps2udq zmm6, dword bcst [edx+512]	 # AVX512F
vcvtps2udq zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcvtps2udq zmm6, dword bcst [edx-516]	 # AVX512F
vcvtsd2si eax, xmm6{rn-sae}	 # AVX512F
vcvtsd2si eax, xmm6{ru-sae}	 # AVX512F
vcvtsd2si eax, xmm6{rd-sae}	 # AVX512F
vcvtsd2si eax, xmm6{rz-sae}	 # AVX512F
vcvtsd2si ebp, xmm6{rn-sae}	 # AVX512F
vcvtsd2si ebp, xmm6{ru-sae}	 # AVX512F
vcvtsd2si ebp, xmm6{rd-sae}	 # AVX512F
vcvtsd2si ebp, xmm6{rz-sae}	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vcvtsd2ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vcvtsi2ss xmm6, xmm5, eax{rn-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, eax{ru-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, eax{rd-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, eax{rz-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{rn-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{ru-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{rd-sae}	 # AVX512F
vcvtsi2ss xmm6, xmm5, ebp{rz-sae}	 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vcvtss2sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vcvtss2si eax, xmm6{rn-sae}	 # AVX512F
vcvtss2si eax, xmm6{ru-sae}	 # AVX512F
vcvtss2si eax, xmm6{rd-sae}	 # AVX512F
vcvtss2si eax, xmm6{rz-sae}	 # AVX512F
vcvtss2si ebp, xmm6{rn-sae}	 # AVX512F
vcvtss2si ebp, xmm6{ru-sae}	 # AVX512F
vcvtss2si ebp, xmm6{rd-sae}	 # AVX512F
vcvtss2si ebp, xmm6{rz-sae}	 # AVX512F
vcvttpd2dq ymm6{k7}, zmm5	 # AVX512F
vcvttpd2dq ymm6{k7}{z}, zmm5	 # AVX512F
vcvttpd2dq ymm6{k7}, zmm5{sae}	 # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvttpd2dq ymm6{k7}, qword bcst [eax]	 # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvttpd2dq ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvttpd2dq ymm6{k7}, qword bcst [edx+1016]	 # AVX512F Disp8
vcvttpd2dq ymm6{k7}, qword bcst [edx+1024]	 # AVX512F
vcvttpd2dq ymm6{k7}, qword bcst [edx-1024]	 # AVX512F Disp8
vcvttpd2dq ymm6{k7}, qword bcst [edx-1032]	 # AVX512F
vcvttps2dq zmm6, zmm5	 # AVX512F
vcvttps2dq zmm6{k7}, zmm5	 # AVX512F
vcvttps2dq zmm6{k7}{z}, zmm5	 # AVX512F
vcvttps2dq zmm6, zmm5{sae}	 # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvttps2dq zmm6, dword bcst [eax]	 # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvttps2dq zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvttps2dq zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvttps2dq zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvttps2dq zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcvttps2dq zmm6, dword bcst [edx+512]	 # AVX512F
vcvttps2dq zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcvttps2dq zmm6, dword bcst [edx-516]	 # AVX512F
vcvttsd2si eax, xmm6{sae}	 # AVX512F
vcvttsd2si ebp, xmm6{sae}	 # AVX512F
vcvttss2si eax, xmm6{sae}	 # AVX512F
vcvttss2si ebp, xmm6{sae}	 # AVX512F
vcvtudq2pd zmm6{k7}, ymm5	 # AVX512F
vcvtudq2pd zmm6{k7}{z}, ymm5	 # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtudq2pd zmm6{k7}, dword bcst [eax]	 # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512F Disp8
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512F
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512F Disp8
vcvtudq2pd zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512F
vcvtudq2pd zmm6{k7}, dword bcst [edx+508]	 # AVX512F Disp8
vcvtudq2pd zmm6{k7}, dword bcst [edx+512]	 # AVX512F
vcvtudq2pd zmm6{k7}, dword bcst [edx-512]	 # AVX512F Disp8
vcvtudq2pd zmm6{k7}, dword bcst [edx-516]	 # AVX512F
vcvtudq2ps zmm6, zmm5	 # AVX512F
vcvtudq2ps zmm6{k7}, zmm5	 # AVX512F
vcvtudq2ps zmm6{k7}{z}, zmm5	 # AVX512F
vcvtudq2ps zmm6, zmm5{rn-sae}	 # AVX512F
vcvtudq2ps zmm6, zmm5{ru-sae}	 # AVX512F
vcvtudq2ps zmm6, zmm5{rd-sae}	 # AVX512F
vcvtudq2ps zmm6, zmm5{rz-sae}	 # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vcvtudq2ps zmm6, dword bcst [eax]	 # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vcvtudq2ps zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vcvtudq2ps zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vcvtudq2ps zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vcvtudq2ps zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vcvtudq2ps zmm6, dword bcst [edx+512]	 # AVX512F
vcvtudq2ps zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vcvtudq2ps zmm6, dword bcst [edx-516]	 # AVX512F
# --- Packed/scalar divide -------------------------------------------------
vdivpd zmm6, zmm5, zmm4	 # AVX512F
vdivpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vdivpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vdivpd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vdivpd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vdivpd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vdivpd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vdivpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vdivpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vdivpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vdivpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vdivpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vdivpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vdivpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vdivpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vdivps zmm6, zmm5, zmm4	 # AVX512F
vdivps zmm6{k7}, zmm5, zmm4	 # AVX512F
vdivps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vdivps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vdivps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vdivps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vdivps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vdivps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vdivps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vdivps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vdivps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vdivps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vdivps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vdivps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vdivps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vdivsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vdivsd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vdivss xmm6{k7}, xmm5, xmm4	 # AVX512F
vdivss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vdivss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# --- Expand and extract ----------------------------------------------------
vexpandpd zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vexpandpd zmm6{k7}, ZMMWORD PTR [ecx]	 # AVX512F
vexpandpd zmm6{k7}{z}, ZMMWORD PTR [ecx]	 # AVX512F
vexpandpd zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vexpandpd zmm6, ZMMWORD PTR [edx+1016]	 # AVX512F Disp8
vexpandpd zmm6, ZMMWORD PTR [edx+1024]	 # AVX512F
vexpandpd zmm6, ZMMWORD PTR [edx-1024]	 # AVX512F Disp8
vexpandpd zmm6, ZMMWORD PTR [edx-1032]	 # AVX512F
vexpandpd zmm6, zmm5	 # AVX512F
vexpandpd zmm6{k7}, zmm5	 # AVX512F
vexpandpd zmm6{k7}{z}, zmm5	 # AVX512F
vexpandps zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vexpandps zmm6{k7}, ZMMWORD PTR [ecx]	 # AVX512F
vexpandps zmm6{k7}{z}, ZMMWORD PTR [ecx]	 # AVX512F
vexpandps zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vexpandps zmm6, ZMMWORD PTR [edx+508]	 # AVX512F Disp8
vexpandps zmm6, ZMMWORD PTR [edx+512]	 # AVX512F
vexpandps zmm6, ZMMWORD PTR [edx-512]	 # AVX512F Disp8
vexpandps zmm6, ZMMWORD PTR [edx-516]	 # AVX512F
vexpandps zmm6, zmm5	 # AVX512F
vexpandps zmm6{k7}, zmm5	 # AVX512F
vexpandps zmm6{k7}{z}, zmm5	 # AVX512F
vextractf32x4 xmm6{k7}, zmm5, 0xab	 # AVX512F
vextractf32x4 xmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vextractf32x4 xmm6{k7}, zmm5, 123	 # AVX512F
vextractf64x4 ymm6{k7}, zmm5, 0xab	 # AVX512F
vextractf64x4 ymm6{k7}{z}, zmm5, 0xab	 # AVX512F
vextractf64x4 ymm6{k7}, zmm5, 123	 # AVX512F
vextracti32x4 xmm6{k7}, zmm5, 0xab	 # AVX512F
vextracti32x4 xmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vextracti32x4 xmm6{k7}, zmm5, 123	 # AVX512F
vextracti64x4 ymm6{k7}, zmm5, 0xab	 # AVX512F
vextracti64x4 ymm6{k7}{z}, zmm5, 0xab	 # AVX512F
vextracti64x4 ymm6{k7}, zmm5, 123	 # AVX512F
# --- Fused multiply-add family ---------------------------------------------
vfmadd132pd zmm6, zmm5, zmm4	 # AVX512F
vfmadd132pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmadd132pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmadd132pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd132pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmadd132pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfmadd132pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfmadd132pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfmadd132pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfmadd132ps zmm6, zmm5, zmm4	 # AVX512F
vfmadd132ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmadd132ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmadd132ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd132ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmadd132ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfmadd132ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfmadd132ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfmadd132ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vfmadd132sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vfmadd132ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vfmadd213pd zmm6, zmm5, zmm4	 # AVX512F
vfmadd213pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmadd213pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmadd213pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd213pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmadd213pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfmadd213pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfmadd213pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfmadd213pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfmadd213ps zmm6, zmm5, zmm4	 # AVX512F
vfmadd213ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmadd213ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmadd213ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd213ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmadd213ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfmadd213ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfmadd213ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfmadd213ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vfmadd213sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vfmadd213ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vfmadd231pd zmm6, zmm5, zmm4	 # AVX512F
vfmadd231pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmadd231pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmadd231pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd231pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmadd231pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfmadd231pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfmadd231pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfmadd231pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfmadd231ps zmm6, zmm5, zmm4	 # AVX512F
vfmadd231ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmadd231ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmadd231ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd231ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmadd231ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfmadd231ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfmadd231ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfmadd231ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vfmadd231sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vfmadd231ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4	 # AVX512F
vfmaddsub132pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmaddsub132pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmaddsub132pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfmaddsub132pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfmaddsub132pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4	 # AVX512F
vfmaddsub132ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmaddsub132ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfmaddsub132ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfmaddsub132ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfmaddsub132ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4	 # AVX512F
vfmaddsub213pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfmaddsub213pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmaddsub213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmaddsub213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4 # AVX512F
vfmaddsub213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmaddsub213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmaddsub213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4 # AVX512F
vfmaddsub231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmaddsub231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmaddsub231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4 # AVX512F
vfmaddsub231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmaddsub231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmaddsub231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmaddsub231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmaddsub231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmaddsub231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub132pd zmm6, zmm5, zmm4 # AVX512F
vfmsub132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsub132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsub132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsub132ps zmm6, zmm5, zmm4 # AVX512F
vfmsub132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsub132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsub132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmsub213pd zmm6, zmm5, zmm4 # AVX512F
vfmsub213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsub213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsub213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsub213ps zmm6, zmm5, zmm4 # AVX512F
vfmsub213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsub213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsub213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmsub231pd zmm6, zmm5, zmm4 # AVX512F
vfmsub231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsub231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsub231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsub231ps zmm6, zmm5, zmm4 # AVX512F
vfmsub231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsub231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsub231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsub231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsub231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsub231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4 # AVX512F
vfmsubadd132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsubadd132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsubadd132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4 # AVX512F
vfmsubadd132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsubadd132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsubadd132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4 # AVX512F
vfmsubadd213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsubadd213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsubadd213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsubadd213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4 # AVX512F
vfmsubadd213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsubadd213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsubadd213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4 # AVX512F
vfmsubadd231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfmsubadd231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfmsubadd231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4 # AVX512F
vfmsubadd231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfmsubadd231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfmsubadd231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfmsubadd231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfmsubadd231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfmsubadd231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4 # AVX512F
vfnmadd132pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd132pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd132pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmadd132pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmadd132pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4 # AVX512F
vfnmadd132ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd132ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd132ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmadd132ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmadd132ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4 # AVX512F
vfnmadd213pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd213pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd213pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmadd213pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmadd213pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4 # AVX512F
vfnmadd213ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd213ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd213ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmadd213ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmadd213ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4 # AVX512F
vfnmadd231pd zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd231pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231pd zmm6, zmm5, qword bcst [eax] # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd231pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vfnmadd231pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vfnmadd231pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4 # AVX512F
vfnmadd231ps zmm6{k7}, zmm5, zmm4 # AVX512F
vfnmadd231ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231ps zmm6, zmm5, dword bcst [eax] # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vfnmadd231ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vfnmadd231ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vfnmadd231ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512F
vfnmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
# VFNMSUB (-(a*b)-c) test vectors for all three operand orders (132/213/231),
# each in pd/ps packed and sd/ss scalar flavors. Every group exercises:
# plain reg-reg, merge-mask {k7}, zero-mask {k7}{z}, the four static rounding
# modes {rn,ru,rd,rz}-sae, base/index memory operands, embedded broadcast
# (qword bcst for pd, dword bcst for ps), and displacement pairs straddling the
# Disp8*N compression limits (ZMM full-vector: ±8128/8192 vs ±8192/8256;
# qword bcst: ±1016/1024; dword bcst: ±508/512; scalar: element-sized).
vfnmsub132pd zmm6, zmm5, zmm4	 # AVX512F
vfnmsub132pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfnmsub132pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfnmsub132pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub132pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfnmsub132pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfnmsub132pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfnmsub132pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4	 # AVX512F
vfnmsub132ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfnmsub132ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfnmsub132ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub132ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfnmsub132ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfnmsub132ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfnmsub132ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vfnmsub132sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vfnmsub132ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4	 # AVX512F
vfnmsub213pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfnmsub213pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfnmsub213pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub213pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfnmsub213pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfnmsub213pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfnmsub213pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4	 # AVX512F
vfnmsub213ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfnmsub213ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfnmsub213ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub213ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfnmsub213ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfnmsub213ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfnmsub213ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vfnmsub213sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vfnmsub213ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4	 # AVX512F
vfnmsub231pd zmm6{k7}, zmm5, zmm4	 # AVX512F
vfnmsub231pd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfnmsub231pd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub231pd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfnmsub231pd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vfnmsub231pd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vfnmsub231pd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4	 # AVX512F
vfnmsub231ps zmm6{k7}, zmm5, zmm4	 # AVX512F
vfnmsub231ps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vfnmsub231ps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub231ps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vfnmsub231ps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vfnmsub231ps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vfnmsub231ps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4	 # AVX512F
vfnmsub231sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4	 # AVX512F
vfnmsub231ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# Gather test vectors (VSIB addressing: the index register is a vector).
# Each group tries a negative-displacement form (with and without an explicit
# element-size override), a no-scale form, and a scale-4 form. Note the
# architectural operand widths: dword-index/qword-data uses a ymm index with a
# zmm destination; qword-index/dword-data uses a zmm index with a ymm destination.
# Gathers require a writemask ({k1}) — unmasked forms are illegal by design.
vgatherdpd zmm6{k1}, [ebp+ymm7*8-123]	 # AVX512F
vgatherdpd zmm6{k1}, qword ptr [ebp+ymm7*8-123]	 # AVX512F
vgatherdpd zmm6{k1}, [eax+ymm7+256]	 # AVX512F
vgatherdpd zmm6{k1}, [ecx+ymm7*4+1024]	 # AVX512F
vgatherdps zmm6{k1}, [ebp+zmm7*8-123]	 # AVX512F
vgatherdps zmm6{k1}, dword ptr [ebp+zmm7*8-123]	 # AVX512F
vgatherdps zmm6{k1}, [eax+zmm7+256]	 # AVX512F
vgatherdps zmm6{k1}, [ecx+zmm7*4+1024]	 # AVX512F
vgatherqpd zmm6{k1}, [ebp+zmm7*8-123]	 # AVX512F
vgatherqpd zmm6{k1}, qword ptr [ebp+zmm7*8-123]	 # AVX512F
vgatherqpd zmm6{k1}, [eax+zmm7+256]	 # AVX512F
vgatherqpd zmm6{k1}, [ecx+zmm7*4+1024]	 # AVX512F
vgatherqps ymm6{k1}, [ebp+zmm7*8-123]	 # AVX512F
vgatherqps ymm6{k1}, dword ptr [ebp+zmm7*8-123]	 # AVX512F
vgatherqps ymm6{k1}, [eax+zmm7+256]	 # AVX512F
vgatherqps ymm6{k1}, [ecx+zmm7*4+1024]	 # AVX512F
# VGETEXP (extract biased exponent as a float) test vectors, pd/ps packed and
# sd/ss scalar. Exception-suppression-only {sae} is used here (no rounding
# choice exists for this op). Memory forms probe the usual Disp8*N boundaries.
vgetexppd zmm6, zmm5	 # AVX512F
vgetexppd zmm6{k7}, zmm5	 # AVX512F
vgetexppd zmm6{k7}{z}, zmm5	 # AVX512F
vgetexppd zmm6, zmm5{sae}	 # AVX512F
vgetexppd zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vgetexppd zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vgetexppd zmm6, qword bcst [eax]	 # AVX512F
vgetexppd zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vgetexppd zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vgetexppd zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vgetexppd zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vgetexppd zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
vgetexppd zmm6, qword bcst [edx+1024]	 # AVX512F
vgetexppd zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
vgetexppd zmm6, qword bcst [edx-1032]	 # AVX512F
vgetexpps zmm6, zmm5	 # AVX512F
vgetexpps zmm6{k7}, zmm5	 # AVX512F
vgetexpps zmm6{k7}{z}, zmm5	 # AVX512F
vgetexpps zmm6, zmm5{sae}	 # AVX512F
vgetexpps zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vgetexpps zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vgetexpps zmm6, dword bcst [eax]	 # AVX512F
vgetexpps zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vgetexpps zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vgetexpps zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vgetexpps zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vgetexpps zmm6, dword bcst [edx+508]	 # AVX512F Disp8
vgetexpps zmm6, dword bcst [edx+512]	 # AVX512F
vgetexpps zmm6, dword bcst [edx-512]	 # AVX512F Disp8
vgetexpps zmm6, dword bcst [edx-516]	 # AVX512F
vgetexpsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vgetexpsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vgetexpsd xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vgetexpss xmm6{k7}, xmm5, xmm4	 # AVX512F
vgetexpss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vgetexpss xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# VGETMANT (extract normalized mantissa, control in imm8) test vectors.
# Both hex (0xab) and decimal (123) immediates are assembled to verify the
# immediate is encoded identically regardless of spelling; {sae} forms and the
# full masking/broadcast/Disp8*N matrix follow the same pattern as above.
vgetmantpd zmm6, zmm5, 0xab	 # AVX512F
vgetmantpd zmm6{k7}, zmm5, 0xab	 # AVX512F
vgetmantpd zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vgetmantpd zmm6, zmm5{sae}, 0xab	 # AVX512F
vgetmantpd zmm6, zmm5, 123	 # AVX512F
vgetmantpd zmm6, zmm5{sae}, 123	 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vgetmantpd zmm6, qword bcst [eax], 123	 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vgetmantpd zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vgetmantpd zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vgetmantpd zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vgetmantpd zmm6, qword bcst [edx+1016], 123	 # AVX512F Disp8
vgetmantpd zmm6, qword bcst [edx+1024], 123	 # AVX512F
vgetmantpd zmm6, qword bcst [edx-1024], 123	 # AVX512F Disp8
vgetmantpd zmm6, qword bcst [edx-1032], 123	 # AVX512F
vgetmantps zmm6, zmm5, 0xab	 # AVX512F
vgetmantps zmm6{k7}, zmm5, 0xab	 # AVX512F
vgetmantps zmm6{k7}{z}, zmm5, 0xab	 # AVX512F
vgetmantps zmm6, zmm5{sae}, 0xab	 # AVX512F
vgetmantps zmm6, zmm5, 123	 # AVX512F
vgetmantps zmm6, zmm5{sae}, 123	 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [ecx], 123	 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vgetmantps zmm6, dword bcst [eax], 123	 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
vgetmantps zmm6, ZMMWORD PTR [edx+8192], 123	 # AVX512F
vgetmantps zmm6, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
vgetmantps zmm6, ZMMWORD PTR [edx-8256], 123	 # AVX512F
vgetmantps zmm6, dword bcst [edx+508], 123	 # AVX512F Disp8
vgetmantps zmm6, dword bcst [edx+512], 123	 # AVX512F
vgetmantps zmm6, dword bcst [edx-512], 123	 # AVX512F Disp8
vgetmantps zmm6, dword bcst [edx-516], 123	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512F
vgetmantsd xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4, 123	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123	 # AVX512F Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123	 # AVX512F
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123	 # AVX512F Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123	 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512F
vgetmantss xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4, 123	 # AVX512F
vgetmantss xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [ecx], 123	 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123	 # AVX512F Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123	 # AVX512F
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123	 # AVX512F Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123	 # AVX512F
# VINSERTF/VINSERTI 32x4 (xmm lane, Disp8*16: ±2032/2048) and 64x4 (ymm half,
# Disp8*32: ±4064/4096) test vectors, float and integer variants, with masking
# and both immediate spellings.
vinsertf32x4 zmm6{k7}, zmm5, xmm4, 0xab	 # AVX512F
vinsertf32x4 zmm6{k7}{z}, zmm5, xmm4, 0xab	 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, xmm4, 123	 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123	 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123	 # AVX512F Disp8
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123	 # AVX512F
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123	 # AVX512F Disp8
vinsertf32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123	 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, ymm4, 0xab	 # AVX512F
vinsertf64x4 zmm6{k7}{z}, zmm5, ymm4, 0xab	 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, ymm4, 123	 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123	 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123	 # AVX512F Disp8
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123	 # AVX512F
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123	 # AVX512F Disp8
vinsertf64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123	 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, xmm4, 0xab	 # AVX512F
vinserti32x4 zmm6{k7}{z}, zmm5, xmm4, 0xab	 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, xmm4, 123	 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123	 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123	 # AVX512F Disp8
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123	 # AVX512F
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123	 # AVX512F Disp8
vinserti32x4 zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123	 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, ymm4, 0xab	 # AVX512F
vinserti64x4 zmm6{k7}{z}, zmm5, ymm4, 0xab	 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, ymm4, 123	 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123	 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123	 # AVX512F Disp8
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123	 # AVX512F
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123	 # AVX512F Disp8
vinserti64x4 zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123	 # AVX512F
# VMAX/VMIN test vectors (pd/ps packed, sd/ss scalar). These ops take {sae}
# (suppress-all-exceptions) rather than a rounding mode; otherwise the matrix
# matches the FMA groups: masking, zeroing, broadcast, Disp8*N boundary pairs.
vmaxpd zmm6, zmm5, zmm4	 # AVX512F
vmaxpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vmaxpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vmaxpd zmm6, zmm5, zmm4{sae}	 # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmaxpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmaxpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vmaxpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vmaxpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vmaxpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vmaxpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vmaxps zmm6, zmm5, zmm4	 # AVX512F
vmaxps zmm6{k7}, zmm5, zmm4	 # AVX512F
vmaxps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vmaxps zmm6, zmm5, zmm4{sae}	 # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmaxps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmaxps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vmaxps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmaxps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vmaxps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vmaxps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vmaxps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vmaxps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vmaxsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vmaxsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vmaxsd xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vmaxss xmm6{k7}, xmm5, xmm4	 # AVX512F
vmaxss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vmaxss xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vminpd zmm6, zmm5, zmm4	 # AVX512F
vminpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vminpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vminpd zmm6, zmm5, zmm4{sae}	 # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vminpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vminpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vminpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vminpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vminpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vminpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vminpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vminpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vminps zmm6, zmm5, zmm4	 # AVX512F
vminps zmm6{k7}, zmm5, zmm4	 # AVX512F
vminps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vminps zmm6, zmm5, zmm4{sae}	 # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vminps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vminps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vminps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vminps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vminps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vminps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vminps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vminps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vminsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vminsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vminsd xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vminss xmm6{k7}, xmm5, xmm4	 # AVX512F
vminss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vminss xmm6{k7}, xmm5, xmm4{sae}	 # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vminss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# VMOV* family test vectors: aligned (vmovapd/aps, vmovdqa32/64), unaligned
# (vmovupd/ups, vmovdqu32/64), duplicating (vmovddup/shdup/sldup), non-temporal
# (vmovnt*, which allow no masking), and masked scalar load/store/merge forms
# (vmovsd/vmovss — note masking is permitted on the memory operand of the store
# form). Full-vector memory forms probe the Disp8*64 compression boundaries.
vmovapd zmm6, zmm5	 # AVX512F
vmovapd zmm6{k7}, zmm5	 # AVX512F
vmovapd zmm6{k7}{z}, zmm5	 # AVX512F
vmovapd zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovapd zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovapd zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovapd zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovapd zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovapd zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovaps zmm6, zmm5	 # AVX512F
vmovaps zmm6{k7}, zmm5	 # AVX512F
vmovaps zmm6{k7}{z}, zmm5	 # AVX512F
vmovaps zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovaps zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovaps zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovaps zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovaps zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovaps zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovddup zmm6, zmm5	 # AVX512F
vmovddup zmm6{k7}, zmm5	 # AVX512F
vmovddup zmm6{k7}{z}, zmm5	 # AVX512F
vmovddup zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovddup zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovddup zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovddup zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovddup zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovddup zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovdqa32 zmm6, zmm5	 # AVX512F
vmovdqa32 zmm6{k7}, zmm5	 # AVX512F
vmovdqa32 zmm6{k7}{z}, zmm5	 # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovdqa32 zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovdqa32 zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovdqa32 zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovdqa64 zmm6, zmm5	 # AVX512F
vmovdqa64 zmm6{k7}, zmm5	 # AVX512F
vmovdqa64 zmm6{k7}{z}, zmm5	 # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovdqa64 zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovdqa64 zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovdqa64 zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovdqu32 zmm6, zmm5	 # AVX512F
vmovdqu32 zmm6{k7}, zmm5	 # AVX512F
vmovdqu32 zmm6{k7}{z}, zmm5	 # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovdqu32 zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovdqu32 zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovdqu32 zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovdqu64 zmm6, zmm5	 # AVX512F
vmovdqu64 zmm6{k7}, zmm5	 # AVX512F
vmovdqu64 zmm6{k7}{z}, zmm5	 # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovdqu64 zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovdqu64 zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovdqu64 zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovntdq ZMMWORD PTR [ecx], zmm6	 # AVX512F
vmovntdq ZMMWORD PTR [esp+esi*8-123456], zmm6	 # AVX512F
vmovntdq ZMMWORD PTR [edx+8128], zmm6	 # AVX512F Disp8
vmovntdq ZMMWORD PTR [edx+8192], zmm6	 # AVX512F
vmovntdq ZMMWORD PTR [edx-8192], zmm6	 # AVX512F Disp8
vmovntdq ZMMWORD PTR [edx-8256], zmm6	 # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovntdqa zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovntdqa zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovntdqa zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovntpd ZMMWORD PTR [ecx], zmm6	 # AVX512F
vmovntpd ZMMWORD PTR [esp+esi*8-123456], zmm6	 # AVX512F
vmovntpd ZMMWORD PTR [edx+8128], zmm6	 # AVX512F Disp8
vmovntpd ZMMWORD PTR [edx+8192], zmm6	 # AVX512F
vmovntpd ZMMWORD PTR [edx-8192], zmm6	 # AVX512F Disp8
vmovntpd ZMMWORD PTR [edx-8256], zmm6	 # AVX512F
vmovntps ZMMWORD PTR [ecx], zmm6	 # AVX512F
vmovntps ZMMWORD PTR [esp+esi*8-123456], zmm6	 # AVX512F
vmovntps ZMMWORD PTR [edx+8128], zmm6	 # AVX512F Disp8
vmovntps ZMMWORD PTR [edx+8192], zmm6	 # AVX512F
vmovntps ZMMWORD PTR [edx-8192], zmm6	 # AVX512F Disp8
vmovntps ZMMWORD PTR [edx-8256], zmm6	 # AVX512F
vmovsd xmm6{k7}, QWORD PTR [ecx]	 # AVX512F
vmovsd xmm6{k7}{z}, QWORD PTR [ecx]	 # AVX512F
vmovsd xmm6{k7}, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovsd xmm6{k7}, QWORD PTR [edx+1016]	 # AVX512F Disp8
vmovsd xmm6{k7}, QWORD PTR [edx+1024]	 # AVX512F
vmovsd xmm6{k7}, QWORD PTR [edx-1024]	 # AVX512F Disp8
vmovsd xmm6{k7}, QWORD PTR [edx-1032]	 # AVX512F
vmovsd QWORD PTR [ecx]{k7}, xmm6	 # AVX512F
vmovsd QWORD PTR [esp+esi*8-123456]{k7}, xmm6	 # AVX512F
vmovsd QWORD PTR [edx+1016]{k7}, xmm6	 # AVX512F Disp8
vmovsd QWORD PTR [edx+1024]{k7}, xmm6	 # AVX512F
vmovsd QWORD PTR [edx-1024]{k7}, xmm6	 # AVX512F Disp8
vmovsd QWORD PTR [edx-1032]{k7}, xmm6	 # AVX512F
vmovsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vmovsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vmovshdup zmm6, zmm5	 # AVX512F
vmovshdup zmm6{k7}, zmm5	 # AVX512F
vmovshdup zmm6{k7}{z}, zmm5	 # AVX512F
vmovshdup zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovshdup zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovshdup zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovshdup zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovshdup zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovshdup zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovsldup zmm6, zmm5	 # AVX512F
vmovsldup zmm6{k7}, zmm5	 # AVX512F
vmovsldup zmm6{k7}{z}, zmm5	 # AVX512F
vmovsldup zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovsldup zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovsldup zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovsldup zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovsldup zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovsldup zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovss xmm6{k7}, DWORD PTR [ecx]	 # AVX512F
vmovss xmm6{k7}{z}, DWORD PTR [ecx]	 # AVX512F
vmovss xmm6{k7}, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovss xmm6{k7}, DWORD PTR [edx+508]	 # AVX512F Disp8
vmovss xmm6{k7}, DWORD PTR [edx+512]	 # AVX512F
vmovss xmm6{k7}, DWORD PTR [edx-512]	 # AVX512F Disp8
vmovss xmm6{k7}, DWORD PTR [edx-516]	 # AVX512F
vmovss DWORD PTR [ecx]{k7}, xmm6	 # AVX512F
vmovss DWORD PTR [esp+esi*8-123456]{k7}, xmm6	 # AVX512F
vmovss DWORD PTR [edx+508]{k7}, xmm6	 # AVX512F Disp8
vmovss DWORD PTR [edx+512]{k7}, xmm6	 # AVX512F
vmovss DWORD PTR [edx-512]{k7}, xmm6	 # AVX512F Disp8
vmovss DWORD PTR [edx-516]{k7}, xmm6	 # AVX512F
vmovss xmm6{k7}, xmm5, xmm4	 # AVX512F
vmovss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vmovupd zmm6, zmm5	 # AVX512F
vmovupd zmm6{k7}, zmm5	 # AVX512F
vmovupd zmm6{k7}{z}, zmm5	 # AVX512F
vmovupd zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovupd zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovupd zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovupd zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovupd zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovupd zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
vmovups zmm6, zmm5	 # AVX512F
vmovups zmm6{k7}, zmm5	 # AVX512F
vmovups zmm6{k7}{z}, zmm5	 # AVX512F
vmovups zmm6, ZMMWORD PTR [ecx]	 # AVX512F
vmovups zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmovups zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmovups zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
vmovups zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmovups zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
# VMUL test vectors (pd/ps packed, sd/ss scalar) with the four static rounding
# modes, masking/zeroing, embedded broadcast, and Disp8*N boundary pairs.
vmulpd zmm6, zmm5, zmm4	 # AVX512F
vmulpd zmm6{k7}, zmm5, zmm4	 # AVX512F
vmulpd zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vmulpd zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vmulpd zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vmulpd zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vmulpd zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmulpd zmm6, zmm5, qword bcst [eax]	 # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmulpd zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vmulpd zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmulpd zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vmulpd zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
vmulpd zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
vmulpd zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
vmulpd zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
vmulps zmm6, zmm5, zmm4	 # AVX512F
vmulps zmm6{k7}, zmm5, zmm4	 # AVX512F
vmulps zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
vmulps zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vmulps zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
vmulps zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
vmulps zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
vmulps zmm6, zmm5, dword bcst [eax]	 # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
vmulps zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
vmulps zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
vmulps zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
vmulps zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
vmulps zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
vmulps zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
vmulps zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4	 # AVX512F
vmulsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vmulsd xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
vmulss xmm6{k7}, xmm5, xmm4	 # AVX512F
vmulss xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
vmulss xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
vpabsd zmm6, zmm5 # AVX512F
vpabsd zmm6{k7}, zmm5 # AVX512F
vpabsd zmm6{k7}{z}, zmm5 # AVX512F
vpabsd zmm6, ZMMWORD PTR [ecx] # AVX512F
vpabsd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpabsd zmm6, dword bcst [eax] # AVX512F
vpabsd zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpabsd zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpabsd zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpabsd zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpabsd zmm6, dword bcst [edx+508] # AVX512F Disp8
vpabsd zmm6, dword bcst [edx+512] # AVX512F
vpabsd zmm6, dword bcst [edx-512] # AVX512F Disp8
vpabsd zmm6, dword bcst [edx-516] # AVX512F
vpabsq zmm6, zmm5 # AVX512F
vpabsq zmm6{k7}, zmm5 # AVX512F
vpabsq zmm6{k7}{z}, zmm5 # AVX512F
vpabsq zmm6, ZMMWORD PTR [ecx] # AVX512F
vpabsq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpabsq zmm6, qword bcst [eax] # AVX512F
vpabsq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpabsq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpabsq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpabsq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpabsq zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpabsq zmm6, qword bcst [edx+1024] # AVX512F
vpabsq zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpabsq zmm6, qword bcst [edx-1032] # AVX512F
vpaddd zmm6, zmm5, zmm4 # AVX512F
vpaddd zmm6{k7}, zmm5, zmm4 # AVX512F
vpaddd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpaddd zmm6, zmm5, dword bcst [eax] # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpaddd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpaddd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpaddd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpaddd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpaddd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpaddd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpaddd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpaddq zmm6, zmm5, zmm4 # AVX512F
vpaddq zmm6{k7}, zmm5, zmm4 # AVX512F
vpaddq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpaddq zmm6, zmm5, qword bcst [eax] # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpaddq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpaddq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpaddq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpaddq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpaddq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpaddq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpaddq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpandd zmm6, zmm5, zmm4 # AVX512F
vpandd zmm6{k7}, zmm5, zmm4 # AVX512F
vpandd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandd zmm6, zmm5, dword bcst [eax] # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpandd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpandd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpandd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpandnd zmm6, zmm5, zmm4 # AVX512F
vpandnd zmm6{k7}, zmm5, zmm4 # AVX512F
vpandnd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandnd zmm6, zmm5, dword bcst [eax] # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandnd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandnd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandnd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandnd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpandnd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpandnd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpandnd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpandnq zmm6, zmm5, zmm4 # AVX512F
vpandnq zmm6{k7}, zmm5, zmm4 # AVX512F
vpandnq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandnq zmm6, zmm5, qword bcst [eax] # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandnq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandnq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandnq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandnq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpandnq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpandnq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpandnq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpandq zmm6, zmm5, zmm4 # AVX512F
vpandq zmm6{k7}, zmm5, zmm4 # AVX512F
vpandq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpandq zmm6, zmm5, qword bcst [eax] # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpandq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpandq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpandq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpandq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpandq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpandq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpandq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpblendmd zmm6, zmm5, zmm4 # AVX512F
vpblendmd zmm6{k7}, zmm5, zmm4 # AVX512F
vpblendmd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpblendmd zmm6, zmm5, dword bcst [eax] # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpblendmd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpblendmd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpblendmd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpblendmd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpblendmd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpbroadcastd zmm6, DWORD PTR [ecx] # AVX512F
vpbroadcastd zmm6{k7}, DWORD PTR [ecx] # AVX512F
vpbroadcastd zmm6{k7}{z}, DWORD PTR [ecx] # AVX512F
vpbroadcastd zmm6, DWORD PTR [esp+esi*8-123456] # AVX512F
vpbroadcastd zmm6, DWORD PTR [edx+508] # AVX512F Disp8
vpbroadcastd zmm6, DWORD PTR [edx+512] # AVX512F
vpbroadcastd zmm6, DWORD PTR [edx-512] # AVX512F Disp8
vpbroadcastd zmm6, DWORD PTR [edx-516] # AVX512F
vpbroadcastd zmm6{k7}, xmm5 # AVX512F
vpbroadcastd zmm6{k7}{z}, xmm5 # AVX512F
vpbroadcastd zmm6, eax # AVX512F
vpbroadcastd zmm6{k7}, eax # AVX512F
vpbroadcastd zmm6{k7}{z}, eax # AVX512F
vpbroadcastd zmm6, ebp # AVX512F
vpbroadcastq zmm6, QWORD PTR [ecx] # AVX512F
vpbroadcastq zmm6{k7}, QWORD PTR [ecx] # AVX512F
vpbroadcastq zmm6{k7}{z}, QWORD PTR [ecx] # AVX512F
vpbroadcastq zmm6, QWORD PTR [esp+esi*8-123456] # AVX512F
vpbroadcastq zmm6, QWORD PTR [edx+1016] # AVX512F Disp8
vpbroadcastq zmm6, QWORD PTR [edx+1024] # AVX512F
vpbroadcastq zmm6, QWORD PTR [edx-1024] # AVX512F Disp8
vpbroadcastq zmm6, QWORD PTR [edx-1032] # AVX512F
vpbroadcastq zmm6{k7}, xmm5 # AVX512F
vpbroadcastq zmm6{k7}{z}, xmm5 # AVX512F
vpcmpd k5, zmm6, zmm5, 0xab # AVX512F
vpcmpd k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpd k5, zmm6, zmm5, 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpd k5, zmm6, dword bcst [eax], 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpd k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpd k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpd k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpd k5, zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpcmpd k5, zmm6, dword bcst [edx+512], 123 # AVX512F
vpcmpd k5, zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpcmpd k5, zmm6, dword bcst [edx-516], 123 # AVX512F
vpcmpltd k5, zmm6, zmm5 # AVX512F
vpcmpltd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpltd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpltd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpltd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpled k5, zmm6, zmm5 # AVX512F
vpcmpled k5{k7}, zmm6, zmm5 # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpled k5, zmm6, dword bcst [eax] # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpled k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpled k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpled k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpled k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpled k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpled k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpled k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpneqd k5, zmm6, zmm5 # AVX512F
vpcmpneqd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpneqd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpneqd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpneqd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpneqd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpneqd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpneqd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnltd k5, zmm6, zmm5 # AVX512F
vpcmpnltd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnltd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnltd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnltd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnled k5, zmm6, zmm5 # AVX512F
vpcmpnled k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnled k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnled k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnled k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnled k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnled k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnled k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnled k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnled k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpeqd k5, zmm6, zmm5 # AVX512F
vpcmpeqd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpeqd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpeqd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpeqd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpeqd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpeqd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpeqd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpeqq k5, zmm6, zmm5 # AVX512F
vpcmpeqq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpeqq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpeqq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpeqq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpeqq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpeqq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpeqq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpgtd k5, zmm6, zmm5 # AVX512F
vpcmpgtd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpgtd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpgtd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpgtd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpgtd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpgtd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpgtd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpgtq k5, zmm6, zmm5 # AVX512F
vpcmpgtq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpgtq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpgtq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpgtq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpgtq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpgtq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpgtq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpq k5, zmm6, zmm5, 0xab # AVX512F
vpcmpq k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpq k5, zmm6, zmm5, 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpq k5, zmm6, qword bcst [eax], 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpq k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpq k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpq k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpq k5, zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpcmpq k5, zmm6, qword bcst [edx+1024], 123 # AVX512F
vpcmpq k5, zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpcmpq k5, zmm6, qword bcst [edx-1032], 123 # AVX512F
vpcmpltq k5, zmm6, zmm5 # AVX512F
vpcmpltq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpltq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpltq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpltq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpleq k5, zmm6, zmm5 # AVX512F
vpcmpleq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpleq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpleq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpleq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpleq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpleq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpleq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpleq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpleq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpneqq k5, zmm6, zmm5 # AVX512F
vpcmpneqq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpneqq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpneqq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpneqq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpneqq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpneqq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpneqq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpnltq k5, zmm6, zmm5 # AVX512F
vpcmpnltq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnltq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnltq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnltq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpnleq k5, zmm6, zmm5 # AVX512F
vpcmpnleq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnleq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnleq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnleq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnleq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnleq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnleq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpud k5, zmm6, zmm5, 0xab # AVX512F
vpcmpud k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpud k5, zmm6, zmm5, 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpud k5, zmm6, dword bcst [eax], 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpud k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpud k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpud k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpud k5, zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpcmpud k5, zmm6, dword bcst [edx+512], 123 # AVX512F
vpcmpud k5, zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpcmpud k5, zmm6, dword bcst [edx-516], 123 # AVX512F
vpcmpequd k5, zmm6, zmm5 # AVX512F
vpcmpequd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpequd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpequd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpequd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpequd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpequd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpequd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpequd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpequd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpltud k5, zmm6, zmm5 # AVX512F
vpcmpltud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpltud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpltud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpltud k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpleud k5, zmm6, zmm5 # AVX512F
vpcmpleud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpleud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpleud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpleud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpleud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpleud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpleud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpleud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpleud k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnequd k5, zmm6, zmm5 # AVX512F
vpcmpnequd k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnequd k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnequd k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnequd k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnequd k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnequd k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnequd k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnltud k5, zmm6, zmm5 # AVX512F
vpcmpnltud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnltud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnltud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnltud k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpnleud k5, zmm6, zmm5 # AVX512F
vpcmpnleud k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnleud k5, zmm6, dword bcst [eax] # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnleud k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnleud k5, zmm6, dword bcst [edx+508] # AVX512F Disp8
vpcmpnleud k5, zmm6, dword bcst [edx+512] # AVX512F
vpcmpnleud k5, zmm6, dword bcst [edx-512] # AVX512F Disp8
vpcmpnleud k5, zmm6, dword bcst [edx-516] # AVX512F
vpcmpuq k5, zmm6, zmm5, 0xab # AVX512F
vpcmpuq k5{k7}, zmm6, zmm5, 0xab # AVX512F
vpcmpuq k5, zmm6, zmm5, 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpcmpuq k5, zmm6, qword bcst [eax], 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpcmpuq k5, zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpcmpuq k5, zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, qword bcst [edx+1024], 123 # AVX512F
vpcmpuq k5, zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpcmpuq k5, zmm6, qword bcst [edx-1032], 123 # AVX512F
vpcmpequq k5, zmm6, zmm5 # AVX512F
vpcmpequq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpequq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpequq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpequq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpequq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpequq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpequq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpequq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpequq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpltuq k5, zmm6, zmm5 # AVX512F
vpcmpltuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpltuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpltuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpltuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpltuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpltuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpltuq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpleuq k5, zmm6, zmm5 # AVX512F
vpcmpleuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpleuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpleuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpleuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpleuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpleuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpleuq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpnequq k5, zmm6, zmm5 # AVX512F
vpcmpnequq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnequq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnequq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnequq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnequq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnequq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnequq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpnltuq k5, zmm6, zmm5 # AVX512F
vpcmpnltuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnltuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnltuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnltuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnltuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnltuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnltuq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpcmpnleuq k5, zmm6, zmm5 # AVX512F
vpcmpnleuq k5{k7}, zmm6, zmm5 # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [ecx] # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpcmpnleuq k5, zmm6, qword bcst [eax] # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpcmpnleuq k5, zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vpcmpnleuq k5, zmm6, qword bcst [edx+1016] # AVX512F Disp8
vpcmpnleuq k5, zmm6, qword bcst [edx+1024] # AVX512F
vpcmpnleuq k5, zmm6, qword bcst [edx-1024] # AVX512F Disp8
vpcmpnleuq k5, zmm6, qword bcst [edx-1032] # AVX512F
vpblendmq zmm6, zmm5, zmm4 # AVX512F
vpblendmq zmm6{k7}, zmm5, zmm4 # AVX512F
vpblendmq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpblendmq zmm6, zmm5, qword bcst [eax] # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpblendmq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpblendmq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpblendmq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpblendmq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpblendmq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpcompressd ZMMWORD PTR [ecx], zmm6 # AVX512F
vpcompressd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpcompressd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpcompressd ZMMWORD PTR [edx+508], zmm6 # AVX512F Disp8
vpcompressd ZMMWORD PTR [edx+512], zmm6 # AVX512F
vpcompressd ZMMWORD PTR [edx-512], zmm6 # AVX512F Disp8
vpcompressd ZMMWORD PTR [edx-516], zmm6 # AVX512F
vpcompressd zmm6, zmm5 # AVX512F
vpcompressd zmm6{k7}, zmm5 # AVX512F
vpcompressd zmm6{k7}{z}, zmm5 # AVX512F
vpermd zmm6, zmm5, zmm4 # AVX512F
vpermd zmm6{k7}, zmm5, zmm4 # AVX512F
vpermd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermd zmm6, zmm5, dword bcst [eax] # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermilpd zmm6, zmm5, 0xab # AVX512F
vpermilpd zmm6{k7}, zmm5, 0xab # AVX512F
vpermilpd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermilpd zmm6, zmm5, 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermilpd zmm6, qword bcst [eax], 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermilpd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermilpd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermilpd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermilpd zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpermilpd zmm6, qword bcst [edx+1024], 123 # AVX512F
vpermilpd zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpermilpd zmm6, qword bcst [edx-1032], 123 # AVX512F
vpermilpd zmm6, zmm5, zmm4 # AVX512F
vpermilpd zmm6{k7}, zmm5, zmm4 # AVX512F
vpermilpd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermilpd zmm6, zmm5, qword bcst [eax] # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermilpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermilpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpermilpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpermilpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpermilpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpermilps zmm6, zmm5, 0xab # AVX512F
vpermilps zmm6{k7}, zmm5, 0xab # AVX512F
vpermilps zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermilps zmm6, zmm5, 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermilps zmm6, dword bcst [eax], 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermilps zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermilps zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermilps zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermilps zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpermilps zmm6, dword bcst [edx+512], 123 # AVX512F
vpermilps zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpermilps zmm6, dword bcst [edx-516], 123 # AVX512F
vpermilps zmm6, zmm5, zmm4 # AVX512F
vpermilps zmm6{k7}, zmm5, zmm4 # AVX512F
vpermilps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermilps zmm6, zmm5, dword bcst [eax] # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermilps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermilps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermilps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermilps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermilps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermilps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermilps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermpd zmm6, zmm5, 0xab # AVX512F
vpermpd zmm6{k7}, zmm5, 0xab # AVX512F
vpermpd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermpd zmm6, zmm5, 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermpd zmm6, qword bcst [eax], 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermpd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermpd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermpd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermpd zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpermpd zmm6, qword bcst [edx+1024], 123 # AVX512F
vpermpd zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpermpd zmm6, qword bcst [edx-1032], 123 # AVX512F
vpermps zmm6, zmm5, zmm4 # AVX512F
vpermps zmm6{k7}, zmm5, zmm4 # AVX512F
vpermps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermps zmm6, zmm5, dword bcst [eax] # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermq zmm6, zmm5, 0xab # AVX512F
vpermq zmm6{k7}, zmm5, 0xab # AVX512F
vpermq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpermq zmm6, zmm5, 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpermq zmm6, qword bcst [eax], 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpermq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpermq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpermq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpermq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpermq zmm6, qword bcst [edx+1024], 123 # AVX512F
vpermq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpermq zmm6, qword bcst [edx-1032], 123 # AVX512F
vpexpandd zmm6, ZMMWORD PTR [ecx] # AVX512F
vpexpandd zmm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vpexpandd zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512F
vpexpandd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpexpandd zmm6, ZMMWORD PTR [edx+508] # AVX512F Disp8
vpexpandd zmm6, ZMMWORD PTR [edx+512] # AVX512F
vpexpandd zmm6, ZMMWORD PTR [edx-512] # AVX512F Disp8
vpexpandd zmm6, ZMMWORD PTR [edx-516] # AVX512F
vpexpandd zmm6, zmm5 # AVX512F
vpexpandd zmm6{k7}, zmm5 # AVX512F
vpexpandd zmm6{k7}{z}, zmm5 # AVX512F
vpexpandq zmm6, ZMMWORD PTR [ecx] # AVX512F
vpexpandq zmm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vpexpandq zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512F
vpexpandq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpexpandq zmm6, ZMMWORD PTR [edx+1016] # AVX512F Disp8
vpexpandq zmm6, ZMMWORD PTR [edx+1024] # AVX512F
vpexpandq zmm6, ZMMWORD PTR [edx-1024] # AVX512F Disp8
vpexpandq zmm6, ZMMWORD PTR [edx-1032] # AVX512F
vpexpandq zmm6, zmm5 # AVX512F
vpexpandq zmm6{k7}, zmm5 # AVX512F
vpexpandq zmm6{k7}{z}, zmm5 # AVX512F
vpgatherdd zmm6{k1}, [ebp+zmm7*8-123] # AVX512F
vpgatherdd zmm6{k1}, dword ptr [ebp+zmm7*8-123] # AVX512F
vpgatherdd zmm6{k1}, [eax+zmm7+256] # AVX512F
vpgatherdd zmm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vpgatherdq zmm6{k1}, [ebp+ymm7*8-123] # AVX512F
vpgatherdq zmm6{k1}, qword ptr [ebp+ymm7*8-123] # AVX512F
vpgatherdq zmm6{k1}, [eax+ymm7+256] # AVX512F
vpgatherdq zmm6{k1}, [ecx+ymm7*4+1024] # AVX512F
vpgatherqd ymm6{k1}, [ebp+zmm7*8-123] # AVX512F
vpgatherqd ymm6{k1}, dword ptr [ebp+zmm7*8-123] # AVX512F
vpgatherqd ymm6{k1}, [eax+zmm7+256] # AVX512F
vpgatherqd ymm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vpgatherqq zmm6{k1}, [ebp+zmm7*8-123] # AVX512F
vpgatherqq zmm6{k1}, qword ptr [ebp+zmm7*8-123] # AVX512F
vpgatherqq zmm6{k1}, [eax+zmm7+256] # AVX512F
vpgatherqq zmm6{k1}, [ecx+zmm7*4+1024] # AVX512F
vpmaxsd zmm6, zmm5, zmm4 # AVX512F
vpmaxsd zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxsd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxsd zmm6, zmm5, dword bcst [eax] # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxsd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxsd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpmaxsd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpmaxsd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpmaxsd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpmaxsq zmm6, zmm5, zmm4 # AVX512F
vpmaxsq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxsq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxsq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxsq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxsq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmaxsq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmaxsq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmaxsq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpmaxud zmm6, zmm5, zmm4 # AVX512F
vpmaxud zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxud zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxud zmm6, zmm5, dword bcst [eax] # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxud zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxud zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpmaxud zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpmaxud zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpmaxud zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpmaxuq zmm6, zmm5, zmm4 # AVX512F
vpmaxuq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmaxuq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmaxuq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmaxuq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmaxuq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmaxuq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmaxuq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmaxuq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpminsd zmm6, zmm5, zmm4 # AVX512F
vpminsd zmm6{k7}, zmm5, zmm4 # AVX512F
vpminsd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminsd zmm6, zmm5, dword bcst [eax] # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminsd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminsd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminsd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminsd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpminsd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpminsd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpminsd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpminsq zmm6, zmm5, zmm4 # AVX512F
vpminsq zmm6{k7}, zmm5, zmm4 # AVX512F
vpminsq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminsq zmm6, zmm5, qword bcst [eax] # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminsq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminsq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminsq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminsq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpminsq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpminsq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpminsq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpminud zmm6, zmm5, zmm4 # AVX512F
vpminud zmm6{k7}, zmm5, zmm4 # AVX512F
vpminud zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminud zmm6, zmm5, dword bcst [eax] # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminud zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminud zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminud zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminud zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpminud zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpminud zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpminud zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpminuq zmm6, zmm5, zmm4 # AVX512F
vpminuq zmm6{k7}, zmm5, zmm4 # AVX512F
vpminuq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpminuq zmm6, zmm5, qword bcst [eax] # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpminuq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpminuq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpminuq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpminuq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpminuq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpminuq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpminuq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpmovsxbd zmm6{k7}, xmm5 # AVX512F
vpmovsxbd zmm6{k7}{z}, xmm5 # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovsxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
vpmovsxbq zmm6{k7}, xmm5 # AVX512F
vpmovsxbq zmm6{k7}{z}, xmm5 # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [ecx] # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512F Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512F
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512F Disp8
vpmovsxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512F
vpmovsxdq zmm6{k7}, ymm5 # AVX512F
vpmovsxdq zmm6{k7}{z}, ymm5 # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovsxdq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovsxwd zmm6{k7}, ymm5 # AVX512F
vpmovsxwd zmm6{k7}{z}, ymm5 # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovsxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovsxwq zmm6{k7}, xmm5 # AVX512F
vpmovsxwq zmm6{k7}{z}, xmm5 # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovsxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
vpmovzxbd zmm6{k7}, xmm5 # AVX512F
vpmovzxbd zmm6{k7}{z}, xmm5 # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovzxbd zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
vpmovzxbq zmm6{k7}, xmm5 # AVX512F
vpmovzxbq zmm6{k7}{z}, xmm5 # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [ecx] # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1016] # AVX512F Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx+1024] # AVX512F
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1024] # AVX512F Disp8
vpmovzxbq zmm6{k7}, QWORD PTR [edx-1032] # AVX512F
vpmovzxdq zmm6{k7}, ymm5 # AVX512F
vpmovzxdq zmm6{k7}{z}, ymm5 # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovzxdq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovzxwd zmm6{k7}, ymm5 # AVX512F
vpmovzxwd zmm6{k7}{z}, ymm5 # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [ecx] # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512F Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512F
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512F Disp8
vpmovzxwd zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512F
vpmovzxwq zmm6{k7}, xmm5 # AVX512F
vpmovzxwq zmm6{k7}{z}, xmm5 # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [ecx] # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx+2048] # AVX512F
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpmovzxwq zmm6{k7}, XMMWORD PTR [edx-2064] # AVX512F
vpmuldq zmm6, zmm5, zmm4 # AVX512F
vpmuldq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmuldq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmuldq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmuldq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmuldq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmuldq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmuldq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmuldq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpmulld zmm6, zmm5, zmm4 # AVX512F
vpmulld zmm6{k7}, zmm5, zmm4 # AVX512F
vpmulld zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmulld zmm6, zmm5, dword bcst [eax] # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmulld zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmulld zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmulld zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmulld zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpmulld zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpmulld zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpmulld zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpmuludq zmm6, zmm5, zmm4 # AVX512F
vpmuludq zmm6{k7}, zmm5, zmm4 # AVX512F
vpmuludq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpmuludq zmm6, zmm5, qword bcst [eax] # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpmuludq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpmuludq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpmuludq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpmuludq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpmuludq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpord zmm6, zmm5, zmm4 # AVX512F
vpord zmm6{k7}, zmm5, zmm4 # AVX512F
vpord zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpord zmm6, zmm5, dword bcst [eax] # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpord zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpord zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpord zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpord zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpord zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpord zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpord zmm6, zmm5, dword bcst [edx-516] # AVX512F
vporq zmm6, zmm5, zmm4 # AVX512F
vporq zmm6{k7}, zmm5, zmm4 # AVX512F
vporq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vporq zmm6, zmm5, qword bcst [eax] # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vporq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vporq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vporq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vporq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vporq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vporq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vporq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpscatterdd [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdd dword ptr [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdd [eax+zmm7+256]{k1}, zmm6 # AVX512F
vpscatterdd [ecx+zmm7*4+1024]{k1}, zmm6 # AVX512F
vpscatterdq [ebp+ymm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdq qword ptr [ebp+ymm7*8-123]{k1}, zmm6 # AVX512F
vpscatterdq [eax+ymm7+256]{k1}, zmm6 # AVX512F
vpscatterdq [ecx+ymm7*4+1024]{k1}, zmm6 # AVX512F
vpscatterqd [ebp+zmm7*8-123]{k1}, ymm6 # AVX512F
vpscatterqd dword ptr [ebp+zmm7*8-123]{k1}, ymm6 # AVX512F
vpscatterqd [eax+zmm7+256]{k1}, ymm6 # AVX512F
vpscatterqd [ecx+zmm7*4+1024]{k1}, ymm6 # AVX512F
vpscatterqq [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterqq qword ptr [ebp+zmm7*8-123]{k1}, zmm6 # AVX512F
vpscatterqq [eax+zmm7+256]{k1}, zmm6 # AVX512F
vpscatterqq [ecx+zmm7*4+1024]{k1}, zmm6 # AVX512F
vpshufd zmm6, zmm5, 0xab # AVX512F
vpshufd zmm6{k7}, zmm5, 0xab # AVX512F
vpshufd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpshufd zmm6, zmm5, 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpshufd zmm6, dword bcst [eax], 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpshufd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpshufd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpshufd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpshufd zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpshufd zmm6, dword bcst [edx+512], 123 # AVX512F
vpshufd zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpshufd zmm6, dword bcst [edx-516], 123 # AVX512F
vpslld zmm6{k7}, zmm5, xmm4 # AVX512F
vpslld zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpslld zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsllq zmm6{k7}, zmm5, xmm4 # AVX512F
vpsllq zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsllq zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsllvd zmm6, zmm5, zmm4 # AVX512F
vpsllvd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsllvd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsllvd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsllvd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsllvd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsllvd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsllvd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsllvd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsllvq zmm6, zmm5, zmm4 # AVX512F
vpsllvq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsllvq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsllvq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsllvq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsllvq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsllvq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsllvq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsllvq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpsrad zmm6{k7}, zmm5, xmm4 # AVX512F
vpsrad zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsrad zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsraq zmm6{k7}, zmm5, xmm4 # AVX512F
vpsraq zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsraq zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsravd zmm6, zmm5, zmm4 # AVX512F
vpsravd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsravd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsravd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsravd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsravd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsravd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsravd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsravd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsravd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsravd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsravq zmm6, zmm5, zmm4 # AVX512F
vpsravq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsravq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsravq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsravq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsravq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsravq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsravq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsravq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsravq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsravq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpsrld zmm6{k7}, zmm5, xmm4 # AVX512F
vpsrld zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsrld zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsrlq zmm6{k7}, zmm5, xmm4 # AVX512F
vpsrlq zmm6{k7}{z}, zmm5, xmm4 # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [ecx] # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx+2032] # AVX512F Disp8
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx+2048] # AVX512F
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx-2048] # AVX512F Disp8
vpsrlq zmm6{k7}, zmm5, XMMWORD PTR [edx-2064] # AVX512F
vpsrlvd zmm6, zmm5, zmm4 # AVX512F
vpsrlvd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsrlvd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrlvd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsrlvd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsrlvd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsrlvd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsrlvd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsrlvd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsrlvq zmm6, zmm5, zmm4 # AVX512F
vpsrlvq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsrlvq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsrlvq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsrlvq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsrlvq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsrlvq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsrlvq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsrlvq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpsrld zmm6, zmm5, 0xab # AVX512F
vpsrld zmm6{k7}, zmm5, 0xab # AVX512F
vpsrld zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsrld zmm6, zmm5, 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsrld zmm6, dword bcst [eax], 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsrld zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsrld zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsrld zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsrld zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpsrld zmm6, dword bcst [edx+512], 123 # AVX512F
vpsrld zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpsrld zmm6, dword bcst [edx-516], 123 # AVX512F
vpsrlq zmm6, zmm5, 0xab # AVX512F
vpsrlq zmm6{k7}, zmm5, 0xab # AVX512F
vpsrlq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsrlq zmm6, zmm5, 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsrlq zmm6, qword bcst [eax], 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsrlq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsrlq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsrlq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsrlq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpsrlq zmm6, qword bcst [edx+1024], 123 # AVX512F
vpsrlq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpsrlq zmm6, qword bcst [edx-1032], 123 # AVX512F
vpsubd zmm6, zmm5, zmm4 # AVX512F
vpsubd zmm6{k7}, zmm5, zmm4 # AVX512F
vpsubd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsubd zmm6, zmm5, dword bcst [eax] # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsubd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsubd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsubd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsubd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpsubd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpsubd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpsubd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpsubq zmm6, zmm5, zmm4 # AVX512F
vpsubq zmm6{k7}, zmm5, zmm4 # AVX512F
vpsubq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpsubq zmm6, zmm5, qword bcst [eax] # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpsubq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpsubq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpsubq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpsubq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpsubq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpsubq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpsubq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vptestmd k5, zmm6, zmm5 # AVX512F
vptestmd k5{k7}, zmm6, zmm5 # AVX512F
# Intel-syntax AVX512F assembler test vectors (continuation of a generated
# list).  Each mnemonic group exercises, in order: register-register forms,
# {k7} merge-masking, {k7}{z} zero-masking, plain memory operands, broadcast
# ("bcst") memory operands, and displacement values that straddle the EVEX
# compressed-displacement (Disp8*N) limits:
#   full ZMM operand (N=64): +127*64 = 8128 and -128*64 = -8192 fit Disp8;
#                            8192 / -8256 force a 32-bit displacement
#   qword bcst      (N= 8):  +1016 / -1024 fit Disp8; 1024 / -1032 do not
#   dword bcst      (N= 4):  +508  / -512  fit Disp8; 512  / -516  do not
# Lines tagged "Disp8" are expected to assemble with the compressed 8-bit
# displacement; the paired untagged line is one granule past the limit.
# NOTE(review): operands are checked against a companion expected-dump file,
# so instruction text must not be altered.
	vptestmd	k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vptestmd	k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vptestmd	k5, zmm6, dword bcst [eax]	 # AVX512F
	vptestmd	k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vptestmd	k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vptestmd	k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vptestmd	k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vptestmd	k5, zmm6, dword bcst [edx+508]	 # AVX512F Disp8
	vptestmd	k5, zmm6, dword bcst [edx+512]	 # AVX512F
	vptestmd	k5, zmm6, dword bcst [edx-512]	 # AVX512F Disp8
	vptestmd	k5, zmm6, dword bcst [edx-516]	 # AVX512F
	vptestmq	k5, zmm6, zmm5	 # AVX512F
	vptestmq	k5{k7}, zmm6, zmm5	 # AVX512F
	vptestmq	k5, zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vptestmq	k5, zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vptestmq	k5, zmm6, qword bcst [eax]	 # AVX512F
	vptestmq	k5, zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vptestmq	k5, zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vptestmq	k5, zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vptestmq	k5, zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vptestmq	k5, zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
	vptestmq	k5, zmm6, qword bcst [edx+1024]	 # AVX512F
	vptestmq	k5, zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
	vptestmq	k5, zmm6, qword bcst [edx-1032]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, zmm4	 # AVX512F
	vpunpckhdq	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpunpckhdq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpunpckhdq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpunpckhdq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpunpckhdq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vpunpckhdq	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vpunpckhdq	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vpunpckhdq	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, zmm4	 # AVX512F
	vpunpckhqdq	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpunpckhqdq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpunpckhqdq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpunpckhqdq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpunpckhqdq	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpunpckhqdq	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpunpckhqdq	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vpunpckldq	zmm6, zmm5, zmm4	 # AVX512F
	vpunpckldq	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpunpckldq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpunpckldq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpunpckldq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpunpckldq	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vpunpckldq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpunpckldq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpunpckldq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpunpckldq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpunpckldq	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vpunpckldq	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vpunpckldq	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vpunpckldq	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, zmm4	 # AVX512F
	vpunpcklqdq	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpunpcklqdq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpunpcklqdq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpunpcklqdq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpunpcklqdq	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpunpcklqdq	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpunpcklqdq	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vpxord	zmm6, zmm5, zmm4	 # AVX512F
	vpxord	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpxord	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpxord	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpxord	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpxord	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vpxord	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpxord	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpxord	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpxord	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpxord	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vpxord	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vpxord	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vpxord	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vpxorq	zmm6, zmm5, zmm4	 # AVX512F
	vpxorq	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpxorq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpxorq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpxorq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpxorq	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpxorq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpxorq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpxorq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpxorq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpxorq	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpxorq	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpxorq	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpxorq	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vrcp14pd	zmm6, zmm5	 # AVX512F
	vrcp14pd	zmm6{k7}, zmm5	 # AVX512F
	vrcp14pd	zmm6{k7}{z}, zmm5	 # AVX512F
	vrcp14pd	zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vrcp14pd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrcp14pd	zmm6, qword bcst [eax]	 # AVX512F
	vrcp14pd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vrcp14pd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vrcp14pd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vrcp14pd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vrcp14pd	zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
	vrcp14pd	zmm6, qword bcst [edx+1024]	 # AVX512F
	vrcp14pd	zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
	vrcp14pd	zmm6, qword bcst [edx-1032]	 # AVX512F
	vrcp14ps	zmm6, zmm5	 # AVX512F
	vrcp14ps	zmm6{k7}, zmm5	 # AVX512F
	vrcp14ps	zmm6{k7}{z}, zmm5	 # AVX512F
	vrcp14ps	zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vrcp14ps	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrcp14ps	zmm6, dword bcst [eax]	 # AVX512F
	vrcp14ps	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vrcp14ps	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vrcp14ps	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vrcp14ps	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vrcp14ps	zmm6, dword bcst [edx+508]	 # AVX512F Disp8
	vrcp14ps	zmm6, dword bcst [edx+512]	 # AVX512F
	vrcp14ps	zmm6, dword bcst [edx-512]	 # AVX512F Disp8
	vrcp14ps	zmm6, dword bcst [edx-516]	 # AVX512F
	vrcp14sd	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vrcp14sd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vrcp14sd	xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
	vrcp14sd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrcp14sd	xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
	vrcp14sd	xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
	vrcp14sd	xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
	vrcp14sd	xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
	vrcp14ss	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vrcp14ss	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vrcp14ss	xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
	vrcp14ss	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrcp14ss	xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
	vrcp14ss	xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
	vrcp14ss	xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
	vrcp14ss	xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
	vrsqrt14pd	zmm6, zmm5	 # AVX512F
	vrsqrt14pd	zmm6{k7}, zmm5	 # AVX512F
	vrsqrt14pd	zmm6{k7}{z}, zmm5	 # AVX512F
	vrsqrt14pd	zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vrsqrt14pd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrsqrt14pd	zmm6, qword bcst [eax]	 # AVX512F
	vrsqrt14pd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vrsqrt14pd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vrsqrt14pd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vrsqrt14pd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vrsqrt14pd	zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
	vrsqrt14pd	zmm6, qword bcst [edx+1024]	 # AVX512F
	vrsqrt14pd	zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
	vrsqrt14pd	zmm6, qword bcst [edx-1032]	 # AVX512F
	vrsqrt14ps	zmm6, zmm5	 # AVX512F
	vrsqrt14ps	zmm6{k7}, zmm5	 # AVX512F
	vrsqrt14ps	zmm6{k7}{z}, zmm5	 # AVX512F
	vrsqrt14ps	zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vrsqrt14ps	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrsqrt14ps	zmm6, dword bcst [eax]	 # AVX512F
	vrsqrt14ps	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vrsqrt14ps	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vrsqrt14ps	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vrsqrt14ps	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vrsqrt14ps	zmm6, dword bcst [edx+508]	 # AVX512F Disp8
	vrsqrt14ps	zmm6, dword bcst [edx+512]	 # AVX512F
	vrsqrt14ps	zmm6, dword bcst [edx-512]	 # AVX512F Disp8
	vrsqrt14ps	zmm6, dword bcst [edx-516]	 # AVX512F
	vrsqrt14sd	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vrsqrt14sd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vrsqrt14sd	xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
	vrsqrt14sd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrsqrt14sd	xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
	vrsqrt14sd	xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
	vrsqrt14sd	xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
	vrsqrt14sd	xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
	vrsqrt14ss	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vrsqrt14ss	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vrsqrt14ss	xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
	vrsqrt14ss	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vrsqrt14ss	xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
	vrsqrt14ss	xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
	vrsqrt14ss	xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
	vrsqrt14ss	xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
# Scatter forms: vscatterdpd takes a YMM index (8 dword indices for 8
# qwords); the dps/qpd/qps forms use a ZMM index.  {k1} write-mask is
# mandatory for scatters.
	vscatterdpd	[ebp+ymm7*8-123]{k1}, zmm6	 # AVX512F
	vscatterdpd	qword ptr [ebp+ymm7*8-123]{k1}, zmm6	 # AVX512F
	vscatterdpd	[eax+ymm7+256]{k1}, zmm6	 # AVX512F
	vscatterdpd	[ecx+ymm7*4+1024]{k1}, zmm6	 # AVX512F
	vscatterdps	[ebp+zmm7*8-123]{k1}, zmm6	 # AVX512F
	vscatterdps	dword ptr [ebp+zmm7*8-123]{k1}, zmm6	 # AVX512F
	vscatterdps	[eax+zmm7+256]{k1}, zmm6	 # AVX512F
	vscatterdps	[ecx+zmm7*4+1024]{k1}, zmm6	 # AVX512F
	vscatterqpd	[ebp+zmm7*8-123]{k1}, zmm6	 # AVX512F
	vscatterqpd	qword ptr [ebp+zmm7*8-123]{k1}, zmm6	 # AVX512F
	vscatterqpd	[eax+zmm7+256]{k1}, zmm6	 # AVX512F
	vscatterqpd	[ecx+zmm7*4+1024]{k1}, zmm6	 # AVX512F
	vscatterqps	[ebp+zmm7*8-123]{k1}, ymm6	 # AVX512F
	vscatterqps	dword ptr [ebp+zmm7*8-123]{k1}, ymm6	 # AVX512F
	vscatterqps	[eax+zmm7+256]{k1}, ymm6	 # AVX512F
	vscatterqps	[ecx+zmm7*4+1024]{k1}, ymm6	 # AVX512F
	vshufpd	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vshufpd	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vshufpd	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vshufpd	zmm6, zmm5, zmm4, 123	 # AVX512F
	vshufpd	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vshufpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vshufpd	zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
	vshufpd	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vshufpd	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vshufpd	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vshufpd	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vshufpd	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
	vshufpd	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
	vshufpd	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
	vshufpd	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
	vshufps	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vshufps	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vshufps	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vshufps	zmm6, zmm5, zmm4, 123	 # AVX512F
	vshufps	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vshufps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vshufps	zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
	vshufps	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vshufps	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vshufps	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vshufps	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vshufps	zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
	vshufps	zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
	vshufps	zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
	vshufps	zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
# Groups below additionally exercise static rounding / SAE modifiers
# ({rn-sae}, {ru-sae}, {rd-sae}, {rz-sae}, {sae}) on the last operand.
	vsqrtpd	zmm6, zmm5	 # AVX512F
	vsqrtpd	zmm6{k7}, zmm5	 # AVX512F
	vsqrtpd	zmm6{k7}{z}, zmm5	 # AVX512F
	vsqrtpd	zmm6, zmm5{rn-sae}	 # AVX512F
	vsqrtpd	zmm6, zmm5{ru-sae}	 # AVX512F
	vsqrtpd	zmm6, zmm5{rd-sae}	 # AVX512F
	vsqrtpd	zmm6, zmm5{rz-sae}	 # AVX512F
	vsqrtpd	zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vsqrtpd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsqrtpd	zmm6, qword bcst [eax]	 # AVX512F
	vsqrtpd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vsqrtpd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vsqrtpd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vsqrtpd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vsqrtpd	zmm6, qword bcst [edx+1016]	 # AVX512F Disp8
	vsqrtpd	zmm6, qword bcst [edx+1024]	 # AVX512F
	vsqrtpd	zmm6, qword bcst [edx-1024]	 # AVX512F Disp8
	vsqrtpd	zmm6, qword bcst [edx-1032]	 # AVX512F
	vsqrtps	zmm6, zmm5	 # AVX512F
	vsqrtps	zmm6{k7}, zmm5	 # AVX512F
	vsqrtps	zmm6{k7}{z}, zmm5	 # AVX512F
	vsqrtps	zmm6, zmm5{rn-sae}	 # AVX512F
	vsqrtps	zmm6, zmm5{ru-sae}	 # AVX512F
	vsqrtps	zmm6, zmm5{rd-sae}	 # AVX512F
	vsqrtps	zmm6, zmm5{rz-sae}	 # AVX512F
	vsqrtps	zmm6, ZMMWORD PTR [ecx]	 # AVX512F
	vsqrtps	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsqrtps	zmm6, dword bcst [eax]	 # AVX512F
	vsqrtps	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vsqrtps	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512F
	vsqrtps	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vsqrtps	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512F
	vsqrtps	zmm6, dword bcst [edx+508]	 # AVX512F Disp8
	vsqrtps	zmm6, dword bcst [edx+512]	 # AVX512F
	vsqrtps	zmm6, dword bcst [edx-512]	 # AVX512F Disp8
	vsqrtps	zmm6, dword bcst [edx-516]	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vsqrtsd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
	vsqrtsd	xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
	vsqrtsd	xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
	vsqrtsd	xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vsqrtss	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
	vsqrtss	xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
	vsqrtss	xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
	vsqrtss	xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
	vsubpd	zmm6, zmm5, zmm4	 # AVX512F
	vsubpd	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vsubpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vsubpd	zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
	vsubpd	zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
	vsubpd	zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
	vsubpd	zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
	vsubpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vsubpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsubpd	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vsubpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vsubpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vsubpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vsubpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vsubpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vsubpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vsubpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vsubpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vsubps	zmm6, zmm5, zmm4	 # AVX512F
	vsubps	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vsubps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vsubps	zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
	vsubps	zmm6, zmm5, zmm4{ru-sae}	 # AVX512F
	vsubps	zmm6, zmm5, zmm4{rd-sae}	 # AVX512F
	vsubps	zmm6, zmm5, zmm4{rz-sae}	 # AVX512F
	vsubps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vsubps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsubps	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vsubps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vsubps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vsubps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vsubps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vsubps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vsubps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vsubps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vsubps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vsubsd	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512F Disp8
	vsubsd	xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512F
	vsubsd	xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512F Disp8
	vsubsd	xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512F
	vsubss	xmm6{k7}, xmm5, xmm4	 # AVX512F
	vsubss	xmm6{k7}{z}, xmm5, xmm4	 # AVX512F
	vsubss	xmm6{k7}, xmm5, xmm4{rn-sae}	 # AVX512F
	vsubss	xmm6{k7}, xmm5, xmm4{ru-sae}	 # AVX512F
	vsubss	xmm6{k7}, xmm5, xmm4{rd-sae}	 # AVX512F
	vsubss	xmm6{k7}, xmm5, xmm4{rz-sae}	 # AVX512F
	vsubss	xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512F
	vsubss	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vsubss	xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
	vsubss	xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512F
	vsubss	xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
	vsubss	xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512F
	vucomisd	xmm6, xmm5{sae}	 # AVX512F
	vucomiss	xmm6, xmm5{sae}	 # AVX512F
	vunpckhpd	zmm6, zmm5, zmm4	 # AVX512F
	vunpckhpd	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vunpckhpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vunpckhpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vunpckhpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vunpckhpd	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vunpckhpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vunpckhpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vunpckhpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vunpckhpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vunpckhpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vunpckhpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vunpckhpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vunpckhpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vunpckhps	zmm6, zmm5, zmm4	 # AVX512F
	vunpckhps	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vunpckhps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vunpckhps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vunpckhps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vunpckhps	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vunpckhps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vunpckhps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vunpckhps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vunpckhps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vunpckhps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vunpckhps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vunpckhps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vunpckhps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vunpcklpd	zmm6, zmm5, zmm4	 # AVX512F
	vunpcklpd	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vunpcklpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vunpcklpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vunpcklpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vunpcklpd	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vunpcklpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vunpcklpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vunpcklpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vunpcklpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vunpcklpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vunpcklpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vunpcklpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vunpcklpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vunpcklps	zmm6, zmm5, zmm4	 # AVX512F
	vunpcklps	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vunpcklps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vunpcklps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vunpcklps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vunpcklps	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vunpcklps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vunpcklps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vunpcklps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vunpcklps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vunpcklps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vunpcklps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vunpcklps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vunpcklps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vpternlogd	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vpternlogd	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vpternlogd	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vpternlogd	zmm6, zmm5, zmm4, 123	 # AVX512F
	vpternlogd	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vpternlogd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vpternlogd	zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
	vpternlogd	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vpternlogd	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vpternlogd	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vpternlogd	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vpternlogd	zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
	vpternlogd	zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
	vpternlogd	zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
	vpternlogd	zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vpternlogq	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vpternlogq	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vpternlogq	zmm6, zmm5, zmm4, 123	 # AVX512F
	vpternlogq	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vpternlogq	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vpternlogq	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
	vpternlogq	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
	vpternlogq	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
	vpternlogq	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# Down-converting vpmov* forms: register destinations only in this section,
# with merge- and zero-masking variants for each saturating/unsigned flavor.
	vpmovqb	xmm6{k7}, zmm5	 # AVX512F
	vpmovqb	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovsqb	xmm6{k7}, zmm5	 # AVX512F
	vpmovsqb	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovusqb	xmm6{k7}, zmm5	 # AVX512F
	vpmovusqb	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovqw	xmm6{k7}, zmm5	 # AVX512F
	vpmovqw	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovsqw	xmm6{k7}, zmm5	 # AVX512F
	vpmovsqw	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovusqw	xmm6{k7}, zmm5	 # AVX512F
	vpmovusqw	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovqd	ymm6{k7}, zmm5	 # AVX512F
	vpmovqd	ymm6{k7}{z}, zmm5	 # AVX512F
	vpmovsqd	ymm6{k7}, zmm5	 # AVX512F
	vpmovsqd	ymm6{k7}{z}, zmm5	 # AVX512F
	vpmovusqd	ymm6{k7}, zmm5	 # AVX512F
	vpmovusqd	ymm6{k7}{z}, zmm5	 # AVX512F
	vpmovdb	xmm6{k7}, zmm5	 # AVX512F
	vpmovdb	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovsdb	xmm6{k7}, zmm5	 # AVX512F
	vpmovsdb	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovusdb	xmm6{k7}, zmm5	 # AVX512F
	vpmovusdb	xmm6{k7}{z}, zmm5	 # AVX512F
	vpmovdw	ymm6{k7}, zmm5	 # AVX512F
	vpmovdw	ymm6{k7}{z}, zmm5	 # AVX512F
	vpmovsdw	ymm6{k7}, zmm5	 # AVX512F
	vpmovsdw	ymm6{k7}{z}, zmm5	 # AVX512F
	vpmovusdw	ymm6{k7}, zmm5	 # AVX512F
	vpmovusdw	ymm6{k7}{z}, zmm5	 # AVX512F
	vshuff32x4	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vshuff32x4	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vshuff32x4	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vshuff32x4	zmm6, zmm5, zmm4, 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vshuff32x4	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vshuff32x4	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
	vshuff32x4	zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
	vshuff32x4	zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
	vshuff32x4	zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vshuff64x2	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vshuff64x2	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vshuff64x2	zmm6, zmm5, zmm4, 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vshuff64x2	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vshuff64x2	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
	vshuff64x2	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
	vshuff64x2	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
	vshuff64x2	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vshufi32x4	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vshufi32x4	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vshufi32x4	zmm6, zmm5, zmm4, 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, dword bcst [eax], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vshufi32x4	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vshufi32x4	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512F Disp8
	vshufi32x4	zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512F
	vshufi32x4	zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512F Disp8
	vshufi32x4	zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	vshufi64x2	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	vshufi64x2	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	vshufi64x2	zmm6, zmm5, zmm4, 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	vshufi64x2	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	vshufi64x2	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
	vshufi64x2	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
	vshufi64x2	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
	vshufi64x2	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
	vpermq	zmm6, zmm5, zmm4	 # AVX512F
	vpermq	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpermq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpermq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpermq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpermq	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpermq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpermq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpermq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpermq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpermq	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpermq	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpermq	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpermq	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vpermpd	zmm6, zmm5, zmm4	 # AVX512F
	vpermpd	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpermpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpermpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpermpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpermpd	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpermpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpermpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpermpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpermpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpermpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpermpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpermpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpermpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vpermt2d	zmm6, zmm5, zmm4	 # AVX512F
	vpermt2d	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpermt2d	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpermt2d	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpermt2d	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpermt2d	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vpermt2d	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpermt2d	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpermt2d	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpermt2d	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpermt2d	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vpermt2d	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vpermt2d	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vpermt2d	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vpermt2q	zmm6, zmm5, zmm4	 # AVX512F
	vpermt2q	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpermt2q	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpermt2q	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpermt2q	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpermt2q	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpermt2q	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpermt2q	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpermt2q	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpermt2q	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpermt2q	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpermt2q	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpermt2q	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpermt2q	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	vpermt2ps	zmm6, zmm5, zmm4	 # AVX512F
	vpermt2ps	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpermt2ps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpermt2ps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpermt2ps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpermt2ps	zmm6, zmm5, dword bcst [eax]	 # AVX512F
	vpermt2ps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpermt2ps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpermt2ps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpermt2ps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpermt2ps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512F Disp8
	vpermt2ps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512F
	vpermt2ps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512F Disp8
	vpermt2ps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512F
	vpermt2pd	zmm6, zmm5, zmm4	 # AVX512F
	vpermt2pd	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vpermt2pd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vpermt2pd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512F
	vpermt2pd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F
	vpermt2pd	zmm6, zmm5, qword bcst [eax]	 # AVX512F
	vpermt2pd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F Disp8
	vpermt2pd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512F
	vpermt2pd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512F Disp8
	vpermt2pd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512F
	vpermt2pd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512F Disp8
	vpermt2pd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512F
	vpermt2pd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512F Disp8
	vpermt2pd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512F
	valignq	zmm6, zmm5, zmm4, 0xab	 # AVX512F
	valignq	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512F
	valignq	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512F
	valignq	zmm6, zmm5, zmm4, 123	 # AVX512F
	valignq	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512F
	valignq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512F
	valignq	zmm6, zmm5, qword bcst [eax], 123	 # AVX512F
	valignq	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512F Disp8
	valignq	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512F
	valignq	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512F Disp8
	valignq	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512F
	valignq	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512F Disp8
	valignq	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512F
	valignq	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512F Disp8
	valignq	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512F
# Unsigned scalar conversions: eax and ebp destinations cover both a
# plain and a REX-free high-numbered-encoding base register case.
	vcvtsd2usi	eax, xmm6	 # AVX512F
	vcvtsd2usi	eax, xmm6{rn-sae}	 # AVX512F
	vcvtsd2usi	eax, xmm6{ru-sae}	 # AVX512F
	vcvtsd2usi	eax, xmm6{rd-sae}	 # AVX512F
	vcvtsd2usi	eax, xmm6{rz-sae}	 # AVX512F
	vcvtsd2usi	eax, QWORD PTR [ecx]	 # AVX512F
	vcvtsd2usi	eax, QWORD PTR [esp+esi*8-123456]	 # AVX512F
	vcvtsd2usi	eax, QWORD PTR [edx+1016]	 # AVX512F Disp8
	vcvtsd2usi	eax, QWORD PTR [edx+1024]	 # AVX512F
	vcvtsd2usi	eax, QWORD PTR [edx-1024]	 # AVX512F Disp8
	vcvtsd2usi	eax, QWORD PTR [edx-1032]	 # AVX512F
	vcvtsd2usi	ebp, xmm6	 # AVX512F
	vcvtsd2usi	ebp, xmm6{rn-sae}	 # AVX512F
	vcvtsd2usi	ebp, xmm6{ru-sae}	 # AVX512F
	vcvtsd2usi	ebp, xmm6{rd-sae}	 # AVX512F
	vcvtsd2usi	ebp, xmm6{rz-sae}	 # AVX512F
	vcvtsd2usi	ebp, QWORD PTR [ecx]	 # AVX512F
	vcvtsd2usi	ebp, QWORD PTR [esp+esi*8-123456]	 # AVX512F
	vcvtsd2usi	ebp, QWORD PTR [edx+1016]	 # AVX512F Disp8
	vcvtsd2usi	ebp, QWORD PTR [edx+1024]	 # AVX512F
	vcvtsd2usi	ebp, QWORD PTR [edx-1024]	 # AVX512F Disp8
	vcvtsd2usi	ebp, QWORD PTR [edx-1032]	 # AVX512F
	vcvtss2usi	eax, xmm6	 # AVX512F
	vcvtss2usi	eax, xmm6{rn-sae}	 # AVX512F
	vcvtss2usi	eax, xmm6{ru-sae}	 # AVX512F
	vcvtss2usi	eax, xmm6{rd-sae}	 # AVX512F
	vcvtss2usi	eax, xmm6{rz-sae}	 # AVX512F
	vcvtss2usi	eax, DWORD PTR [ecx]	 # AVX512F
	vcvtss2usi	eax, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vcvtss2usi	eax, DWORD PTR [edx+508]	 # AVX512F Disp8
	vcvtss2usi	eax, DWORD PTR [edx+512]	 # AVX512F
	vcvtss2usi	eax, DWORD PTR [edx-512]	 # AVX512F Disp8
	vcvtss2usi	eax, DWORD PTR [edx-516]	 # AVX512F
	vcvtss2usi	ebp, xmm6	 # AVX512F
	vcvtss2usi	ebp, xmm6{rn-sae}	 # AVX512F
	vcvtss2usi	ebp, xmm6{ru-sae}	 # AVX512F
	vcvtss2usi	ebp, xmm6{rd-sae}	 # AVX512F
	vcvtss2usi	ebp, xmm6{rz-sae}	 # AVX512F
	vcvtss2usi	ebp, DWORD PTR [ecx]	 # AVX512F
	vcvtss2usi	ebp, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vcvtss2usi	ebp, DWORD PTR [edx+508]	 # AVX512F Disp8
	vcvtss2usi	ebp, DWORD PTR [edx+512]	 # AVX512F
	vcvtss2usi	ebp, DWORD PTR [edx-512]	 # AVX512F Disp8
	vcvtss2usi	ebp, DWORD PTR [edx-516]	 # AVX512F
	vcvtusi2sd	xmm6, xmm5, eax	 # AVX512F
	vcvtusi2sd	xmm6, xmm5, ebp	 # AVX512F
	vcvtusi2sd	xmm6, xmm5, DWORD PTR [ecx]	 # AVX512F
	vcvtusi2sd	xmm6, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vcvtusi2sd	xmm6, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
	vcvtusi2sd	xmm6, xmm5, DWORD PTR [edx+512]	 # AVX512F
	vcvtusi2sd	xmm6, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
	vcvtusi2sd	xmm6, xmm5, DWORD PTR [edx-516]	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, eax	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, eax{rn-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, eax{ru-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, eax{rd-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, eax{rz-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, ebp	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, ebp{rn-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, ebp{ru-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, ebp{rd-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, ebp{rz-sae}	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, DWORD PTR [ecx]	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, DWORD PTR [edx+508]	 # AVX512F Disp8
	vcvtusi2ss	xmm6, xmm5, DWORD PTR [edx+512]	 # AVX512F
	vcvtusi2ss	xmm6, xmm5, DWORD PTR [edx-512]	 # AVX512F Disp8
	vcvtusi2ss	xmm6, xmm5, DWORD PTR [edx-516]	 # AVX512F
	vscalefpd	zmm6, zmm5, zmm4	 # AVX512F
	vscalefpd	zmm6{k7}, zmm5, zmm4	 # AVX512F
	vscalefpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512F
	vscalefpd	zmm6, zmm5, zmm4{rn-sae}	 # AVX512F
vscalefpd zmm6, zmm5, zmm4{ru-sae} # AVX512F
vscalefpd zmm6, zmm5, zmm4{rd-sae} # AVX512F
vscalefpd zmm6, zmm5, zmm4{rz-sae} # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vscalefpd zmm6, zmm5, qword bcst [eax] # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vscalefpd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vscalefpd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vscalefpd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vscalefpd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vscalefpd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vscalefps zmm6, zmm5, zmm4 # AVX512F
vscalefps zmm6{k7}, zmm5, zmm4 # AVX512F
vscalefps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vscalefps zmm6, zmm5, zmm4{rn-sae} # AVX512F
vscalefps zmm6, zmm5, zmm4{ru-sae} # AVX512F
vscalefps zmm6, zmm5, zmm4{rd-sae} # AVX512F
vscalefps zmm6, zmm5, zmm4{rz-sae} # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vscalefps zmm6, zmm5, dword bcst [eax] # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vscalefps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vscalefps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vscalefps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vscalefps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vscalefps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vscalefps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vscalefps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4 # AVX512F
vscalefsd xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vscalefsd xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512F Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512F
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512F Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4 # AVX512F
vscalefss xmm6{k7}{z}, xmm5, xmm4 # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{rn-sae} # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{ru-sae} # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{rd-sae} # AVX512F
vscalefss xmm6{k7}, xmm5, xmm4{rz-sae} # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512F Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512F
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512F Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512F
vfixupimmps zmm6, zmm5, zmm4, 0xab # AVX512F
vfixupimmps zmm6{k7}, zmm5, zmm4, 0xab # AVX512F
vfixupimmps zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F
vfixupimmps zmm6, zmm5, zmm4{sae}, 0xab # AVX512F
vfixupimmps zmm6, zmm5, zmm4, 123 # AVX512F
vfixupimmps zmm6, zmm5, zmm4{sae}, 123 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vfixupimmps zmm6, zmm5, dword bcst [eax], 123 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512F
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vfixupimmps zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512F
vfixupimmps zmm6, zmm5, dword bcst [edx+508], 123 # AVX512F Disp8
vfixupimmps zmm6, zmm5, dword bcst [edx+512], 123 # AVX512F
vfixupimmps zmm6, zmm5, dword bcst [edx-512], 123 # AVX512F Disp8
vfixupimmps zmm6, zmm5, dword bcst [edx-516], 123 # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, 0xab # AVX512F
vfixupimmpd zmm6{k7}, zmm5, zmm4, 0xab # AVX512F
vfixupimmpd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F
vfixupimmpd zmm6, zmm5, zmm4{sae}, 0xab # AVX512F
vfixupimmpd zmm6, zmm5, zmm4, 123 # AVX512F
vfixupimmpd zmm6, zmm5, zmm4{sae}, 123 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [ecx], 123 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vfixupimmpd zmm6, zmm5, qword bcst [eax], 123 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx+8192], 123 # AVX512F
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, ZMMWORD PTR [edx-8256], 123 # AVX512F
vfixupimmpd zmm6, zmm5, qword bcst [edx+1016], 123 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, qword bcst [edx+1024], 123 # AVX512F
vfixupimmpd zmm6, zmm5, qword bcst [edx-1024], 123 # AVX512F Disp8
vfixupimmpd zmm6, zmm5, qword bcst [edx-1032], 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vfixupimmss xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512F Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512F
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512F Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vfixupimmsd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512F Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512F
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512F Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512F
vpslld zmm6, zmm5, 0xab # AVX512F
vpslld zmm6{k7}, zmm5, 0xab # AVX512F
vpslld zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpslld zmm6, zmm5, 123 # AVX512F
vpslld zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpslld zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpslld zmm6, dword bcst [eax], 123 # AVX512F
vpslld zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpslld zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpslld zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpslld zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpslld zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpslld zmm6, dword bcst [edx+512], 123 # AVX512F
vpslld zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpslld zmm6, dword bcst [edx-516], 123 # AVX512F
vpsllq zmm6, zmm5, 0xab # AVX512F
vpsllq zmm6{k7}, zmm5, 0xab # AVX512F
vpsllq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsllq zmm6, zmm5, 123 # AVX512F
vpsllq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsllq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsllq zmm6, qword bcst [eax], 123 # AVX512F
vpsllq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsllq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsllq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsllq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsllq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpsllq zmm6, qword bcst [edx+1024], 123 # AVX512F
vpsllq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpsllq zmm6, qword bcst [edx-1032], 123 # AVX512F
vpsrad zmm6, zmm5, 0xab # AVX512F
vpsrad zmm6{k7}, zmm5, 0xab # AVX512F
vpsrad zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsrad zmm6, zmm5, 123 # AVX512F
vpsrad zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsrad zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsrad zmm6, dword bcst [eax], 123 # AVX512F
vpsrad zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsrad zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsrad zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsrad zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsrad zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vpsrad zmm6, dword bcst [edx+512], 123 # AVX512F
vpsrad zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vpsrad zmm6, dword bcst [edx-516], 123 # AVX512F
vpsraq zmm6, zmm5, 0xab # AVX512F
vpsraq zmm6{k7}, zmm5, 0xab # AVX512F
vpsraq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vpsraq zmm6, zmm5, 123 # AVX512F
vpsraq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vpsraq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vpsraq zmm6, qword bcst [eax], 123 # AVX512F
vpsraq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vpsraq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vpsraq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vpsraq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vpsraq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vpsraq zmm6, qword bcst [edx+1024], 123 # AVX512F
vpsraq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vpsraq zmm6, qword bcst [edx-1032], 123 # AVX512F
vprolvd zmm6, zmm5, zmm4 # AVX512F
vprolvd zmm6{k7}, zmm5, zmm4 # AVX512F
vprolvd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vprolvd zmm6, zmm5, dword bcst [eax] # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vprolvd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vprolvd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vprolvd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vprolvd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vprolvd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vprolvd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vprolvd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vprold zmm6, zmm5, 0xab # AVX512F
vprold zmm6{k7}, zmm5, 0xab # AVX512F
vprold zmm6{k7}{z}, zmm5, 0xab # AVX512F
vprold zmm6, zmm5, 123 # AVX512F
vprold zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vprold zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vprold zmm6, dword bcst [eax], 123 # AVX512F
vprold zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vprold zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vprold zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vprold zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vprold zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vprold zmm6, dword bcst [edx+512], 123 # AVX512F
vprold zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vprold zmm6, dword bcst [edx-516], 123 # AVX512F
vprolvq zmm6, zmm5, zmm4 # AVX512F
vprolvq zmm6{k7}, zmm5, zmm4 # AVX512F
vprolvq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vprolvq zmm6, zmm5, qword bcst [eax] # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vprolvq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vprolvq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vprolvq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vprolvq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vprolvq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vprolvq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vprolvq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vprolq zmm6, zmm5, 0xab # AVX512F
vprolq zmm6{k7}, zmm5, 0xab # AVX512F
vprolq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vprolq zmm6, zmm5, 123 # AVX512F
vprolq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vprolq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vprolq zmm6, qword bcst [eax], 123 # AVX512F
vprolq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vprolq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vprolq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vprolq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vprolq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vprolq zmm6, qword bcst [edx+1024], 123 # AVX512F
vprolq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vprolq zmm6, qword bcst [edx-1032], 123 # AVX512F
vprorvd zmm6, zmm5, zmm4 # AVX512F
vprorvd zmm6{k7}, zmm5, zmm4 # AVX512F
vprorvd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vprorvd zmm6, zmm5, dword bcst [eax] # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vprorvd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vprorvd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vprorvd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vprorvd zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vprorvd zmm6, zmm5, dword bcst [edx+512] # AVX512F
vprorvd zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vprorvd zmm6, zmm5, dword bcst [edx-516] # AVX512F
vprord zmm6, zmm5, 0xab # AVX512F
vprord zmm6{k7}, zmm5, 0xab # AVX512F
vprord zmm6{k7}{z}, zmm5, 0xab # AVX512F
vprord zmm6, zmm5, 123 # AVX512F
vprord zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vprord zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vprord zmm6, dword bcst [eax], 123 # AVX512F
vprord zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vprord zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vprord zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vprord zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vprord zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vprord zmm6, dword bcst [edx+512], 123 # AVX512F
vprord zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vprord zmm6, dword bcst [edx-516], 123 # AVX512F
vprorvq zmm6, zmm5, zmm4 # AVX512F
vprorvq zmm6{k7}, zmm5, zmm4 # AVX512F
vprorvq zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vprorvq zmm6, zmm5, qword bcst [eax] # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vprorvq zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vprorvq zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vprorvq zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vprorvq zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vprorvq zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vprorvq zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vprorvq zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vprorq zmm6, zmm5, 0xab # AVX512F
vprorq zmm6{k7}, zmm5, 0xab # AVX512F
vprorq zmm6{k7}{z}, zmm5, 0xab # AVX512F
vprorq zmm6, zmm5, 123 # AVX512F
vprorq zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vprorq zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vprorq zmm6, qword bcst [eax], 123 # AVX512F
vprorq zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vprorq zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vprorq zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vprorq zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vprorq zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vprorq zmm6, qword bcst [edx+1024], 123 # AVX512F
vprorq zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vprorq zmm6, qword bcst [edx-1032], 123 # AVX512F
vrndscalepd zmm6, zmm5, 0xab # AVX512F
vrndscalepd zmm6{k7}, zmm5, 0xab # AVX512F
vrndscalepd zmm6{k7}{z}, zmm5, 0xab # AVX512F
vrndscalepd zmm6, zmm5{sae}, 0xab # AVX512F
vrndscalepd zmm6, zmm5, 123 # AVX512F
vrndscalepd zmm6, zmm5{sae}, 123 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vrndscalepd zmm6, qword bcst [eax], 123 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vrndscalepd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vrndscalepd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vrndscalepd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vrndscalepd zmm6, qword bcst [edx+1016], 123 # AVX512F Disp8
vrndscalepd zmm6, qword bcst [edx+1024], 123 # AVX512F
vrndscalepd zmm6, qword bcst [edx-1024], 123 # AVX512F Disp8
vrndscalepd zmm6, qword bcst [edx-1032], 123 # AVX512F
vrndscaleps zmm6, zmm5, 0xab # AVX512F
vrndscaleps zmm6{k7}, zmm5, 0xab # AVX512F
vrndscaleps zmm6{k7}{z}, zmm5, 0xab # AVX512F
vrndscaleps zmm6, zmm5{sae}, 0xab # AVX512F
vrndscaleps zmm6, zmm5, 123 # AVX512F
vrndscaleps zmm6, zmm5{sae}, 123 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [ecx], 123 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F
vrndscaleps zmm6, dword bcst [eax], 123 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512F Disp8
vrndscaleps zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512F
vrndscaleps zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512F Disp8
vrndscaleps zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512F
vrndscaleps zmm6, dword bcst [edx+508], 123 # AVX512F Disp8
vrndscaleps zmm6, dword bcst [edx+512], 123 # AVX512F
vrndscaleps zmm6, dword bcst [edx-512], 123 # AVX512F Disp8
vrndscaleps zmm6, dword bcst [edx-516], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vrndscalesd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512F Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512F
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512F Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, 0xab # AVX512F
vrndscaless xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4, 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512F Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512F
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512F Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512F
vpcompressq ZMMWORD PTR [ecx], zmm6 # AVX512F
vpcompressq ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpcompressq ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpcompressq ZMMWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpcompressq ZMMWORD PTR [edx+1024], zmm6 # AVX512F
vpcompressq ZMMWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpcompressq ZMMWORD PTR [edx-1032], zmm6 # AVX512F
vpcompressq zmm6, zmm5 # AVX512F
vpcompressq zmm6{k7}, zmm5 # AVX512F
vpcompressq zmm6{k7}{z}, zmm5 # AVX512F
kandw k5, k6, k7 # AVX512F
kandnw k5, k6, k7 # AVX512F
korw k5, k6, k7 # AVX512F
kxnorw k5, k6, k7 # AVX512F
kxorw k5, k6, k7 # AVX512F
knotw k5, k6 # AVX512F
kortestw k5, k6 # AVX512F
kshiftrw k5, k6, 0xab # AVX512F
kshiftrw k5, k6, 123 # AVX512F
kshiftlw k5, k6, 0xab # AVX512F
kshiftlw k5, k6, 123 # AVX512F
kmovw k5, k6 # AVX512F
kmovw k5, WORD PTR [ecx] # AVX512F
kmovw k5, WORD PTR [esp+esi*8-123456] # AVX512F
kmovw WORD PTR [ecx], k5 # AVX512F
kmovw WORD PTR [esp+esi*8-123456], k5 # AVX512F
kmovw k5, eax # AVX512F
kmovw k5, ebp # AVX512F
kmovw eax, k5 # AVX512F
kmovw ebp, k5 # AVX512F
kunpckbw k5, k6, k7 # AVX512F
vcvtps2ph YMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vcvtps2ph YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vcvtps2ph YMMWORD PTR [ecx], zmm6, 123 # AVX512F
vcvtps2ph YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vcvtps2ph YMMWORD PTR [edx+4064], zmm6, 123 # AVX512F Disp8
vcvtps2ph YMMWORD PTR [edx+4096], zmm6, 123 # AVX512F
vcvtps2ph YMMWORD PTR [edx-4096], zmm6, 123 # AVX512F Disp8
vcvtps2ph YMMWORD PTR [edx-4128], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextractf32x4 XMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextractf32x4 XMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [edx+2032], zmm6, 123 # AVX512F Disp8
vextractf32x4 XMMWORD PTR [edx+2048], zmm6, 123 # AVX512F
vextractf32x4 XMMWORD PTR [edx-2048], zmm6, 123 # AVX512F Disp8
vextractf32x4 XMMWORD PTR [edx-2064], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextractf64x4 YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextractf64x4 YMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [edx+4064], zmm6, 123 # AVX512F Disp8
vextractf64x4 YMMWORD PTR [edx+4096], zmm6, 123 # AVX512F
vextractf64x4 YMMWORD PTR [edx-4096], zmm6, 123 # AVX512F Disp8
vextractf64x4 YMMWORD PTR [edx-4128], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextracti32x4 XMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextracti32x4 XMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [edx+2032], zmm6, 123 # AVX512F Disp8
vextracti32x4 XMMWORD PTR [edx+2048], zmm6, 123 # AVX512F
vextracti32x4 XMMWORD PTR [edx-2048], zmm6, 123 # AVX512F Disp8
vextracti32x4 XMMWORD PTR [edx-2064], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [ecx], zmm6, 0xab # AVX512F
vextracti64x4 YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512F
vextracti64x4 YMMWORD PTR [ecx], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [edx+4064], zmm6, 123 # AVX512F Disp8
vextracti64x4 YMMWORD PTR [edx+4096], zmm6, 123 # AVX512F
vextracti64x4 YMMWORD PTR [edx-4096], zmm6, 123 # AVX512F Disp8
vextracti64x4 YMMWORD PTR [edx-4128], zmm6, 123 # AVX512F
vmovapd ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovapd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovapd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovapd ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovapd ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovapd ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovapd ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovaps ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovaps ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovaps ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovaps ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovaps ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovaps ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovaps ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqa32 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqa32 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqa32 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqa64 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqa64 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqa64 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqu32 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqu32 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqu32 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovdqu64 ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovdqu64 ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovdqu64 ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovupd ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovupd ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovupd ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovupd ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovupd ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovupd ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovupd ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vmovups ZMMWORD PTR [ecx], zmm6 # AVX512F
vmovups ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vmovups ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vmovups ZMMWORD PTR [edx+8128], zmm6 # AVX512F Disp8
vmovups ZMMWORD PTR [edx+8192], zmm6 # AVX512F
vmovups ZMMWORD PTR [edx-8192], zmm6 # AVX512F Disp8
vmovups ZMMWORD PTR [edx-8256], zmm6 # AVX512F
vpmovqb QWORD PTR [ecx], zmm6 # AVX512F
vpmovqb QWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovqb QWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovqb QWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpmovqb QWORD PTR [edx+1024], zmm6 # AVX512F
vpmovqb QWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpmovqb QWORD PTR [edx-1032], zmm6 # AVX512F
vpmovsqb QWORD PTR [ecx], zmm6 # AVX512F
vpmovsqb QWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsqb QWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsqb QWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpmovsqb QWORD PTR [edx+1024], zmm6 # AVX512F
vpmovsqb QWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpmovsqb QWORD PTR [edx-1032], zmm6 # AVX512F
vpmovusqb QWORD PTR [ecx], zmm6 # AVX512F
vpmovusqb QWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusqb QWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusqb QWORD PTR [edx+1016], zmm6 # AVX512F Disp8
vpmovusqb QWORD PTR [edx+1024], zmm6 # AVX512F
vpmovusqb QWORD PTR [edx-1024], zmm6 # AVX512F Disp8
vpmovusqb QWORD PTR [edx-1032], zmm6 # AVX512F
vpmovqw XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovqw XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovqw XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovqw XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovqw XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovqw XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovqw XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsqw XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovsqw XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovsqw XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovsqw XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusqw XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovusqw XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovusqw XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovusqw XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovqd YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovqd YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovqd YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovqd YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovqd YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovqd YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovqd YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsqd YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovsqd YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovsqd YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovsqd YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusqd YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovusqd YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovusqd YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovusqd YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovdb XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovdb XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovdb XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovdb XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovdb XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovdb XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovdb XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsdb XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovsdb XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovsdb XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovsdb XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusdb XMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [edx+2032], zmm6 # AVX512F Disp8
vpmovusdb XMMWORD PTR [edx+2048], zmm6 # AVX512F
vpmovusdb XMMWORD PTR [edx-2048], zmm6 # AVX512F Disp8
vpmovusdb XMMWORD PTR [edx-2064], zmm6 # AVX512F
vpmovdw YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovdw YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovdw YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovdw YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovdw YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovdw YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovdw YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovsdw YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovsdw YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovsdw YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovsdw YMMWORD PTR [edx-4128], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [ecx], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [ecx]{k7}, zmm6 # AVX512F
vpmovusdw YMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [edx+4064], zmm6 # AVX512F Disp8
vpmovusdw YMMWORD PTR [edx+4096], zmm6 # AVX512F
vpmovusdw YMMWORD PTR [edx-4096], zmm6 # AVX512F Disp8
vpmovusdw YMMWORD PTR [edx-4128], zmm6 # AVX512F
vcvttpd2udq ymm6{k7}, zmm5 # AVX512F
vcvttpd2udq ymm6{k7}{z}, zmm5 # AVX512F
vcvttpd2udq ymm6{k7}, zmm5{sae} # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [ecx] # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvttpd2udq ymm6{k7}, qword bcst [eax] # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx+8192] # AVX512F
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, ZMMWORD PTR [edx-8256] # AVX512F
vcvttpd2udq ymm6{k7}, qword bcst [edx+1016] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, qword bcst [edx+1024] # AVX512F
vcvttpd2udq ymm6{k7}, qword bcst [edx-1024] # AVX512F Disp8
vcvttpd2udq ymm6{k7}, qword bcst [edx-1032] # AVX512F
vcvttps2udq zmm6, zmm5 # AVX512F
vcvttps2udq zmm6{k7}, zmm5 # AVX512F
vcvttps2udq zmm6{k7}{z}, zmm5 # AVX512F
vcvttps2udq zmm6, zmm5{sae} # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [ecx] # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vcvttps2udq zmm6, dword bcst [eax] # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vcvttps2udq zmm6, ZMMWORD PTR [edx+8192] # AVX512F
vcvttps2udq zmm6, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vcvttps2udq zmm6, ZMMWORD PTR [edx-8256] # AVX512F
vcvttps2udq zmm6, dword bcst [edx+508] # AVX512F Disp8
vcvttps2udq zmm6, dword bcst [edx+512] # AVX512F
vcvttps2udq zmm6, dword bcst [edx-512] # AVX512F Disp8
vcvttps2udq zmm6, dword bcst [edx-516] # AVX512F
vcvttsd2usi eax, xmm6 # AVX512F
vcvttsd2usi eax, xmm6{sae} # AVX512F
vcvttsd2usi eax, QWORD PTR [ecx] # AVX512F
vcvttsd2usi eax, QWORD PTR [esp+esi*8-123456] # AVX512F
vcvttsd2usi eax, QWORD PTR [edx+1016] # AVX512F Disp8
vcvttsd2usi eax, QWORD PTR [edx+1024] # AVX512F
vcvttsd2usi eax, QWORD PTR [edx-1024] # AVX512F Disp8
vcvttsd2usi eax, QWORD PTR [edx-1032] # AVX512F
vcvttsd2usi ebp, xmm6 # AVX512F
vcvttsd2usi ebp, xmm6{sae} # AVX512F
vcvttsd2usi ebp, QWORD PTR [ecx] # AVX512F
vcvttsd2usi ebp, QWORD PTR [esp+esi*8-123456] # AVX512F
vcvttsd2usi ebp, QWORD PTR [edx+1016] # AVX512F Disp8
vcvttsd2usi ebp, QWORD PTR [edx+1024] # AVX512F
vcvttsd2usi ebp, QWORD PTR [edx-1024] # AVX512F Disp8
vcvttsd2usi ebp, QWORD PTR [edx-1032] # AVX512F
vcvttss2usi eax, xmm6 # AVX512F
vcvttss2usi eax, xmm6{sae} # AVX512F
vcvttss2usi eax, DWORD PTR [ecx] # AVX512F
vcvttss2usi eax, DWORD PTR [esp+esi*8-123456] # AVX512F
vcvttss2usi eax, DWORD PTR [edx+508] # AVX512F Disp8
vcvttss2usi eax, DWORD PTR [edx+512] # AVX512F
vcvttss2usi eax, DWORD PTR [edx-512] # AVX512F Disp8
vcvttss2usi eax, DWORD PTR [edx-516] # AVX512F
vcvttss2usi ebp, xmm6 # AVX512F
vcvttss2usi ebp, xmm6{sae} # AVX512F
vcvttss2usi ebp, DWORD PTR [ecx] # AVX512F
vcvttss2usi ebp, DWORD PTR [esp+esi*8-123456] # AVX512F
vcvttss2usi ebp, DWORD PTR [edx+508] # AVX512F Disp8
vcvttss2usi ebp, DWORD PTR [edx+512] # AVX512F
vcvttss2usi ebp, DWORD PTR [edx-512] # AVX512F Disp8
vcvttss2usi ebp, DWORD PTR [edx-516] # AVX512F
vpermi2d zmm6, zmm5, zmm4 # AVX512F
vpermi2d zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2d zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2d zmm6, zmm5, dword bcst [eax] # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2d zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2d zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermi2d zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermi2d zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermi2d zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermi2q zmm6, zmm5, zmm4 # AVX512F
vpermi2q zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2q zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2q zmm6, zmm5, qword bcst [eax] # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2q zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2q zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpermi2q zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpermi2q zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpermi2q zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vpermi2ps zmm6, zmm5, zmm4 # AVX512F
vpermi2ps zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2ps zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2ps zmm6, zmm5, dword bcst [eax] # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2ps zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2ps zmm6, zmm5, dword bcst [edx+508] # AVX512F Disp8
vpermi2ps zmm6, zmm5, dword bcst [edx+512] # AVX512F
vpermi2ps zmm6, zmm5, dword bcst [edx-512] # AVX512F Disp8
vpermi2ps zmm6, zmm5, dword bcst [edx-516] # AVX512F
vpermi2pd zmm6, zmm5, zmm4 # AVX512F
vpermi2pd zmm6{k7}, zmm5, zmm4 # AVX512F
vpermi2pd zmm6{k7}{z}, zmm5, zmm4 # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vpermi2pd zmm6, zmm5, qword bcst [eax] # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vpermi2pd zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vpermi2pd zmm6, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vpermi2pd zmm6, zmm5, qword bcst [edx+1024] # AVX512F
vpermi2pd zmm6, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vpermi2pd zmm6, zmm5, qword bcst [edx-1032] # AVX512F
vptestnmd k5, zmm5, zmm4 # AVX512F
vptestnmd k5{k7}, zmm5, zmm4 # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [ecx] # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vptestnmd k5, zmm5, dword bcst [eax] # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vptestnmd k5, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vptestnmd k5, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vptestnmd k5, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vptestnmd k5, zmm5, dword bcst [edx+508] # AVX512F Disp8
vptestnmd k5, zmm5, dword bcst [edx+512] # AVX512F
vptestnmd k5, zmm5, dword bcst [edx-512] # AVX512F Disp8
vptestnmd k5, zmm5, dword bcst [edx-516] # AVX512F
vptestnmq k5, zmm5, zmm4 # AVX512F
vptestnmq k5{k7}, zmm5, zmm4 # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [ecx] # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F
vptestnmq k5, zmm5, qword bcst [eax] # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512F Disp8
vptestnmq k5, zmm5, ZMMWORD PTR [edx+8192] # AVX512F
vptestnmq k5, zmm5, ZMMWORD PTR [edx-8192] # AVX512F Disp8
vptestnmq k5, zmm5, ZMMWORD PTR [edx-8256] # AVX512F
vptestnmq k5, zmm5, qword bcst [edx+1016] # AVX512F Disp8
vptestnmq k5, zmm5, qword bcst [edx+1024] # AVX512F
vptestnmq k5, zmm5, qword bcst [edx-1024] # AVX512F Disp8
vptestnmq k5, zmm5, qword bcst [edx-1032] # AVX512F
vaddps zmm0, zmm0, [bx]
vaddps zmm0, zmm0, [bx+0x40]
vaddps zmm0, zmm0, [bx+0x1234]
# ======================================================================
# File boundary: the following content is a separate testsuite source,
#   gas/testsuite/gas/i386/x86-64-xop.s
# (from repo tactcomplabs/xbgas-binutils-gdb; raw extraction artifacts
#  at this join were replaced by this comment header)
# ======================================================================
# Check XOP instructions (maxcombos=16, maxops=3, archbits=64, seed=1)
.allow_index_reg
.text
_start:
# Tests for op VFRCZPD xmm2/mem128, xmm1 (at&t syntax)
VFRCZPD %xmm2,%xmm15
VFRCZPD %xmm0,%xmm12
VFRCZPD (%r12),%xmm0
VFRCZPD (%rax),%xmm15
VFRCZPD %xmm0,%xmm0
VFRCZPD (%r10),%xmm15
VFRCZPD %xmm2,%xmm0
VFRCZPD %xmm15,%xmm12
VFRCZPD %xmm15,%xmm0
VFRCZPD %xmm0,%xmm15
VFRCZPD (%r10),%xmm12
VFRCZPD %xmm15,%xmm15
VFRCZPD (%rax),%xmm0
VFRCZPD (%r12),%xmm15
VFRCZPD (%rax),%xmm12
VFRCZPD (%r10),%xmm0
# Tests for op VFRCZPD ymm2/mem256, ymm1 (at&t syntax)
VFRCZPD %ymm2,%ymm15
VFRCZPD %ymm0,%ymm12
VFRCZPD (%r12),%ymm0
VFRCZPD (%rax),%ymm15
VFRCZPD %ymm0,%ymm0
VFRCZPD (%r10),%ymm15
VFRCZPD %ymm2,%ymm0
VFRCZPD %ymm15,%ymm12
VFRCZPD %ymm15,%ymm0
VFRCZPD %ymm0,%ymm15
VFRCZPD (%r10),%ymm12
VFRCZPD %ymm15,%ymm15
VFRCZPD (%rax),%ymm0
VFRCZPD (%r12),%ymm15
VFRCZPD (%rax),%ymm12
VFRCZPD (%r10),%ymm0
# Tests for op VFRCZPS xmm2/mem128, xmm1 (at&t syntax)
VFRCZPS %xmm2,%xmm15
VFRCZPS %xmm0,%xmm12
VFRCZPS (%r12),%xmm0
VFRCZPS (%rax),%xmm15
VFRCZPS %xmm0,%xmm0
VFRCZPS (%r10),%xmm15
VFRCZPS %xmm2,%xmm0
VFRCZPS %xmm15,%xmm12
VFRCZPS %xmm15,%xmm0
VFRCZPS %xmm0,%xmm15
VFRCZPS (%r10),%xmm12
VFRCZPS %xmm15,%xmm15
VFRCZPS (%rax),%xmm0
VFRCZPS (%r12),%xmm15
VFRCZPS (%rax),%xmm12
VFRCZPS (%r10),%xmm0
# Tests for op VFRCZPS ymm2/mem256, ymm1 (at&t syntax)
VFRCZPS %ymm2,%ymm15
VFRCZPS %ymm0,%ymm12
VFRCZPS (%r12),%ymm0
VFRCZPS (%rax),%ymm15
VFRCZPS %ymm0,%ymm0
VFRCZPS (%r10),%ymm15
VFRCZPS %ymm2,%ymm0
VFRCZPS %ymm15,%ymm12
VFRCZPS %ymm15,%ymm0
VFRCZPS %ymm0,%ymm15
VFRCZPS (%r10),%ymm12
VFRCZPS %ymm15,%ymm15
VFRCZPS (%rax),%ymm0
VFRCZPS (%r12),%ymm15
VFRCZPS (%rax),%ymm12
VFRCZPS (%r10),%ymm0
# Tests for op VFRCZSD xmm2/mem64, xmm1 (at&t syntax)
VFRCZSD %xmm2,%xmm15
VFRCZSD %xmm0,%xmm12
VFRCZSD (%r12),%xmm0
VFRCZSD (%rax),%xmm15
VFRCZSD %xmm0,%xmm0
VFRCZSD (%r10),%xmm15
VFRCZSD %xmm2,%xmm0
VFRCZSD %xmm15,%xmm12
VFRCZSD %xmm15,%xmm0
VFRCZSD %xmm0,%xmm15
VFRCZSD (%r10),%xmm12
VFRCZSD %xmm15,%xmm15
VFRCZSD (%rax),%xmm0
VFRCZSD (%r12),%xmm15
VFRCZSD (%rax),%xmm12
VFRCZSD (%r10),%xmm0
# Tests for op VFRCZSS xmm2/mem32, xmm1 (at&t syntax)
VFRCZSS %xmm2,%xmm15
VFRCZSS %xmm0,%xmm12
VFRCZSS (%r12),%xmm0
VFRCZSS (%rax),%xmm15
VFRCZSS %xmm0,%xmm0
VFRCZSS (%r10),%xmm15
VFRCZSS %xmm2,%xmm0
VFRCZSS %xmm15,%xmm12
VFRCZSS %xmm15,%xmm0
VFRCZSS %xmm0,%xmm15
VFRCZSS (%r10),%xmm12
VFRCZSS %xmm15,%xmm15
VFRCZSS (%rax),%xmm0
VFRCZSS (%r12),%xmm15
VFRCZSS (%rax),%xmm12
VFRCZSS (%r10),%xmm0
# Tests for op VPCMOV xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm15,%xmm7,%xmm0
VPCMOV %xmm2,(%r9),%xmm0,%xmm0
VPCMOV %xmm15,(%r9),%xmm15,%xmm0
VPCMOV %xmm15,%xmm0,%xmm0,%xmm11
VPCMOV %xmm15,%xmm12,%xmm0,%xmm0
VPCMOV %xmm15,%xmm12,%xmm0,%xmm15
VPCMOV %xmm15,(%r12),%xmm0,%xmm15
VPCMOV %xmm2,%xmm0,%xmm0,%xmm15
VPCMOV %xmm2,(%r9),%xmm15,%xmm15
VPCMOV %xmm2,%xmm12,%xmm15,%xmm15
VPCMOV %xmm2,(%r12),%xmm15,%xmm0
VPCMOV %xmm0,(%r13),%xmm15,%xmm0
VPCMOV %xmm15,(%r13),%xmm7,%xmm11
VPCMOV %xmm15,(%r12),%xmm7,%xmm11
VPCMOV %xmm15,%xmm15,%xmm0,%xmm0
VPCMOV %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPCMOV ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm15,%ymm7,%ymm0
VPCMOV %ymm2,(%r9),%ymm0,%ymm0
VPCMOV %ymm15,(%r9),%ymm15,%ymm0
VPCMOV %ymm15,%ymm0,%ymm0,%ymm11
VPCMOV %ymm15,%ymm12,%ymm0,%ymm0
VPCMOV %ymm15,%ymm12,%ymm0,%ymm15
VPCMOV %ymm15,(%r12),%ymm0,%ymm15
VPCMOV %ymm2,%ymm0,%ymm0,%ymm15
VPCMOV %ymm2,(%r9),%ymm15,%ymm15
VPCMOV %ymm2,%ymm12,%ymm15,%ymm15
VPCMOV %ymm2,(%r12),%ymm15,%ymm0
VPCMOV %ymm0,(%r13),%ymm15,%ymm0
VPCMOV %ymm15,(%r13),%ymm7,%ymm11
VPCMOV %ymm15,(%r12),%ymm7,%ymm11
VPCMOV %ymm15,%ymm15,%ymm0,%ymm0
VPCMOV %ymm2,(%r9),%ymm15,%ymm11
# Tests for op VPCMOV xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPCMOV %xmm0,%xmm12,%xmm7,%xmm0
VPCMOV (%rax),%xmm15,%xmm0,%xmm0
VPCMOV (%r10),%xmm15,%xmm15,%xmm0
VPCMOV %xmm2,%xmm0,%xmm0,%xmm11
VPCMOV %xmm2,%xmm0,%xmm0,%xmm0
VPCMOV %xmm2,%xmm0,%xmm0,%xmm15
VPCMOV (%r10),%xmm12,%xmm0,%xmm15
VPCMOV (%rax),%xmm0,%xmm0,%xmm15
VPCMOV (%r12),%xmm15,%xmm15,%xmm15
VPCMOV (%r12),%xmm0,%xmm15,%xmm15
VPCMOV (%rax),%xmm12,%xmm15,%xmm0
VPCMOV %xmm15,%xmm15,%xmm15,%xmm0
VPCMOV (%r10),%xmm15,%xmm7,%xmm11
VPCMOV %xmm2,%xmm12,%xmm7,%xmm11
VPCMOV %xmm2,%xmm12,%xmm0,%xmm0
VPCMOV (%r12),%xmm15,%xmm15,%xmm11
# Tests for op VPCMOV ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPCMOV %ymm0,%ymm12,%ymm7,%ymm0
VPCMOV (%rax),%ymm15,%ymm0,%ymm0
VPCMOV (%r10),%ymm15,%ymm15,%ymm0
VPCMOV %ymm2,%ymm0,%ymm0,%ymm11
VPCMOV %ymm2,%ymm0,%ymm0,%ymm0
VPCMOV %ymm2,%ymm0,%ymm0,%ymm15
VPCMOV (%r10),%ymm12,%ymm0,%ymm15
VPCMOV (%rax),%ymm0,%ymm0,%ymm15
VPCMOV (%r12),%ymm15,%ymm15,%ymm15
VPCMOV (%r12),%ymm0,%ymm15,%ymm15
VPCMOV (%rax),%ymm12,%ymm15,%ymm0
VPCMOV %ymm15,%ymm15,%ymm15,%ymm0
VPCMOV (%r10),%ymm15,%ymm7,%ymm11
VPCMOV %ymm2,%ymm12,%ymm7,%ymm11
VPCMOV %ymm2,%ymm12,%ymm0,%ymm0
VPCMOV (%r12),%ymm15,%ymm15,%ymm11
# Tests for op VPCOMB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMB $0x3,(%rdi),%xmm0,%xmm15
VPCOMB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMB $0xFF,%xmm15,%xmm0,%xmm1
VPCOMB $0x0,%xmm11,%xmm11,%xmm1
VPCOMB $0x0,%xmm11,%xmm0,%xmm1
VPCOMB $0x0,%xmm0,%xmm15,%xmm1
VPCOMB $0x3,%xmm11,%xmm15,%xmm15
VPCOMB $0x0,%xmm11,%xmm15,%xmm15
VPCOMB $0xFF,%xmm15,%xmm15,%xmm15
VPCOMB $0x0,(%rcx),%xmm15,%xmm15
VPCOMB $0x3,(%rcx),%xmm0,%xmm0
VPCOMB $0xFF,(%rdi),%xmm11,%xmm1
VPCOMB $0x3,(%rdi),%xmm11,%xmm1
VPCOMB $0x3,%xmm0,%xmm0,%xmm15
VPCOMB $0xFF,(%rcx),%xmm11,%xmm0
VPCOMB $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMD $0x3,(%rdi),%xmm0,%xmm15
VPCOMD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMD $0xFF,%xmm15,%xmm0,%xmm1
VPCOMD $0x0,%xmm11,%xmm11,%xmm1
VPCOMD $0x0,%xmm11,%xmm0,%xmm1
VPCOMD $0x0,%xmm0,%xmm15,%xmm1
VPCOMD $0x3,%xmm11,%xmm15,%xmm15
VPCOMD $0x0,%xmm11,%xmm15,%xmm15
VPCOMD $0xFF,%xmm15,%xmm15,%xmm15
VPCOMD $0x0,(%rcx),%xmm15,%xmm15
VPCOMD $0x3,(%rcx),%xmm0,%xmm0
VPCOMD $0xFF,(%rdi),%xmm11,%xmm1
VPCOMD $0x3,(%rdi),%xmm11,%xmm1
VPCOMD $0x3,%xmm0,%xmm0,%xmm15
VPCOMD $0xFF,(%rcx),%xmm11,%xmm0
VPCOMD $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMQ $0x3,(%rdi),%xmm0,%xmm15
VPCOMQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMQ $0xFF,%xmm15,%xmm0,%xmm1
VPCOMQ $0x0,%xmm11,%xmm11,%xmm1
VPCOMQ $0x0,%xmm11,%xmm0,%xmm1
VPCOMQ $0x0,%xmm0,%xmm15,%xmm1
VPCOMQ $0x3,%xmm11,%xmm15,%xmm15
VPCOMQ $0x0,%xmm11,%xmm15,%xmm15
VPCOMQ $0xFF,%xmm15,%xmm15,%xmm15
VPCOMQ $0x0,(%rcx),%xmm15,%xmm15
VPCOMQ $0x3,(%rcx),%xmm0,%xmm0
VPCOMQ $0xFF,(%rdi),%xmm11,%xmm1
VPCOMQ $0x3,(%rdi),%xmm11,%xmm1
VPCOMQ $0x3,%xmm0,%xmm0,%xmm15
VPCOMQ $0xFF,(%rcx),%xmm11,%xmm0
VPCOMQ $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUB imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUB $0x3,(%rdi),%xmm0,%xmm15
VPCOMUB $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUB $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUB $0x0,%xmm11,%xmm11,%xmm1
VPCOMUB $0x0,%xmm11,%xmm0,%xmm1
VPCOMUB $0x0,%xmm0,%xmm15,%xmm1
VPCOMUB $0x3,%xmm11,%xmm15,%xmm15
VPCOMUB $0x0,%xmm11,%xmm15,%xmm15
VPCOMUB $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUB $0x0,(%rcx),%xmm15,%xmm15
VPCOMUB $0x3,(%rcx),%xmm0,%xmm0
VPCOMUB $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUB $0x3,(%rdi),%xmm11,%xmm1
VPCOMUB $0x3,%xmm0,%xmm0,%xmm15
VPCOMUB $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUB $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUD imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUD $0x3,(%rdi),%xmm0,%xmm15
VPCOMUD $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUD $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUD $0x0,%xmm11,%xmm11,%xmm1
VPCOMUD $0x0,%xmm11,%xmm0,%xmm1
VPCOMUD $0x0,%xmm0,%xmm15,%xmm1
VPCOMUD $0x3,%xmm11,%xmm15,%xmm15
VPCOMUD $0x0,%xmm11,%xmm15,%xmm15
VPCOMUD $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUD $0x0,(%rcx),%xmm15,%xmm15
VPCOMUD $0x3,(%rcx),%xmm0,%xmm0
VPCOMUD $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUD $0x3,(%rdi),%xmm11,%xmm1
VPCOMUD $0x3,%xmm0,%xmm0,%xmm15
VPCOMUD $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUD $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUQ imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUQ $0x3,(%rdi),%xmm0,%xmm15
VPCOMUQ $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUQ $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm11,%xmm11,%xmm1
VPCOMUQ $0x0,%xmm11,%xmm0,%xmm1
VPCOMUQ $0x0,%xmm0,%xmm15,%xmm1
VPCOMUQ $0x3,%xmm11,%xmm15,%xmm15
VPCOMUQ $0x0,%xmm11,%xmm15,%xmm15
VPCOMUQ $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUQ $0x0,(%rcx),%xmm15,%xmm15
VPCOMUQ $0x3,(%rcx),%xmm0,%xmm0
VPCOMUQ $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUQ $0x3,(%rdi),%xmm11,%xmm1
VPCOMUQ $0x3,%xmm0,%xmm0,%xmm15
VPCOMUQ $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUQ $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMUW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMUW $0x3,(%rdi),%xmm0,%xmm15
VPCOMUW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMUW $0xFF,%xmm15,%xmm0,%xmm1
VPCOMUW $0x0,%xmm11,%xmm11,%xmm1
VPCOMUW $0x0,%xmm11,%xmm0,%xmm1
VPCOMUW $0x0,%xmm0,%xmm15,%xmm1
VPCOMUW $0x3,%xmm11,%xmm15,%xmm15
VPCOMUW $0x0,%xmm11,%xmm15,%xmm15
VPCOMUW $0xFF,%xmm15,%xmm15,%xmm15
VPCOMUW $0x0,(%rcx),%xmm15,%xmm15
VPCOMUW $0x3,(%rcx),%xmm0,%xmm0
VPCOMUW $0xFF,(%rdi),%xmm11,%xmm1
VPCOMUW $0x3,(%rdi),%xmm11,%xmm1
VPCOMUW $0x3,%xmm0,%xmm0,%xmm15
VPCOMUW $0xFF,(%rcx),%xmm11,%xmm0
VPCOMUW $0x3,(%rsi),%xmm15,%xmm1
# Tests for op VPCOMW imm8, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPCOMW $0x3,(%rdi),%xmm0,%xmm15
VPCOMW $0xFF,%xmm0,%xmm0,%xmm1
VPCOMW $0xFF,%xmm15,%xmm0,%xmm1
VPCOMW $0x0,%xmm11,%xmm11,%xmm1
VPCOMW $0x0,%xmm11,%xmm0,%xmm1
VPCOMW $0x0,%xmm0,%xmm15,%xmm1
VPCOMW $0x3,%xmm11,%xmm15,%xmm15
VPCOMW $0x0,%xmm11,%xmm15,%xmm15
VPCOMW $0xFF,%xmm15,%xmm15,%xmm15
VPCOMW $0x0,(%rcx),%xmm15,%xmm15
VPCOMW $0x3,(%rcx),%xmm0,%xmm0
VPCOMW $0xFF,(%rdi),%xmm11,%xmm1
VPCOMW $0x3,(%rdi),%xmm11,%xmm1
VPCOMW $0x3,%xmm0,%xmm0,%xmm15
VPCOMW $0xFF,(%rcx),%xmm11,%xmm0
VPCOMW $0x3,(%rsi),%xmm15,%xmm1
# Testing VPERMIL2PD imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x3,%xmm14,%xmm13,%xmm11,%xmm10
VPERMIL2PD $0x2,%xmm15,(%rdi,%r8),%xmm1,%xmm0
VPERMIL2PD $0x1,%xmm0,0x23(%r12,%r12,8),%xmm0,%xmm2
VPERMIL2PD $0x0,%xmm3,%xmm15,%xmm13,%xmm2
VPERMIL2PD $0x2,%xmm3,%xmm14,%xmm11,%xmm0
VPERMIL2PD $0x1,%xmm0,(%rdx),%xmm1,%xmm0
VPERMIL2PD $0x3,%xmm3,%xmm0,%xmm0,%xmm9
VPERMIL2PD $0x0,%xmm2,0x23(%r12,%r12,8),%xmm0,%xmm3
# Testing VPERMIL2PD imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PD $0x0,%xmm0,%xmm7,%xmm11,%xmm0
VPERMIL2PD $0x1,%xmm4,%xmm5,%xmm15,%xmm0
VPERMIL2PD $0x3,0x4(%r12,%rbx,4),%xmm8,%xmm0,%xmm10
VPERMIL2PD $0x2,%xmm1,%xmm0,%xmm7,%xmm6
VPERMIL2PD $0x1,(%r13,%rbx),%xmm12,%xmm6,%xmm10
VPERMIL2PD $0x2,%xmm4,%xmm6,%xmm0,%xmm9
VPERMIL2PD $0x0,(%rbx,%rbx,8),%xmm8,%xmm6,%xmm11
VPERMIL2PD $0x3,%xmm5,%xmm13,%xmm6,%xmm0
# Testing VPERMIL2PD imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x1,%ymm7,%ymm5,%ymm0,%ymm13
VPERMIL2PD $0x0,%ymm7,(%rcx,%r9,2),%ymm4,%ymm12
VPERMIL2PD $0x3,%ymm3,(%r14,%r11),%ymm0,%ymm8
VPERMIL2PD $0x2,%ymm7,0x107(%r9,%rax,4),%ymm0,%ymm9
VPERMIL2PD $0x2,%ymm7,(%r14,%r11),%ymm0,%ymm8
VPERMIL2PD $0x3,%ymm0,(%rcx,%r9,2),%ymm4,%ymm0
VPERMIL2PD $0x1,%ymm8,(%r14,%r11),%ymm11,%ymm5
VPERMIL2PD $0x0,%ymm2,(%rsi),%ymm0,%ymm13
# Testing VPERMIL2PD imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PD $0x1,(%rcx,%rsi),%ymm0,%ymm10,%ymm15
VPERMIL2PD $0x2,(%r9),%ymm12,%ymm0,%ymm0
VPERMIL2PD $0x0,0x15D8D9(%rcx,%r14,1),%ymm8,%ymm11,%ymm4
VPERMIL2PD $0x3,%ymm9,%ymm12,%ymm0,%ymm0
VPERMIL2PD $0x3,(%r13,%r11,1),%ymm1,%ymm14,%ymm8
VPERMIL2PD $0x0,0x15D8D9(%rcx,%r14,1),%ymm0,%ymm0,%ymm11
VPERMIL2PD $0x1,(%r13,%r11,1),%ymm1,%ymm15,%ymm0
VPERMIL2PD $0x2,%ymm9,%ymm13,%ymm11,%ymm5
# Testing VPERMIL2PS imm8, xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x2,%xmm0,0x23(%r12,%r12,8),%xmm0,%xmm15
VPERMIL2PS $0x0,%xmm0,0x23(%r12,%r12,8),%xmm3,%xmm9
VPERMIL2PS $0x3,%xmm0,%xmm12,%xmm7,%xmm11
VPERMIL2PS $0x1,%xmm3,(%rdx),%xmm0,%xmm0
VPERMIL2PS $0x2,%xmm3,(%r14,%r9,1),%xmm0,%xmm0
VPERMIL2PS $0x1,%xmm3,(%rdx),%xmm1,%xmm0
VPERMIL2PS $0x0,%xmm3,%xmm8,%xmm0,%xmm12
VPERMIL2PS $0x3,%xmm3,(%r14,%r9,1),%xmm1,%xmm2
# Testing VPERMIL2PS imm8, xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPERMIL2PS $0x1,(%r13,%rax),%xmm0,%xmm0,%xmm11
VPERMIL2PS $0x3,(%rdi),%xmm15,%xmm6,%xmm10
VPERMIL2PS $0x2,%xmm0,%xmm13,%xmm0,%xmm0
VPERMIL2PS $0x0,%xmm4,%xmm0,%xmm14,%xmm10
VPERMIL2PS $0x1,%xmm0,%xmm0,%xmm3,%xmm11
VPERMIL2PS $0x2,(%rbx,%rbx,8),%xmm11,%xmm6,%xmm10
VPERMIL2PS $0x3,%xmm4,%xmm5,%xmm6,%xmm15
VPERMIL2PS $0x0,0x4(%r12,%rbx,4),%xmm0,%xmm0,%xmm10
# Testing VPERMIL2PS imm8, ymm4, ymm3/mem256, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x1,%ymm0,(%rsi),%ymm0,%ymm0
VPERMIL2PS $0x2,%ymm15,0x107(%r9,%rax,4),%ymm11,%ymm0
VPERMIL2PS $0x0,%ymm7,%ymm0,%ymm4,%ymm0
VPERMIL2PS $0x3,%ymm7,(%rcx,%r9,2),%ymm4,%ymm15
VPERMIL2PS $0x2,%ymm0,%ymm0,%ymm4,%ymm15
VPERMIL2PS $0x3,%ymm0,0x107(%r9,%rax,4),%ymm15,%ymm13
VPERMIL2PS $0x0,%ymm7,%ymm8,%ymm14,%ymm13
VPERMIL2PS $0x1,%ymm7,%ymm15,%ymm4,%ymm13
# Testing VPERMIL2PS imm8, ymm4/mem256, ymm3, ymm2, ymm1 (at&t syntax)
VPERMIL2PS $0x0,0x15D8D9(%rcx,%r14,1),%ymm0,%ymm0,%ymm15
VPERMIL2PS $0x3,(%r13,%r11,1),%ymm12,%ymm15,%ymm4
VPERMIL2PS $0x2,0x15D8D9(%rcx,%r14,1),%ymm0,%ymm0,%ymm0
VPERMIL2PS $0x1,%ymm0,%ymm2,%ymm3,%ymm4
VPERMIL2PS $0x3,(%rcx,%rsi),%ymm12,%ymm0,%ymm4
VPERMIL2PS $0x2,(%rcx,%rsi),%ymm1,%ymm0,%ymm4
VPERMIL2PS $0x0,(%r13,%r11,1),%ymm0,%ymm0,%ymm4
VPERMIL2PS $0x1,%ymm8,%ymm7,%ymm4,%ymm0
# Tests for op VPHADDBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDBD %xmm2,%xmm15
VPHADDBD %xmm0,%xmm12
VPHADDBD (%r12),%xmm0
VPHADDBD (%rax),%xmm15
VPHADDBD %xmm0,%xmm0
VPHADDBD (%r10),%xmm15
VPHADDBD %xmm2,%xmm0
VPHADDBD %xmm15,%xmm12
VPHADDBD %xmm15,%xmm0
VPHADDBD %xmm0,%xmm15
VPHADDBD (%r10),%xmm12
VPHADDBD %xmm15,%xmm15
VPHADDBD (%rax),%xmm0
VPHADDBD (%r12),%xmm15
VPHADDBD (%rax),%xmm12
VPHADDBD (%r10),%xmm0
# Tests for op VPHADDBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDBQ %xmm2,%xmm15
VPHADDBQ %xmm0,%xmm12
VPHADDBQ (%r12),%xmm0
VPHADDBQ (%rax),%xmm15
VPHADDBQ %xmm0,%xmm0
VPHADDBQ (%r10),%xmm15
VPHADDBQ %xmm2,%xmm0
VPHADDBQ %xmm15,%xmm12
VPHADDBQ %xmm15,%xmm0
VPHADDBQ %xmm0,%xmm15
VPHADDBQ (%r10),%xmm12
VPHADDBQ %xmm15,%xmm15
VPHADDBQ (%rax),%xmm0
VPHADDBQ (%r12),%xmm15
VPHADDBQ (%rax),%xmm12
VPHADDBQ (%r10),%xmm0
# Tests for op VPHADDBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDBW %xmm2,%xmm15
VPHADDBW %xmm0,%xmm12
VPHADDBW (%r12),%xmm0
VPHADDBW (%rax),%xmm15
VPHADDBW %xmm0,%xmm0
VPHADDBW (%r10),%xmm15
VPHADDBW %xmm2,%xmm0
VPHADDBW %xmm15,%xmm12
VPHADDBW %xmm15,%xmm0
VPHADDBW %xmm0,%xmm15
VPHADDBW (%r10),%xmm12
VPHADDBW %xmm15,%xmm15
VPHADDBW (%rax),%xmm0
VPHADDBW (%r12),%xmm15
VPHADDBW (%rax),%xmm12
VPHADDBW (%r10),%xmm0
# Tests for op VPHADDDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDDQ %xmm2,%xmm15
VPHADDDQ %xmm0,%xmm12
VPHADDDQ (%r12),%xmm0
VPHADDDQ (%rax),%xmm15
VPHADDDQ %xmm0,%xmm0
VPHADDDQ (%r10),%xmm15
VPHADDDQ %xmm2,%xmm0
VPHADDDQ %xmm15,%xmm12
VPHADDDQ %xmm15,%xmm0
VPHADDDQ %xmm0,%xmm15
VPHADDDQ (%r10),%xmm12
VPHADDDQ %xmm15,%xmm15
VPHADDDQ (%rax),%xmm0
VPHADDDQ (%r12),%xmm15
VPHADDDQ (%rax),%xmm12
VPHADDDQ (%r10),%xmm0
# Tests for op VPHADDUBD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBD %xmm2,%xmm15
VPHADDUBD %xmm0,%xmm12
VPHADDUBD (%r12),%xmm0
VPHADDUBD (%rax),%xmm15
VPHADDUBD %xmm0,%xmm0
VPHADDUBD (%r10),%xmm15
VPHADDUBD %xmm2,%xmm0
VPHADDUBD %xmm15,%xmm12
VPHADDUBD %xmm15,%xmm0
VPHADDUBD %xmm0,%xmm15
VPHADDUBD (%r10),%xmm12
VPHADDUBD %xmm15,%xmm15
VPHADDUBD (%rax),%xmm0
VPHADDUBD (%r12),%xmm15
VPHADDUBD (%rax),%xmm12
VPHADDUBD (%r10),%xmm0
# Tests for op VPHADDUBQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBQ %xmm2,%xmm15
VPHADDUBQ %xmm0,%xmm12
VPHADDUBQ (%r12),%xmm0
VPHADDUBQ (%rax),%xmm15
VPHADDUBQ %xmm0,%xmm0
VPHADDUBQ (%r10),%xmm15
VPHADDUBQ %xmm2,%xmm0
VPHADDUBQ %xmm15,%xmm12
VPHADDUBQ %xmm15,%xmm0
VPHADDUBQ %xmm0,%xmm15
VPHADDUBQ (%r10),%xmm12
VPHADDUBQ %xmm15,%xmm15
VPHADDUBQ (%rax),%xmm0
VPHADDUBQ (%r12),%xmm15
VPHADDUBQ (%rax),%xmm12
VPHADDUBQ (%r10),%xmm0
# Tests for op VPHADDUBW xmm2/mem128, xmm1 (at&t syntax)
VPHADDUBW %xmm2,%xmm15
VPHADDUBW %xmm0,%xmm12
VPHADDUBW (%r12),%xmm0
VPHADDUBW (%rax),%xmm15
VPHADDUBW %xmm0,%xmm0
VPHADDUBW (%r10),%xmm15
VPHADDUBW %xmm2,%xmm0
VPHADDUBW %xmm15,%xmm12
VPHADDUBW %xmm15,%xmm0
VPHADDUBW %xmm0,%xmm15
VPHADDUBW (%r10),%xmm12
VPHADDUBW %xmm15,%xmm15
VPHADDUBW (%rax),%xmm0
VPHADDUBW (%r12),%xmm15
VPHADDUBW (%rax),%xmm12
VPHADDUBW (%r10),%xmm0
# Tests for op VPHADDUDQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUDQ %xmm2,%xmm15
VPHADDUDQ %xmm0,%xmm12
VPHADDUDQ (%r12),%xmm0
VPHADDUDQ (%rax),%xmm15
VPHADDUDQ %xmm0,%xmm0
VPHADDUDQ (%r10),%xmm15
VPHADDUDQ %xmm2,%xmm0
VPHADDUDQ %xmm15,%xmm12
VPHADDUDQ %xmm15,%xmm0
VPHADDUDQ %xmm0,%xmm15
VPHADDUDQ (%r10),%xmm12
VPHADDUDQ %xmm15,%xmm15
VPHADDUDQ (%rax),%xmm0
VPHADDUDQ (%r12),%xmm15
VPHADDUDQ (%rax),%xmm12
VPHADDUDQ (%r10),%xmm0
# Tests for op VPHADDUWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWD %xmm2,%xmm15
VPHADDUWD %xmm0,%xmm12
VPHADDUWD (%r12),%xmm0
VPHADDUWD (%rax),%xmm15
VPHADDUWD %xmm0,%xmm0
VPHADDUWD (%r10),%xmm15
VPHADDUWD %xmm2,%xmm0
VPHADDUWD %xmm15,%xmm12
VPHADDUWD %xmm15,%xmm0
VPHADDUWD %xmm0,%xmm15
VPHADDUWD (%r10),%xmm12
VPHADDUWD %xmm15,%xmm15
VPHADDUWD (%rax),%xmm0
VPHADDUWD (%r12),%xmm15
VPHADDUWD (%rax),%xmm12
VPHADDUWD (%r10),%xmm0
# Tests for op VPHADDUWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDUWQ %xmm2,%xmm15
VPHADDUWQ %xmm0,%xmm12
VPHADDUWQ (%r12),%xmm0
VPHADDUWQ (%rax),%xmm15
VPHADDUWQ %xmm0,%xmm0
VPHADDUWQ (%r10),%xmm15
VPHADDUWQ %xmm2,%xmm0
VPHADDUWQ %xmm15,%xmm12
VPHADDUWQ %xmm15,%xmm0
VPHADDUWQ %xmm0,%xmm15
VPHADDUWQ (%r10),%xmm12
VPHADDUWQ %xmm15,%xmm15
VPHADDUWQ (%rax),%xmm0
VPHADDUWQ (%r12),%xmm15
VPHADDUWQ (%rax),%xmm12
VPHADDUWQ (%r10),%xmm0
# Tests for op VPHADDWD xmm2/mem128, xmm1 (at&t syntax)
VPHADDWD %xmm2,%xmm15
VPHADDWD %xmm0,%xmm12
VPHADDWD (%r12),%xmm0
VPHADDWD (%rax),%xmm15
VPHADDWD %xmm0,%xmm0
VPHADDWD (%r10),%xmm15
VPHADDWD %xmm2,%xmm0
VPHADDWD %xmm15,%xmm12
VPHADDWD %xmm15,%xmm0
VPHADDWD %xmm0,%xmm15
VPHADDWD (%r10),%xmm12
VPHADDWD %xmm15,%xmm15
VPHADDWD (%rax),%xmm0
VPHADDWD (%r12),%xmm15
VPHADDWD (%rax),%xmm12
VPHADDWD (%r10),%xmm0
# Tests for op VPHADDWQ xmm2/mem128, xmm1 (at&t syntax)
VPHADDWQ %xmm2,%xmm15
VPHADDWQ %xmm0,%xmm12
VPHADDWQ (%r12),%xmm0
VPHADDWQ (%rax),%xmm15
VPHADDWQ %xmm0,%xmm0
VPHADDWQ (%r10),%xmm15
VPHADDWQ %xmm2,%xmm0
VPHADDWQ %xmm15,%xmm12
VPHADDWQ %xmm15,%xmm0
VPHADDWQ %xmm0,%xmm15
VPHADDWQ (%r10),%xmm12
VPHADDWQ %xmm15,%xmm15
VPHADDWQ (%rax),%xmm0
VPHADDWQ (%r12),%xmm15
VPHADDWQ (%rax),%xmm12
VPHADDWQ (%r10),%xmm0
# Tests for op VPHSUBBW xmm2/mem128, xmm1 (at&t syntax)
VPHSUBBW %xmm2,%xmm15
VPHSUBBW %xmm0,%xmm12
VPHSUBBW (%r12),%xmm0
VPHSUBBW (%rax),%xmm15
VPHSUBBW %xmm0,%xmm0
VPHSUBBW (%r10),%xmm15
VPHSUBBW %xmm2,%xmm0
VPHSUBBW %xmm15,%xmm12
VPHSUBBW %xmm15,%xmm0
VPHSUBBW %xmm0,%xmm15
VPHSUBBW (%r10),%xmm12
VPHSUBBW %xmm15,%xmm15
VPHSUBBW (%rax),%xmm0
VPHSUBBW (%r12),%xmm15
VPHSUBBW (%rax),%xmm12
VPHSUBBW (%r10),%xmm0
# Tests for op VPHSUBDQ xmm2/mem128, xmm1 (at&t syntax)
VPHSUBDQ %xmm2,%xmm15
VPHSUBDQ %xmm0,%xmm12
VPHSUBDQ (%r12),%xmm0
VPHSUBDQ (%rax),%xmm15
VPHSUBDQ %xmm0,%xmm0
VPHSUBDQ (%r10),%xmm15
VPHSUBDQ %xmm2,%xmm0
VPHSUBDQ %xmm15,%xmm12
VPHSUBDQ %xmm15,%xmm0
VPHSUBDQ %xmm0,%xmm15
VPHSUBDQ (%r10),%xmm12
VPHSUBDQ %xmm15,%xmm15
VPHSUBDQ (%rax),%xmm0
VPHSUBDQ (%r12),%xmm15
VPHSUBDQ (%rax),%xmm12
VPHSUBDQ (%r10),%xmm0
# Tests for op VPHSUBWD xmm2/mem128, xmm1 (at&t syntax)
VPHSUBWD %xmm2,%xmm15
VPHSUBWD %xmm0,%xmm12
VPHSUBWD (%r12),%xmm0
VPHSUBWD (%rax),%xmm15
VPHSUBWD %xmm0,%xmm0
VPHSUBWD (%r10),%xmm15
VPHSUBWD %xmm2,%xmm0
VPHSUBWD %xmm15,%xmm12
VPHSUBWD %xmm15,%xmm0
VPHSUBWD %xmm0,%xmm15
VPHSUBWD (%r10),%xmm12
VPHSUBWD %xmm15,%xmm15
VPHSUBWD (%rax),%xmm0
VPHSUBWD (%r12),%xmm15
VPHSUBWD (%rax),%xmm12
VPHSUBWD (%r10),%xmm0
# Tests for op VPMACSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSDD %xmm2,(%r9),%xmm0,%xmm0
VPMACSDD %xmm15,(%r9),%xmm15,%xmm0
VPMACSDD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSDD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSDD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSDD %xmm15,(%r12),%xmm0,%xmm15
VPMACSDD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSDD %xmm2,(%r9),%xmm15,%xmm15
VPMACSDD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSDD %xmm2,(%r12),%xmm15,%xmm0
VPMACSDD %xmm0,(%r13),%xmm15,%xmm0
VPMACSDD %xmm15,(%r13),%xmm7,%xmm11
VPMACSDD %xmm15,(%r12),%xmm7,%xmm11
VPMACSDD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSDD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQH %xmm0,%xmm15,%xmm7,%xmm0
VPMACSDQH %xmm2,(%r9),%xmm0,%xmm0
VPMACSDQH %xmm15,(%r9),%xmm15,%xmm0
VPMACSDQH %xmm15,%xmm0,%xmm0,%xmm11
VPMACSDQH %xmm15,%xmm12,%xmm0,%xmm0
VPMACSDQH %xmm15,%xmm12,%xmm0,%xmm15
VPMACSDQH %xmm15,(%r12),%xmm0,%xmm15
VPMACSDQH %xmm2,%xmm0,%xmm0,%xmm15
VPMACSDQH %xmm2,(%r9),%xmm15,%xmm15
VPMACSDQH %xmm2,%xmm12,%xmm15,%xmm15
VPMACSDQH %xmm2,(%r12),%xmm15,%xmm0
VPMACSDQH %xmm0,(%r13),%xmm15,%xmm0
VPMACSDQH %xmm15,(%r13),%xmm7,%xmm11
VPMACSDQH %xmm15,(%r12),%xmm7,%xmm11
VPMACSDQH %xmm15,%xmm15,%xmm0,%xmm0
VPMACSDQH %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSDQL %xmm0,%xmm15,%xmm7,%xmm0
VPMACSDQL %xmm2,(%r9),%xmm0,%xmm0
VPMACSDQL %xmm15,(%r9),%xmm15,%xmm0
VPMACSDQL %xmm15,%xmm0,%xmm0,%xmm11
VPMACSDQL %xmm15,%xmm12,%xmm0,%xmm0
VPMACSDQL %xmm15,%xmm12,%xmm0,%xmm15
VPMACSDQL %xmm15,(%r12),%xmm0,%xmm15
VPMACSDQL %xmm2,%xmm0,%xmm0,%xmm15
VPMACSDQL %xmm2,(%r9),%xmm15,%xmm15
VPMACSDQL %xmm2,%xmm12,%xmm15,%xmm15
VPMACSDQL %xmm2,(%r12),%xmm15,%xmm0
VPMACSDQL %xmm0,(%r13),%xmm15,%xmm0
VPMACSDQL %xmm15,(%r13),%xmm7,%xmm11
VPMACSDQL %xmm15,(%r12),%xmm7,%xmm11
VPMACSDQL %xmm15,%xmm15,%xmm0,%xmm0
VPMACSDQL %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSDD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSDD %xmm2,(%r9),%xmm0,%xmm0
VPMACSSDD %xmm15,(%r9),%xmm15,%xmm0
VPMACSSDD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSDD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSDD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSDD %xmm15,(%r12),%xmm0,%xmm15
VPMACSSDD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSDD %xmm2,(%r9),%xmm15,%xmm15
VPMACSSDD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSDD %xmm2,(%r12),%xmm15,%xmm0
VPMACSSDD %xmm0,(%r13),%xmm15,%xmm0
VPMACSSDD %xmm15,(%r13),%xmm7,%xmm11
VPMACSSDD %xmm15,(%r12),%xmm7,%xmm11
VPMACSSDD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSDD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSDQH xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQH %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSDQH %xmm2,(%r9),%xmm0,%xmm0
VPMACSSDQH %xmm15,(%r9),%xmm15,%xmm0
VPMACSSDQH %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSDQH %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSDQH %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSDQH %xmm15,(%r12),%xmm0,%xmm15
VPMACSSDQH %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSDQH %xmm2,(%r9),%xmm15,%xmm15
VPMACSSDQH %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSDQH %xmm2,(%r12),%xmm15,%xmm0
VPMACSSDQH %xmm0,(%r13),%xmm15,%xmm0
VPMACSSDQH %xmm15,(%r13),%xmm7,%xmm11
VPMACSSDQH %xmm15,(%r12),%xmm7,%xmm11
VPMACSSDQH %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSDQH %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSDQL xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSDQL %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSDQL %xmm2,(%r9),%xmm0,%xmm0
VPMACSSDQL %xmm15,(%r9),%xmm15,%xmm0
VPMACSSDQL %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSDQL %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSDQL %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSDQL %xmm15,(%r12),%xmm0,%xmm15
VPMACSSDQL %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSDQL %xmm2,(%r9),%xmm15,%xmm15
VPMACSSDQL %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSDQL %xmm2,(%r12),%xmm15,%xmm0
VPMACSSDQL %xmm0,(%r13),%xmm15,%xmm0
VPMACSSDQL %xmm15,(%r13),%xmm7,%xmm11
VPMACSSDQL %xmm15,(%r12),%xmm7,%xmm11
VPMACSSDQL %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSDQL %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSWD %xmm2,(%r9),%xmm0,%xmm0
VPMACSSWD %xmm15,(%r9),%xmm15,%xmm0
VPMACSSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSWD %xmm15,(%r12),%xmm0,%xmm15
VPMACSSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSWD %xmm2,(%r9),%xmm15,%xmm15
VPMACSSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSWD %xmm2,(%r12),%xmm15,%xmm0
VPMACSSWD %xmm0,(%r13),%xmm15,%xmm0
VPMACSSWD %xmm15,(%r13),%xmm7,%xmm11
VPMACSSWD %xmm15,(%r12),%xmm7,%xmm11
VPMACSSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSSWW %xmm0,%xmm15,%xmm7,%xmm0
VPMACSSWW %xmm2,(%r9),%xmm0,%xmm0
VPMACSSWW %xmm15,(%r9),%xmm15,%xmm0
VPMACSSWW %xmm15,%xmm0,%xmm0,%xmm11
VPMACSSWW %xmm15,%xmm12,%xmm0,%xmm0
VPMACSSWW %xmm15,%xmm12,%xmm0,%xmm15
VPMACSSWW %xmm15,(%r12),%xmm0,%xmm15
VPMACSSWW %xmm2,%xmm0,%xmm0,%xmm15
VPMACSSWW %xmm2,(%r9),%xmm15,%xmm15
VPMACSSWW %xmm2,%xmm12,%xmm15,%xmm15
VPMACSSWW %xmm2,(%r12),%xmm15,%xmm0
VPMACSSWW %xmm0,(%r13),%xmm15,%xmm0
VPMACSSWW %xmm15,(%r13),%xmm7,%xmm11
VPMACSSWW %xmm15,(%r12),%xmm7,%xmm11
VPMACSSWW %xmm15,%xmm15,%xmm0,%xmm0
VPMACSSWW %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMACSWD %xmm2,(%r9),%xmm0,%xmm0
VPMACSWD %xmm15,(%r9),%xmm15,%xmm0
VPMACSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMACSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMACSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMACSWD %xmm15,(%r12),%xmm0,%xmm15
VPMACSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMACSWD %xmm2,(%r9),%xmm15,%xmm15
VPMACSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMACSWD %xmm2,(%r12),%xmm15,%xmm0
VPMACSWD %xmm0,(%r13),%xmm15,%xmm0
VPMACSWD %xmm15,(%r13),%xmm7,%xmm11
VPMACSWD %xmm15,(%r12),%xmm7,%xmm11
VPMACSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMACSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMACSWW xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMACSWW %xmm0,%xmm15,%xmm7,%xmm0
VPMACSWW %xmm2,(%r9),%xmm0,%xmm0
VPMACSWW %xmm15,(%r9),%xmm15,%xmm0
VPMACSWW %xmm15,%xmm0,%xmm0,%xmm11
VPMACSWW %xmm15,%xmm12,%xmm0,%xmm0
VPMACSWW %xmm15,%xmm12,%xmm0,%xmm15
VPMACSWW %xmm15,(%r12),%xmm0,%xmm15
VPMACSWW %xmm2,%xmm0,%xmm0,%xmm15
VPMACSWW %xmm2,(%r9),%xmm15,%xmm15
VPMACSWW %xmm2,%xmm12,%xmm15,%xmm15
VPMACSWW %xmm2,(%r12),%xmm15,%xmm0
VPMACSWW %xmm0,(%r13),%xmm15,%xmm0
VPMACSWW %xmm15,(%r13),%xmm7,%xmm11
VPMACSWW %xmm15,(%r12),%xmm7,%xmm11
VPMACSWW %xmm15,%xmm15,%xmm0,%xmm0
VPMACSWW %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMADCSSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMADCSSWD %xmm2,(%r9),%xmm0,%xmm0
VPMADCSSWD %xmm15,(%r9),%xmm15,%xmm0
VPMADCSSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMADCSSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMADCSSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMADCSSWD %xmm15,(%r12),%xmm0,%xmm15
VPMADCSSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMADCSSWD %xmm2,(%r9),%xmm15,%xmm15
VPMADCSSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMADCSSWD %xmm2,(%r12),%xmm15,%xmm0
VPMADCSSWD %xmm0,(%r13),%xmm15,%xmm0
VPMADCSSWD %xmm15,(%r13),%xmm7,%xmm11
VPMADCSSWD %xmm15,(%r12),%xmm7,%xmm11
VPMADCSSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMADCSSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPMADCSWD xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPMADCSWD %xmm0,%xmm15,%xmm7,%xmm0
VPMADCSWD %xmm2,(%r9),%xmm0,%xmm0
VPMADCSWD %xmm15,(%r9),%xmm15,%xmm0
VPMADCSWD %xmm15,%xmm0,%xmm0,%xmm11
VPMADCSWD %xmm15,%xmm12,%xmm0,%xmm0
VPMADCSWD %xmm15,%xmm12,%xmm0,%xmm15
VPMADCSWD %xmm15,(%r12),%xmm0,%xmm15
VPMADCSWD %xmm2,%xmm0,%xmm0,%xmm15
VPMADCSWD %xmm2,(%r9),%xmm15,%xmm15
VPMADCSWD %xmm2,%xmm12,%xmm15,%xmm15
VPMADCSWD %xmm2,(%r12),%xmm15,%xmm0
VPMADCSWD %xmm0,(%r13),%xmm15,%xmm0
VPMADCSWD %xmm15,(%r13),%xmm7,%xmm11
VPMADCSWD %xmm15,(%r12),%xmm7,%xmm11
VPMADCSWD %xmm15,%xmm15,%xmm0,%xmm0
VPMADCSWD %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPPERM xmm4/mem128, xmm3, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm12,%xmm7,%xmm0
VPPERM (%rax),%xmm15,%xmm0,%xmm0
VPPERM (%r10),%xmm15,%xmm15,%xmm0
VPPERM %xmm2,%xmm0,%xmm0,%xmm11
VPPERM %xmm2,%xmm0,%xmm0,%xmm0
VPPERM %xmm2,%xmm0,%xmm0,%xmm15
VPPERM (%r10),%xmm12,%xmm0,%xmm15
VPPERM (%rax),%xmm0,%xmm0,%xmm15
VPPERM (%r12),%xmm15,%xmm15,%xmm15
VPPERM (%r12),%xmm0,%xmm15,%xmm15
VPPERM (%rax),%xmm12,%xmm15,%xmm0
VPPERM %xmm15,%xmm15,%xmm15,%xmm0
VPPERM (%r10),%xmm15,%xmm7,%xmm11
VPPERM %xmm2,%xmm12,%xmm7,%xmm11
VPPERM %xmm2,%xmm12,%xmm0,%xmm0
VPPERM (%r12),%xmm15,%xmm15,%xmm11
# Tests for op VPPERM xmm4, xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPPERM %xmm0,%xmm15,%xmm7,%xmm0
VPPERM %xmm2,(%r9),%xmm0,%xmm0
VPPERM %xmm15,(%r9),%xmm15,%xmm0
VPPERM %xmm15,%xmm0,%xmm0,%xmm11
VPPERM %xmm15,%xmm12,%xmm0,%xmm0
VPPERM %xmm15,%xmm12,%xmm0,%xmm15
VPPERM %xmm15,(%r12),%xmm0,%xmm15
VPPERM %xmm2,%xmm0,%xmm0,%xmm15
VPPERM %xmm2,(%r9),%xmm15,%xmm15
VPPERM %xmm2,%xmm12,%xmm15,%xmm15
VPPERM %xmm2,(%r12),%xmm15,%xmm0
VPPERM %xmm0,(%r13),%xmm15,%xmm0
VPPERM %xmm15,(%r13),%xmm7,%xmm11
VPPERM %xmm15,(%r12),%xmm7,%xmm11
VPPERM %xmm15,%xmm15,%xmm0,%xmm0
VPPERM %xmm2,(%r9),%xmm15,%xmm11
# Tests for op VPROTB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTB %xmm2,%xmm0,%xmm15
VPROTB %xmm2,%xmm12,%xmm7
VPROTB %xmm2,%xmm0,%xmm0
VPROTB %xmm15,(%r9),%xmm15
VPROTB %xmm0,%xmm15,%xmm0
VPROTB %xmm0,%xmm15,%xmm15
VPROTB %xmm0,%xmm12,%xmm0
VPROTB %xmm15,%xmm12,%xmm0
VPROTB %xmm2,(%r12),%xmm15
VPROTB %xmm0,(%r9),%xmm7
VPROTB %xmm0,%xmm12,%xmm7
VPROTB %xmm2,(%r9),%xmm15
VPROTB %xmm2,(%r13),%xmm0
VPROTB %xmm15,(%r9),%xmm7
VPROTB %xmm15,(%r12),%xmm0
VPROTB %xmm0,(%r13),%xmm15
# Tests for op VPROTB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTB (%r12),%xmm0,%xmm15
VPROTB (%r12),%xmm0,%xmm7
VPROTB (%rax),%xmm0,%xmm0
VPROTB (%r10),%xmm15,%xmm15
VPROTB %xmm15,%xmm12,%xmm0
VPROTB %xmm15,%xmm12,%xmm15
VPROTB %xmm15,%xmm0,%xmm0
VPROTB %xmm2,%xmm0,%xmm0
VPROTB (%rax),%xmm12,%xmm15
VPROTB %xmm15,%xmm15,%xmm7
VPROTB %xmm0,%xmm0,%xmm7
VPROTB (%r12),%xmm15,%xmm15
VPROTB (%r12),%xmm15,%xmm0
VPROTB (%r10),%xmm15,%xmm7
VPROTB (%r10),%xmm12,%xmm0
VPROTB %xmm15,%xmm15,%xmm15
# Tests for op VPROTB imm8, xmm2, xmm1 (at&t syntax)
VPROTB $0x3,%xmm11,%xmm15
VPROTB $0xFF,%xmm0,%xmm0
VPROTB $0xFF,%xmm11,%xmm4
VPROTB $0x0,%xmm11,%xmm4
VPROTB $0x0,%xmm15,%xmm4
VPROTB $0x0,%xmm0,%xmm15
VPROTB $0xFF,%xmm11,%xmm0
VPROTB $0x3,%xmm0,%xmm0
VPROTB $0x3,%xmm11,%xmm0
VPROTB $0x0,%xmm0,%xmm4
VPROTB $0xFF,%xmm15,%xmm0
VPROTB $0xFF,%xmm0,%xmm15
VPROTB $0xFF,%xmm15,%xmm15
VPROTB $0x3,%xmm15,%xmm4
VPROTB $0xFF,%xmm11,%xmm15
VPROTB $0x3,%xmm0,%xmm15
# Tests for op VPROTD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTD %xmm2,%xmm0,%xmm15
VPROTD %xmm2,%xmm12,%xmm7
VPROTD %xmm2,%xmm0,%xmm0
VPROTD %xmm15,(%r9),%xmm15
VPROTD %xmm0,%xmm15,%xmm0
VPROTD %xmm0,%xmm15,%xmm15
VPROTD %xmm0,%xmm12,%xmm0
VPROTD %xmm15,%xmm12,%xmm0
VPROTD %xmm2,(%r12),%xmm15
VPROTD %xmm0,(%r9),%xmm7
VPROTD %xmm0,%xmm12,%xmm7
VPROTD %xmm2,(%r9),%xmm15
VPROTD %xmm2,(%r13),%xmm0
VPROTD %xmm15,(%r9),%xmm7
VPROTD %xmm15,(%r12),%xmm0
VPROTD %xmm0,(%r13),%xmm15
# Tests for op VPROTD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTD (%r12),%xmm0,%xmm15
VPROTD (%r12),%xmm0,%xmm7
VPROTD (%rax),%xmm0,%xmm0
VPROTD (%r10),%xmm15,%xmm15
VPROTD %xmm15,%xmm12,%xmm0
VPROTD %xmm15,%xmm12,%xmm15
VPROTD %xmm15,%xmm0,%xmm0
VPROTD %xmm2,%xmm0,%xmm0
VPROTD (%rax),%xmm12,%xmm15
VPROTD %xmm15,%xmm15,%xmm7
VPROTD %xmm0,%xmm0,%xmm7
VPROTD (%r12),%xmm15,%xmm15
VPROTD (%r12),%xmm15,%xmm0
VPROTD (%r10),%xmm15,%xmm7
VPROTD (%r10),%xmm12,%xmm0
VPROTD %xmm15,%xmm15,%xmm15
# Tests for op VPROTD imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTD $0x0,%xmm15,%xmm15
VPROTD $0x0,(%rsi),%xmm15
VPROTD $0x0,%xmm0,%xmm11
VPROTD $0xFF,%xmm15,%xmm0
VPROTD $0x3,%xmm0,%xmm0
VPROTD $0x3,%xmm15,%xmm0
VPROTD $0x0,%xmm11,%xmm11
VPROTD $0x0,%xmm0,%xmm15
VPROTD $0x3,(%rcx),%xmm0
VPROTD $0xFF,(%rsi),%xmm0
VPROTD $0x0,(%rdi),%xmm15
VPROTD $0xFF,%xmm15,%xmm15
VPROTD $0xFF,%xmm11,%xmm11
VPROTD $0xFF,(%rsi),%xmm11
VPROTD $0x3,(%rdi),%xmm15
VPROTD $0x3,%xmm15,%xmm11
# Tests for op VPROTQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ %xmm2,%xmm0,%xmm15
VPROTQ %xmm2,%xmm12,%xmm7
VPROTQ %xmm2,%xmm0,%xmm0
VPROTQ %xmm15,(%r9),%xmm15
VPROTQ %xmm0,%xmm15,%xmm0
VPROTQ %xmm0,%xmm15,%xmm15
VPROTQ %xmm0,%xmm12,%xmm0
VPROTQ %xmm15,%xmm12,%xmm0
VPROTQ %xmm2,(%r12),%xmm15
VPROTQ %xmm0,(%r9),%xmm7
VPROTQ %xmm0,%xmm12,%xmm7
VPROTQ %xmm2,(%r9),%xmm15
VPROTQ %xmm2,(%r13),%xmm0
VPROTQ %xmm15,(%r9),%xmm7
VPROTQ %xmm15,(%r12),%xmm0
VPROTQ %xmm0,(%r13),%xmm15
# Tests for op VPROTQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTQ (%r12),%xmm0,%xmm15
VPROTQ (%r12),%xmm0,%xmm7
VPROTQ (%rax),%xmm0,%xmm0
VPROTQ (%r10),%xmm15,%xmm15
VPROTQ %xmm15,%xmm12,%xmm0
VPROTQ %xmm15,%xmm12,%xmm15
VPROTQ %xmm15,%xmm0,%xmm0
VPROTQ %xmm2,%xmm0,%xmm0
VPROTQ (%rax),%xmm12,%xmm15
VPROTQ %xmm15,%xmm15,%xmm7
VPROTQ %xmm0,%xmm0,%xmm7
VPROTQ (%r12),%xmm15,%xmm15
VPROTQ (%r12),%xmm15,%xmm0
VPROTQ (%r10),%xmm15,%xmm7
VPROTQ (%r10),%xmm12,%xmm0
VPROTQ %xmm15,%xmm15,%xmm15
# Tests for op VPROTQ imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTQ $0x0,%xmm15,%xmm15
VPROTQ $0x0,(%rsi),%xmm15
VPROTQ $0x0,%xmm0,%xmm11
VPROTQ $0xFF,%xmm15,%xmm0
VPROTQ $0x3,%xmm0,%xmm0
VPROTQ $0x3,%xmm15,%xmm0
VPROTQ $0x0,%xmm11,%xmm11
VPROTQ $0x0,%xmm0,%xmm15
VPROTQ $0x3,(%rcx),%xmm0
VPROTQ $0xFF,(%rsi),%xmm0
VPROTQ $0x0,(%rdi),%xmm15
VPROTQ $0xFF,%xmm15,%xmm15
VPROTQ $0xFF,%xmm11,%xmm11
VPROTQ $0xFF,(%rsi),%xmm11
VPROTQ $0x3,(%rdi),%xmm15
VPROTQ $0x3,%xmm15,%xmm11
# Tests for op VPROTW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPROTW %xmm2,%xmm0,%xmm15
VPROTW %xmm2,%xmm12,%xmm7
VPROTW %xmm2,%xmm0,%xmm0
VPROTW %xmm15,(%r9),%xmm15
VPROTW %xmm0,%xmm15,%xmm0
VPROTW %xmm0,%xmm15,%xmm15
VPROTW %xmm0,%xmm12,%xmm0
VPROTW %xmm15,%xmm12,%xmm0
VPROTW %xmm2,(%r12),%xmm15
VPROTW %xmm0,(%r9),%xmm7
VPROTW %xmm0,%xmm12,%xmm7
VPROTW %xmm2,(%r9),%xmm15
VPROTW %xmm2,(%r13),%xmm0
VPROTW %xmm15,(%r9),%xmm7
VPROTW %xmm15,(%r12),%xmm0
VPROTW %xmm0,(%r13),%xmm15
# Tests for op VPROTW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPROTW (%r12),%xmm0,%xmm15
VPROTW (%r12),%xmm0,%xmm7
VPROTW (%rax),%xmm0,%xmm0
VPROTW (%r10),%xmm15,%xmm15
VPROTW %xmm15,%xmm12,%xmm0
VPROTW %xmm15,%xmm12,%xmm15
VPROTW %xmm15,%xmm0,%xmm0
VPROTW %xmm2,%xmm0,%xmm0
VPROTW (%rax),%xmm12,%xmm15
VPROTW %xmm15,%xmm15,%xmm7
VPROTW %xmm0,%xmm0,%xmm7
VPROTW (%r12),%xmm15,%xmm15
VPROTW (%r12),%xmm15,%xmm0
VPROTW (%r10),%xmm15,%xmm7
VPROTW (%r10),%xmm12,%xmm0
VPROTW %xmm15,%xmm15,%xmm15
# Tests for op VPROTW imm8, xmm2/mem128, xmm1 (at&t syntax)
VPROTW $0x0,%xmm15,%xmm15
VPROTW $0x0,(%rsi),%xmm15
VPROTW $0x0,%xmm0,%xmm11
VPROTW $0xFF,%xmm15,%xmm0
VPROTW $0x3,%xmm0,%xmm0
VPROTW $0x3,%xmm15,%xmm0
VPROTW $0x0,%xmm11,%xmm11
VPROTW $0x0,%xmm0,%xmm15
VPROTW $0x3,(%rcx),%xmm0
VPROTW $0xFF,(%rsi),%xmm0
VPROTW $0x0,(%rdi),%xmm15
VPROTW $0xFF,%xmm15,%xmm15
VPROTW $0xFF,%xmm11,%xmm11
VPROTW $0xFF,(%rsi),%xmm11
VPROTW $0x3,(%rdi),%xmm15
VPROTW $0x3,%xmm15,%xmm11
# Tests for op VPSHAB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAB %xmm2,%xmm0,%xmm15
VPSHAB %xmm2,%xmm12,%xmm7
VPSHAB %xmm2,%xmm0,%xmm0
VPSHAB %xmm15,(%r9),%xmm15
VPSHAB %xmm0,%xmm15,%xmm0
VPSHAB %xmm0,%xmm15,%xmm15
VPSHAB %xmm0,%xmm12,%xmm0
VPSHAB %xmm15,%xmm12,%xmm0
VPSHAB %xmm2,(%r12),%xmm15
VPSHAB %xmm0,(%r9),%xmm7
VPSHAB %xmm0,%xmm12,%xmm7
VPSHAB %xmm2,(%r9),%xmm15
VPSHAB %xmm2,(%r13),%xmm0
VPSHAB %xmm15,(%r9),%xmm7
VPSHAB %xmm15,(%r12),%xmm0
VPSHAB %xmm0,(%r13),%xmm15
# Tests for op VPSHAB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAB (%r12),%xmm0,%xmm15
VPSHAB (%r12),%xmm0,%xmm7
VPSHAB (%rax),%xmm0,%xmm0
VPSHAB (%r10),%xmm15,%xmm15
VPSHAB %xmm15,%xmm12,%xmm0
VPSHAB %xmm15,%xmm12,%xmm15
VPSHAB %xmm15,%xmm0,%xmm0
VPSHAB %xmm2,%xmm0,%xmm0
VPSHAB (%rax),%xmm12,%xmm15
VPSHAB %xmm15,%xmm15,%xmm7
VPSHAB %xmm0,%xmm0,%xmm7
VPSHAB (%r12),%xmm15,%xmm15
VPSHAB (%r12),%xmm15,%xmm0
VPSHAB (%r10),%xmm15,%xmm7
VPSHAB (%r10),%xmm12,%xmm0
VPSHAB %xmm15,%xmm15,%xmm15
# Tests for op VPSHAD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAD %xmm2,%xmm0,%xmm15
VPSHAD %xmm2,%xmm12,%xmm7
VPSHAD %xmm2,%xmm0,%xmm0
VPSHAD %xmm15,(%r9),%xmm15
VPSHAD %xmm0,%xmm15,%xmm0
VPSHAD %xmm0,%xmm15,%xmm15
VPSHAD %xmm0,%xmm12,%xmm0
VPSHAD %xmm15,%xmm12,%xmm0
VPSHAD %xmm2,(%r12),%xmm15
VPSHAD %xmm0,(%r9),%xmm7
VPSHAD %xmm0,%xmm12,%xmm7
VPSHAD %xmm2,(%r9),%xmm15
VPSHAD %xmm2,(%r13),%xmm0
VPSHAD %xmm15,(%r9),%xmm7
VPSHAD %xmm15,(%r12),%xmm0
VPSHAD %xmm0,(%r13),%xmm15
# Tests for op VPSHAD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAD (%r12),%xmm0,%xmm15
VPSHAD (%r12),%xmm0,%xmm7
VPSHAD (%rax),%xmm0,%xmm0
VPSHAD (%r10),%xmm15,%xmm15
VPSHAD %xmm15,%xmm12,%xmm0
VPSHAD %xmm15,%xmm12,%xmm15
VPSHAD %xmm15,%xmm0,%xmm0
VPSHAD %xmm2,%xmm0,%xmm0
VPSHAD (%rax),%xmm12,%xmm15
VPSHAD %xmm15,%xmm15,%xmm7
VPSHAD %xmm0,%xmm0,%xmm7
VPSHAD (%r12),%xmm15,%xmm15
VPSHAD (%r12),%xmm15,%xmm0
VPSHAD (%r10),%xmm15,%xmm7
VPSHAD (%r10),%xmm12,%xmm0
VPSHAD %xmm15,%xmm15,%xmm15
# Tests for op VPSHAQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAQ %xmm2,%xmm0,%xmm15
VPSHAQ %xmm2,%xmm12,%xmm7
VPSHAQ %xmm2,%xmm0,%xmm0
VPSHAQ %xmm15,(%r9),%xmm15
VPSHAQ %xmm0,%xmm15,%xmm0
VPSHAQ %xmm0,%xmm15,%xmm15
VPSHAQ %xmm0,%xmm12,%xmm0
VPSHAQ %xmm15,%xmm12,%xmm0
VPSHAQ %xmm2,(%r12),%xmm15
VPSHAQ %xmm0,(%r9),%xmm7
VPSHAQ %xmm0,%xmm12,%xmm7
VPSHAQ %xmm2,(%r9),%xmm15
VPSHAQ %xmm2,(%r13),%xmm0
VPSHAQ %xmm15,(%r9),%xmm7
VPSHAQ %xmm15,(%r12),%xmm0
VPSHAQ %xmm0,(%r13),%xmm15
# Tests for op VPSHAQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAQ (%r12),%xmm0,%xmm15
VPSHAQ (%r12),%xmm0,%xmm7
VPSHAQ (%rax),%xmm0,%xmm0
VPSHAQ (%r10),%xmm15,%xmm15
VPSHAQ %xmm15,%xmm12,%xmm0
VPSHAQ %xmm15,%xmm12,%xmm15
VPSHAQ %xmm15,%xmm0,%xmm0
VPSHAQ %xmm2,%xmm0,%xmm0
VPSHAQ (%rax),%xmm12,%xmm15
VPSHAQ %xmm15,%xmm15,%xmm7
VPSHAQ %xmm0,%xmm0,%xmm7
VPSHAQ (%r12),%xmm15,%xmm15
VPSHAQ (%r12),%xmm15,%xmm0
VPSHAQ (%r10),%xmm15,%xmm7
VPSHAQ (%r10),%xmm12,%xmm0
VPSHAQ %xmm15,%xmm15,%xmm15
# Tests for op VPSHAW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHAW %xmm2,%xmm0,%xmm15
VPSHAW %xmm2,%xmm12,%xmm7
VPSHAW %xmm2,%xmm0,%xmm0
VPSHAW %xmm15,(%r9),%xmm15
VPSHAW %xmm0,%xmm15,%xmm0
VPSHAW %xmm0,%xmm15,%xmm15
VPSHAW %xmm0,%xmm12,%xmm0
VPSHAW %xmm15,%xmm12,%xmm0
VPSHAW %xmm2,(%r12),%xmm15
VPSHAW %xmm0,(%r9),%xmm7
VPSHAW %xmm0,%xmm12,%xmm7
VPSHAW %xmm2,(%r9),%xmm15
VPSHAW %xmm2,(%r13),%xmm0
VPSHAW %xmm15,(%r9),%xmm7
VPSHAW %xmm15,(%r12),%xmm0
VPSHAW %xmm0,(%r13),%xmm15
# Tests for op VPSHAW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHAW (%r12),%xmm0,%xmm15
VPSHAW (%r12),%xmm0,%xmm7
VPSHAW (%rax),%xmm0,%xmm0
VPSHAW (%r10),%xmm15,%xmm15
VPSHAW %xmm15,%xmm12,%xmm0
VPSHAW %xmm15,%xmm12,%xmm15
VPSHAW %xmm15,%xmm0,%xmm0
VPSHAW %xmm2,%xmm0,%xmm0
VPSHAW (%rax),%xmm12,%xmm15
VPSHAW %xmm15,%xmm15,%xmm7
VPSHAW %xmm0,%xmm0,%xmm7
VPSHAW (%r12),%xmm15,%xmm15
VPSHAW (%r12),%xmm15,%xmm0
VPSHAW (%r10),%xmm15,%xmm7
VPSHAW (%r10),%xmm12,%xmm0
VPSHAW %xmm15,%xmm15,%xmm15
# Tests for op VPSHLB xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLB %xmm2,%xmm0,%xmm15
VPSHLB %xmm2,%xmm12,%xmm7
VPSHLB %xmm2,%xmm0,%xmm0
VPSHLB %xmm15,(%r9),%xmm15
VPSHLB %xmm0,%xmm15,%xmm0
VPSHLB %xmm0,%xmm15,%xmm15
VPSHLB %xmm0,%xmm12,%xmm0
VPSHLB %xmm15,%xmm12,%xmm0
VPSHLB %xmm2,(%r12),%xmm15
VPSHLB %xmm0,(%r9),%xmm7
VPSHLB %xmm0,%xmm12,%xmm7
VPSHLB %xmm2,(%r9),%xmm15
VPSHLB %xmm2,(%r13),%xmm0
VPSHLB %xmm15,(%r9),%xmm7
VPSHLB %xmm15,(%r12),%xmm0
VPSHLB %xmm0,(%r13),%xmm15
# Tests for op VPSHLB xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLB (%r12),%xmm0,%xmm15
VPSHLB (%r12),%xmm0,%xmm7
VPSHLB (%rax),%xmm0,%xmm0
VPSHLB (%r10),%xmm15,%xmm15
VPSHLB %xmm15,%xmm12,%xmm0
VPSHLB %xmm15,%xmm12,%xmm15
VPSHLB %xmm15,%xmm0,%xmm0
VPSHLB %xmm2,%xmm0,%xmm0
VPSHLB (%rax),%xmm12,%xmm15
VPSHLB %xmm15,%xmm15,%xmm7
VPSHLB %xmm0,%xmm0,%xmm7
VPSHLB (%r12),%xmm15,%xmm15
VPSHLB (%r12),%xmm15,%xmm0
VPSHLB (%r10),%xmm15,%xmm7
VPSHLB (%r10),%xmm12,%xmm0
VPSHLB %xmm15,%xmm15,%xmm15
# Tests for op VPSHLD xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLD %xmm2,%xmm0,%xmm15
VPSHLD %xmm2,%xmm12,%xmm7
VPSHLD %xmm2,%xmm0,%xmm0
VPSHLD %xmm15,(%r9),%xmm15
VPSHLD %xmm0,%xmm15,%xmm0
VPSHLD %xmm0,%xmm15,%xmm15
VPSHLD %xmm0,%xmm12,%xmm0
VPSHLD %xmm15,%xmm12,%xmm0
VPSHLD %xmm2,(%r12),%xmm15
VPSHLD %xmm0,(%r9),%xmm7
VPSHLD %xmm0,%xmm12,%xmm7
VPSHLD %xmm2,(%r9),%xmm15
VPSHLD %xmm2,(%r13),%xmm0
VPSHLD %xmm15,(%r9),%xmm7
VPSHLD %xmm15,(%r12),%xmm0
VPSHLD %xmm0,(%r13),%xmm15
# Tests for op VPSHLD xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLD (%r12),%xmm0,%xmm15
VPSHLD (%r12),%xmm0,%xmm7
VPSHLD (%rax),%xmm0,%xmm0
VPSHLD (%r10),%xmm15,%xmm15
VPSHLD %xmm15,%xmm12,%xmm0
VPSHLD %xmm15,%xmm12,%xmm15
VPSHLD %xmm15,%xmm0,%xmm0
VPSHLD %xmm2,%xmm0,%xmm0
VPSHLD (%rax),%xmm12,%xmm15
VPSHLD %xmm15,%xmm15,%xmm7
VPSHLD %xmm0,%xmm0,%xmm7
VPSHLD (%r12),%xmm15,%xmm15
VPSHLD (%r12),%xmm15,%xmm0
VPSHLD (%r10),%xmm15,%xmm7
VPSHLD (%r10),%xmm12,%xmm0
VPSHLD %xmm15,%xmm15,%xmm15
# Tests for op VPSHLQ xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLQ %xmm2,%xmm0,%xmm15
VPSHLQ %xmm2,%xmm12,%xmm7
VPSHLQ %xmm2,%xmm0,%xmm0
VPSHLQ %xmm15,(%r9),%xmm15
VPSHLQ %xmm0,%xmm15,%xmm0
VPSHLQ %xmm0,%xmm15,%xmm15
VPSHLQ %xmm0,%xmm12,%xmm0
VPSHLQ %xmm15,%xmm12,%xmm0
VPSHLQ %xmm2,(%r12),%xmm15
VPSHLQ %xmm0,(%r9),%xmm7
VPSHLQ %xmm0,%xmm12,%xmm7
VPSHLQ %xmm2,(%r9),%xmm15
VPSHLQ %xmm2,(%r13),%xmm0
VPSHLQ %xmm15,(%r9),%xmm7
VPSHLQ %xmm15,(%r12),%xmm0
VPSHLQ %xmm0,(%r13),%xmm15
# Tests for op VPSHLQ xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLQ (%r12),%xmm0,%xmm15
VPSHLQ (%r12),%xmm0,%xmm7
VPSHLQ (%rax),%xmm0,%xmm0
VPSHLQ (%r10),%xmm15,%xmm15
VPSHLQ %xmm15,%xmm12,%xmm0
VPSHLQ %xmm15,%xmm12,%xmm15
VPSHLQ %xmm15,%xmm0,%xmm0
VPSHLQ %xmm2,%xmm0,%xmm0
VPSHLQ (%rax),%xmm12,%xmm15
VPSHLQ %xmm15,%xmm15,%xmm7
VPSHLQ %xmm0,%xmm0,%xmm7
VPSHLQ (%r12),%xmm15,%xmm15
VPSHLQ (%r12),%xmm15,%xmm0
VPSHLQ (%r10),%xmm15,%xmm7
VPSHLQ (%r10),%xmm12,%xmm0
VPSHLQ %xmm15,%xmm15,%xmm15
# Tests for op VPSHLW xmm3, xmm2/mem128, xmm1 (at&t syntax)
VPSHLW %xmm2,%xmm0,%xmm15
VPSHLW %xmm2,%xmm12,%xmm7
VPSHLW %xmm2,%xmm0,%xmm0
VPSHLW %xmm15,(%r9),%xmm15
VPSHLW %xmm0,%xmm15,%xmm0
VPSHLW %xmm0,%xmm15,%xmm15
VPSHLW %xmm0,%xmm12,%xmm0
VPSHLW %xmm15,%xmm12,%xmm0
VPSHLW %xmm2,(%r12),%xmm15
VPSHLW %xmm0,(%r9),%xmm7
VPSHLW %xmm0,%xmm12,%xmm7
VPSHLW %xmm2,(%r9),%xmm15
VPSHLW %xmm2,(%r13),%xmm0
VPSHLW %xmm15,(%r9),%xmm7
VPSHLW %xmm15,(%r12),%xmm0
VPSHLW %xmm0,(%r13),%xmm15
# Tests for op VPSHLW xmm3/mem128, xmm2, xmm1 (at&t syntax)
VPSHLW (%r12),%xmm0,%xmm15
VPSHLW (%r12),%xmm0,%xmm7
VPSHLW (%rax),%xmm0,%xmm0
VPSHLW (%r10),%xmm15,%xmm15
VPSHLW %xmm15,%xmm12,%xmm0
VPSHLW %xmm15,%xmm12,%xmm15
VPSHLW %xmm15,%xmm0,%xmm0
VPSHLW %xmm2,%xmm0,%xmm0
VPSHLW (%rax),%xmm12,%xmm15
VPSHLW %xmm15,%xmm15,%xmm7
VPSHLW %xmm0,%xmm0,%xmm7
VPSHLW (%r12),%xmm15,%xmm15
VPSHLW (%r12),%xmm15,%xmm0
VPSHLW (%r10),%xmm15,%xmm7
VPSHLW (%r10),%xmm12,%xmm0
VPSHLW %xmm15,%xmm15,%xmm15
# All variants of VPCOM* aliases
VPCOMLTB %xmm8,%xmm11,%xmm0
VPCOMLTB (%r12),%xmm11,%xmm15
VPCOMLTB (%rdi,%rcx),%xmm0,%xmm0
VPCOMLTB %xmm15,%xmm0,%xmm15
VPCOMLTB (%rdi,%rcx),%xmm15,%xmm7
VPCOMLTB (%rdi,%rcx),%xmm11,%xmm0
VPCOMLTB 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMLTB (%rbx,%rax,4),%xmm15,%xmm7
VPCOMLTD (%rdi,%rcx),%xmm11,%xmm15
VPCOMLTD %xmm0,%xmm15,%xmm0
VPCOMLTD (%r12),%xmm15,%xmm7
VPCOMLTD %xmm15,%xmm0,%xmm15
VPCOMLTD %xmm0,%xmm11,%xmm15
VPCOMLTD (%r12),%xmm11,%xmm0
VPCOMLTD %xmm8,%xmm0,%xmm15
VPCOMLTD %xmm15,%xmm0,%xmm0
VPCOMLTQ %xmm0,%xmm11,%xmm7
VPCOMLTQ %xmm15,%xmm11,%xmm7
VPCOMLTQ %xmm15,%xmm11,%xmm0
VPCOMLTQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMLTQ 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMLTQ (%r12),%xmm15,%xmm15
VPCOMLTQ %xmm15,%xmm0,%xmm0
VPCOMLTQ (%rdi,%rcx),%xmm0,%xmm7
VPCOMLTUB %xmm0,%xmm0,%xmm15
VPCOMLTUB 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMLTUB (%rdi,%rcx),%xmm0,%xmm7
VPCOMLTUB %xmm8,%xmm11,%xmm15
VPCOMLTUB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLTUB (%rdi,%rcx),%xmm11,%xmm15
VPCOMLTUB %xmm8,%xmm11,%xmm7
VPCOMLTUB (%r12),%xmm15,%xmm7
VPCOMLTUD %xmm0,%xmm0,%xmm7
VPCOMLTUD %xmm15,%xmm15,%xmm0
VPCOMLTUD %xmm15,%xmm0,%xmm7
VPCOMLTUD (%rdi,%rcx),%xmm11,%xmm15
VPCOMLTUD (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTUD %xmm15,%xmm0,%xmm15
VPCOMLTUD 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMLTUD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMLTUQ %xmm15,%xmm0,%xmm0
VPCOMLTUQ (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLTUQ (%r12),%xmm11,%xmm15
VPCOMLTUQ (%rdi,%rcx),%xmm0,%xmm0
VPCOMLTUQ %xmm0,%xmm11,%xmm0
VPCOMLTUQ (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTUQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMLTUQ (%r12),%xmm0,%xmm7
VPCOMLTUW (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLTUW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMLTUW %xmm8,%xmm11,%xmm7
VPCOMLTUW (%r12),%xmm15,%xmm7
VPCOMLTUW (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTUW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMLTUW %xmm0,%xmm15,%xmm7
VPCOMLTUW %xmm0,%xmm0,%xmm0
VPCOMLTW %xmm0,%xmm11,%xmm0
VPCOMLTW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMLTW %xmm15,%xmm11,%xmm15
VPCOMLTW (%r12),%xmm11,%xmm7
VPCOMLTW (%r12),%xmm15,%xmm7
VPCOMLTW 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMLTW (%rdi,%rcx),%xmm11,%xmm7
VPCOMLTW %xmm8,%xmm11,%xmm7
VPCOMLEB %xmm0,%xmm11,%xmm7
VPCOMLEB 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMLEB %xmm0,%xmm0,%xmm15
VPCOMLEB %xmm15,%xmm0,%xmm15
VPCOMLEB (%r12),%xmm11,%xmm7
VPCOMLEB %xmm15,%xmm11,%xmm7
VPCOMLEB (%rbx,%rax,4),%xmm11,%xmm15
VPCOMLEB (%r12),%xmm15,%xmm15
VPCOMLED (%r12),%xmm0,%xmm15
VPCOMLED %xmm8,%xmm11,%xmm15
VPCOMLED 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMLED %xmm0,%xmm15,%xmm7
VPCOMLED %xmm15,%xmm15,%xmm15
VPCOMLED 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMLED (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLED (%rdi,%rcx),%xmm0,%xmm0
VPCOMLEQ 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMLEQ %xmm0,%xmm0,%xmm15
VPCOMLEQ %xmm15,%xmm11,%xmm0
VPCOMLEQ %xmm8,%xmm11,%xmm0
VPCOMLEQ %xmm8,%xmm0,%xmm0
VPCOMLEQ %xmm8,%xmm15,%xmm7
VPCOMLEQ %xmm0,%xmm11,%xmm7
VPCOMLEQ %xmm15,%xmm0,%xmm0
VPCOMLEUB (%rdi,%rcx),%xmm15,%xmm7
VPCOMLEUB 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMLEUB (%rbx,%rax,4),%xmm0,%xmm15
VPCOMLEUB 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMLEUB (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLEUB (%r12),%xmm11,%xmm7
VPCOMLEUB %xmm0,%xmm15,%xmm0
VPCOMLEUB (%rbx,%rax,4),%xmm11,%xmm15
VPCOMLEUD (%rbx,%rax,4),%xmm15,%xmm15
VPCOMLEUD %xmm15,%xmm15,%xmm0
VPCOMLEUD %xmm15,%xmm0,%xmm0
VPCOMLEUD (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLEUD %xmm8,%xmm11,%xmm7
VPCOMLEUD (%rbx,%rax,4),%xmm0,%xmm0
VPCOMLEUD 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMLEUD 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMLEUQ %xmm8,%xmm11,%xmm15
VPCOMLEUQ (%rbx,%rax,4),%xmm0,%xmm15
VPCOMLEUQ %xmm15,%xmm0,%xmm15
VPCOMLEUQ %xmm15,%xmm15,%xmm7
VPCOMLEUQ %xmm15,%xmm11,%xmm0
VPCOMLEUQ (%rbx,%rax,4),%xmm0,%xmm7
VPCOMLEUQ 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMLEUQ %xmm8,%xmm0,%xmm7
VPCOMLEUW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMLEUW 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMLEUW %xmm8,%xmm15,%xmm0
VPCOMLEUW (%rbx,%rax,4),%xmm15,%xmm0
VPCOMLEUW %xmm0,%xmm0,%xmm15
VPCOMLEUW (%r12),%xmm0,%xmm7
VPCOMLEUW %xmm8,%xmm0,%xmm0
VPCOMLEUW (%rbx,%rax,4),%xmm11,%xmm7
VPCOMLEW (%rdi,%rcx),%xmm15,%xmm15
VPCOMLEW %xmm0,%xmm15,%xmm7
VPCOMLEW (%rbx,%rax,4),%xmm15,%xmm0
VPCOMLEW %xmm8,%xmm11,%xmm0
VPCOMLEW (%rdi,%rcx),%xmm15,%xmm7
VPCOMLEW (%rdi,%rcx),%xmm11,%xmm15
VPCOMLEW %xmm8,%xmm0,%xmm0
VPCOMLEW (%r12),%xmm11,%xmm7
VPCOMGTB (%r12),%xmm15,%xmm15
VPCOMGTB (%rbx,%rax,4),%xmm15,%xmm15
VPCOMGTB (%r12),%xmm11,%xmm15
VPCOMGTB %xmm15,%xmm15,%xmm0
VPCOMGTB %xmm8,%xmm15,%xmm0
VPCOMGTB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMGTB %xmm8,%xmm11,%xmm15
VPCOMGTB %xmm8,%xmm15,%xmm15
VPCOMGTD (%rdi,%rcx),%xmm15,%xmm15
VPCOMGTD %xmm15,%xmm11,%xmm15
VPCOMGTD %xmm8,%xmm0,%xmm7
VPCOMGTD 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMGTD (%rbx,%rax,4),%xmm11,%xmm15
VPCOMGTD %xmm8,%xmm0,%xmm0
VPCOMGTD (%rdi,%rcx),%xmm11,%xmm0
VPCOMGTD %xmm15,%xmm0,%xmm0
VPCOMGTQ %xmm15,%xmm0,%xmm7
VPCOMGTQ %xmm8,%xmm0,%xmm0
VPCOMGTQ (%rdi,%rcx),%xmm15,%xmm7
VPCOMGTQ (%r12),%xmm0,%xmm7
VPCOMGTQ %xmm15,%xmm15,%xmm7
VPCOMGTQ (%rdi,%rcx),%xmm11,%xmm15
VPCOMGTQ %xmm0,%xmm15,%xmm15
VPCOMGTQ (%r12),%xmm11,%xmm15
VPCOMGTUB %xmm8,%xmm15,%xmm15
VPCOMGTUB %xmm8,%xmm11,%xmm0
VPCOMGTUB (%rbx,%rax,4),%xmm0,%xmm15
VPCOMGTUB (%r12),%xmm0,%xmm15
VPCOMGTUB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMGTUB %xmm8,%xmm15,%xmm0
VPCOMGTUB %xmm15,%xmm15,%xmm15
VPCOMGTUB 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMGTUD (%r12),%xmm15,%xmm0
VPCOMGTUD %xmm15,%xmm11,%xmm15
VPCOMGTUD %xmm15,%xmm0,%xmm0
VPCOMGTUD 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMGTUD (%rbx,%rax,4),%xmm11,%xmm0
VPCOMGTUD %xmm0,%xmm0,%xmm0
VPCOMGTUD (%rdi,%rcx),%xmm0,%xmm0
VPCOMGTUD %xmm8,%xmm15,%xmm15
VPCOMGTUQ %xmm15,%xmm15,%xmm7
VPCOMGTUQ (%rbx,%rax,4),%xmm11,%xmm7
VPCOMGTUQ (%r12),%xmm11,%xmm0
VPCOMGTUQ 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMGTUQ %xmm8,%xmm11,%xmm15
VPCOMGTUQ %xmm0,%xmm0,%xmm0
VPCOMGTUQ %xmm0,%xmm15,%xmm15
VPCOMGTUQ (%r12),%xmm11,%xmm15
VPCOMGTUW (%rdi,%rcx),%xmm0,%xmm0
VPCOMGTUW 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMGTUW %xmm15,%xmm11,%xmm0
VPCOMGTUW (%r12),%xmm0,%xmm15
VPCOMGTUW %xmm0,%xmm15,%xmm7
VPCOMGTUW %xmm15,%xmm0,%xmm7
VPCOMGTUW %xmm15,%xmm11,%xmm7
VPCOMGTUW %xmm15,%xmm15,%xmm7
VPCOMGTW %xmm8,%xmm0,%xmm0
VPCOMGTW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMGTW %xmm15,%xmm0,%xmm7
VPCOMGTW %xmm15,%xmm15,%xmm7
VPCOMGTW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMGTW (%r12),%xmm15,%xmm7
VPCOMGTW 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMGTW (%rdi,%rcx),%xmm11,%xmm15
VPCOMGEB (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEB 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMGEB (%rdi,%rcx),%xmm15,%xmm15
VPCOMGEB (%rdi,%rcx),%xmm15,%xmm7
VPCOMGEB %xmm15,%xmm0,%xmm15
VPCOMGEB %xmm15,%xmm11,%xmm15
VPCOMGEB %xmm0,%xmm11,%xmm15
VPCOMGEB %xmm8,%xmm15,%xmm0
VPCOMGED %xmm15,%xmm11,%xmm15
VPCOMGED %xmm0,%xmm15,%xmm7
VPCOMGED (%rbx,%rax,4),%xmm0,%xmm7
VPCOMGED %xmm15,%xmm0,%xmm0
VPCOMGED %xmm15,%xmm0,%xmm15
VPCOMGED (%rdi,%rcx),%xmm11,%xmm15
VPCOMGED (%rbx,%rax,4),%xmm11,%xmm15
VPCOMGED %xmm8,%xmm0,%xmm15
VPCOMGEQ %xmm8,%xmm11,%xmm7
VPCOMGEQ %xmm15,%xmm15,%xmm7
VPCOMGEQ %xmm8,%xmm15,%xmm7
VPCOMGEQ %xmm15,%xmm11,%xmm0
VPCOMGEQ 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMGEQ 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMGEQ %xmm15,%xmm11,%xmm15
VPCOMGEQ (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEUB (%r12),%xmm15,%xmm7
VPCOMGEUB 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMGEUB (%r12),%xmm15,%xmm0
VPCOMGEUB (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEUB 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMGEUB %xmm0,%xmm0,%xmm7
VPCOMGEUB %xmm0,%xmm15,%xmm0
VPCOMGEUB %xmm15,%xmm11,%xmm7
VPCOMGEUD %xmm15,%xmm0,%xmm7
VPCOMGEUD (%rdi,%rcx),%xmm15,%xmm15
VPCOMGEUD (%rbx,%rax,4),%xmm11,%xmm0
VPCOMGEUD (%rbx,%rax,4),%xmm15,%xmm0
VPCOMGEUD %xmm0,%xmm11,%xmm15
VPCOMGEUD %xmm8,%xmm0,%xmm15
VPCOMGEUD (%r12),%xmm15,%xmm0
VPCOMGEUD (%rdi,%rcx),%xmm0,%xmm0
VPCOMGEUQ %xmm8,%xmm15,%xmm0
VPCOMGEUQ (%r12),%xmm11,%xmm15
VPCOMGEUQ %xmm15,%xmm0,%xmm7
VPCOMGEUQ %xmm0,%xmm11,%xmm15
VPCOMGEUQ (%rdi,%rcx),%xmm0,%xmm0
VPCOMGEUQ %xmm0,%xmm11,%xmm7
VPCOMGEUQ (%r12),%xmm0,%xmm0
VPCOMGEUQ 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMGEUW %xmm8,%xmm15,%xmm15
VPCOMGEUW %xmm15,%xmm0,%xmm7
VPCOMGEUW 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMGEUW %xmm0,%xmm11,%xmm7
VPCOMGEUW (%r12),%xmm15,%xmm15
VPCOMGEUW (%rdi,%rcx),%xmm11,%xmm7
VPCOMGEUW %xmm15,%xmm15,%xmm0
VPCOMGEUW %xmm8,%xmm15,%xmm7
VPCOMGEW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMGEW %xmm0,%xmm0,%xmm0
VPCOMGEW %xmm0,%xmm15,%xmm7
VPCOMGEW %xmm0,%xmm0,%xmm15
VPCOMGEW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMGEW %xmm0,%xmm0,%xmm7
VPCOMGEW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMGEW (%rbx,%rax,4),%xmm0,%xmm0
VPCOMEQB (%r12),%xmm11,%xmm15
VPCOMEQB (%r12),%xmm15,%xmm7
VPCOMEQB %xmm15,%xmm11,%xmm7
VPCOMEQB %xmm0,%xmm15,%xmm0
VPCOMEQB 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMEQB %xmm0,%xmm15,%xmm7
VPCOMEQB (%rdi,%rcx),%xmm0,%xmm7
VPCOMEQB 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMEQD %xmm15,%xmm11,%xmm0
VPCOMEQD %xmm8,%xmm0,%xmm0
VPCOMEQD (%rbx,%rax,4),%xmm0,%xmm0
VPCOMEQD %xmm8,%xmm11,%xmm7
VPCOMEQD %xmm8,%xmm0,%xmm7
VPCOMEQD (%r12),%xmm0,%xmm7
VPCOMEQD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMEQD 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMEQQ %xmm8,%xmm0,%xmm7
VPCOMEQQ %xmm8,%xmm0,%xmm15
VPCOMEQQ 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMEQQ (%rbx,%rax,4),%xmm15,%xmm7
VPCOMEQQ (%r12),%xmm0,%xmm15
VPCOMEQQ %xmm8,%xmm11,%xmm7
VPCOMEQQ %xmm15,%xmm15,%xmm15
VPCOMEQQ 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMEQUB %xmm0,%xmm0,%xmm15
VPCOMEQUB %xmm15,%xmm11,%xmm7
VPCOMEQUB %xmm0,%xmm11,%xmm0
VPCOMEQUB (%r12),%xmm0,%xmm0
VPCOMEQUB (%r12),%xmm15,%xmm15
VPCOMEQUB (%rbx,%rax,4),%xmm0,%xmm7
VPCOMEQUB (%r12),%xmm11,%xmm0
VPCOMEQUB %xmm8,%xmm15,%xmm7
VPCOMEQUD (%rbx,%rax,4),%xmm0,%xmm7
VPCOMEQUD 0x6(%r9,%r11,2),%xmm15,%xmm15
VPCOMEQUD (%rbx,%rax,4),%xmm15,%xmm0
VPCOMEQUD (%rbx,%rax,4),%xmm0,%xmm15
VPCOMEQUD %xmm15,%xmm11,%xmm0
VPCOMEQUD (%rbx,%rax,4),%xmm15,%xmm15
VPCOMEQUD (%r12),%xmm11,%xmm7
VPCOMEQUD (%r12),%xmm0,%xmm7
VPCOMEQUQ (%r12),%xmm0,%xmm15
VPCOMEQUQ %xmm15,%xmm0,%xmm7
VPCOMEQUQ (%r12),%xmm11,%xmm15
VPCOMEQUQ (%rdi,%rcx),%xmm0,%xmm7
VPCOMEQUQ %xmm0,%xmm15,%xmm15
VPCOMEQUQ %xmm15,%xmm0,%xmm0
VPCOMEQUQ (%rdi,%rcx),%xmm11,%xmm7
VPCOMEQUQ %xmm0,%xmm0,%xmm7
VPCOMEQUW (%rdi,%rcx),%xmm15,%xmm15
VPCOMEQUW %xmm15,%xmm11,%xmm7
VPCOMEQUW 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMEQUW (%rdi,%rcx),%xmm0,%xmm15
VPCOMEQUW (%r12),%xmm15,%xmm0
VPCOMEQUW %xmm8,%xmm0,%xmm0
VPCOMEQUW (%r12),%xmm0,%xmm15
VPCOMEQUW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMEQW 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMEQW %xmm0,%xmm15,%xmm0
VPCOMEQW %xmm15,%xmm11,%xmm0
VPCOMEQW %xmm0,%xmm0,%xmm7
VPCOMEQW (%rbx,%rax,4),%xmm0,%xmm15
VPCOMEQW (%rbx,%rax,4),%xmm15,%xmm7
VPCOMEQW %xmm15,%xmm0,%xmm0
VPCOMEQW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQB (%r12),%xmm11,%xmm15
VPCOMNEQB 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMNEQB %xmm0,%xmm0,%xmm0
VPCOMNEQB (%r12),%xmm0,%xmm7
VPCOMNEQB (%rbx,%rax,4),%xmm0,%xmm15
VPCOMNEQB (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQB (%r12),%xmm11,%xmm0
VPCOMNEQB %xmm8,%xmm11,%xmm7
VPCOMNEQD (%rbx,%rax,4),%xmm15,%xmm7
VPCOMNEQD %xmm8,%xmm0,%xmm15
VPCOMNEQD %xmm8,%xmm15,%xmm0
VPCOMNEQD 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMNEQD %xmm15,%xmm11,%xmm0
VPCOMNEQD (%rdi,%rcx),%xmm0,%xmm0
VPCOMNEQD (%r12),%xmm0,%xmm0
VPCOMNEQD %xmm8,%xmm0,%xmm7
VPCOMNEQQ (%r12),%xmm0,%xmm15
VPCOMNEQQ %xmm8,%xmm15,%xmm0
VPCOMNEQQ (%rdi,%rcx),%xmm11,%xmm7
VPCOMNEQQ 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMNEQQ %xmm0,%xmm0,%xmm15
VPCOMNEQQ %xmm0,%xmm11,%xmm15
VPCOMNEQQ %xmm0,%xmm0,%xmm0
VPCOMNEQQ (%rbx,%rax,4),%xmm11,%xmm15
VPCOMNEQUB (%rdi,%rcx),%xmm11,%xmm15
VPCOMNEQUB (%rbx,%rax,4),%xmm0,%xmm0
VPCOMNEQUB %xmm15,%xmm11,%xmm7
VPCOMNEQUB %xmm8,%xmm11,%xmm15
VPCOMNEQUB %xmm0,%xmm0,%xmm0
VPCOMNEQUB 0x6(%r9,%r11,2),%xmm15,%xmm0
VPCOMNEQUB %xmm8,%xmm15,%xmm15
VPCOMNEQUB %xmm15,%xmm0,%xmm7
VPCOMNEQUD (%rbx,%rax,4),%xmm11,%xmm15
VPCOMNEQUD (%r12),%xmm15,%xmm15
VPCOMNEQUD %xmm15,%xmm11,%xmm15
VPCOMNEQUD %xmm15,%xmm15,%xmm7
VPCOMNEQUD %xmm0,%xmm15,%xmm15
VPCOMNEQUD %xmm15,%xmm11,%xmm7
VPCOMNEQUD (%r12),%xmm0,%xmm15
VPCOMNEQUD (%r12),%xmm0,%xmm0
VPCOMNEQUQ 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMNEQUQ (%r12),%xmm0,%xmm7
VPCOMNEQUQ %xmm0,%xmm11,%xmm15
VPCOMNEQUQ (%rdi,%rcx),%xmm11,%xmm15
VPCOMNEQUQ (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQUQ %xmm8,%xmm0,%xmm0
VPCOMNEQUQ (%r12),%xmm15,%xmm15
VPCOMNEQUQ %xmm15,%xmm15,%xmm15
VPCOMNEQUW %xmm15,%xmm0,%xmm7
VPCOMNEQUW %xmm8,%xmm0,%xmm7
VPCOMNEQUW %xmm15,%xmm15,%xmm7
VPCOMNEQUW %xmm8,%xmm11,%xmm7
VPCOMNEQUW %xmm8,%xmm0,%xmm15
VPCOMNEQUW (%rbx,%rax,4),%xmm15,%xmm15
VPCOMNEQUW 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMNEQUW %xmm15,%xmm0,%xmm15
VPCOMNEQW (%rbx,%rax,4),%xmm11,%xmm0
VPCOMNEQW (%r12),%xmm11,%xmm0
VPCOMNEQW %xmm15,%xmm11,%xmm0
VPCOMNEQW (%r12),%xmm15,%xmm15
VPCOMNEQW %xmm0,%xmm15,%xmm7
VPCOMNEQW %xmm0,%xmm0,%xmm15
VPCOMNEQW %xmm15,%xmm0,%xmm7
VPCOMNEQW (%rdi,%rcx),%xmm11,%xmm15
VPCOMFALSEB (%rdi,%rcx),%xmm11,%xmm15
VPCOMFALSEB %xmm0,%xmm15,%xmm7
VPCOMFALSEB (%r12),%xmm0,%xmm0
VPCOMFALSEB %xmm8,%xmm11,%xmm15
VPCOMFALSEB (%rbx,%rax,4),%xmm11,%xmm0
VPCOMFALSEB (%r12),%xmm15,%xmm0
VPCOMFALSEB (%r12),%xmm0,%xmm7
VPCOMFALSEB (%rdi,%rcx),%xmm15,%xmm15
VPCOMFALSED (%rdi,%rcx),%xmm11,%xmm15
VPCOMFALSED %xmm0,%xmm11,%xmm15
VPCOMFALSED 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMFALSED %xmm15,%xmm15,%xmm15
VPCOMFALSED %xmm0,%xmm11,%xmm0
VPCOMFALSED %xmm15,%xmm0,%xmm15
VPCOMFALSED %xmm0,%xmm11,%xmm7
VPCOMFALSED %xmm15,%xmm15,%xmm0
VPCOMFALSEQ %xmm15,%xmm15,%xmm7
VPCOMFALSEQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMFALSEQ (%rbx,%rax,4),%xmm0,%xmm15
VPCOMFALSEQ (%r12),%xmm0,%xmm0
VPCOMFALSEQ %xmm0,%xmm0,%xmm0
VPCOMFALSEQ %xmm8,%xmm0,%xmm15
VPCOMFALSEQ %xmm15,%xmm11,%xmm7
VPCOMFALSEQ (%r12),%xmm0,%xmm7
VPCOMFALSEUB (%rdi,%rcx),%xmm11,%xmm0
VPCOMFALSEUB (%rbx,%rax,4),%xmm0,%xmm7
VPCOMFALSEUB (%rbx,%rax,4),%xmm11,%xmm0
VPCOMFALSEUB 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMFALSEUB %xmm15,%xmm15,%xmm7
VPCOMFALSEUB %xmm8,%xmm0,%xmm15
VPCOMFALSEUB (%rbx,%rax,4),%xmm11,%xmm7
VPCOMFALSEUB %xmm8,%xmm11,%xmm15
VPCOMFALSEUD 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMFALSEUD %xmm8,%xmm15,%xmm0
VPCOMFALSEUD 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMFALSEUD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMFALSEUD %xmm0,%xmm0,%xmm15
VPCOMFALSEUD (%r12),%xmm15,%xmm0
VPCOMFALSEUD %xmm0,%xmm15,%xmm7
VPCOMFALSEUD %xmm15,%xmm0,%xmm7
VPCOMFALSEUQ %xmm15,%xmm0,%xmm15
VPCOMFALSEUQ %xmm15,%xmm0,%xmm7
VPCOMFALSEUQ (%r12),%xmm0,%xmm7
VPCOMFALSEUQ %xmm0,%xmm0,%xmm0
VPCOMFALSEUQ (%rdi,%rcx),%xmm0,%xmm15
VPCOMFALSEUQ 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMFALSEUQ (%rdi,%rcx),%xmm11,%xmm0
VPCOMFALSEUQ %xmm0,%xmm15,%xmm7
VPCOMFALSEUW %xmm0,%xmm0,%xmm0
VPCOMFALSEUW (%r12),%xmm11,%xmm7
VPCOMFALSEUW (%rdi,%rcx),%xmm15,%xmm15
VPCOMFALSEUW %xmm8,%xmm15,%xmm0
VPCOMFALSEUW (%rdi,%rcx),%xmm11,%xmm7
VPCOMFALSEUW %xmm15,%xmm11,%xmm0
VPCOMFALSEUW 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMFALSEUW (%rdi,%rcx),%xmm11,%xmm0
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMFALSEW 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMFALSEW %xmm15,%xmm15,%xmm15
VPCOMFALSEW %xmm8,%xmm11,%xmm7
VPCOMFALSEW (%rbx,%rax,4),%xmm0,%xmm0
VPCOMFALSEW (%r12),%xmm15,%xmm0
VPCOMTRUEB (%rbx,%rax,4),%xmm15,%xmm7
VPCOMTRUEB (%r12),%xmm0,%xmm7
VPCOMTRUEB %xmm0,%xmm15,%xmm7
VPCOMTRUEB (%rdi,%rcx),%xmm15,%xmm0
VPCOMTRUEB %xmm15,%xmm15,%xmm0
VPCOMTRUEB (%r12),%xmm15,%xmm0
VPCOMTRUEB %xmm15,%xmm11,%xmm15
VPCOMTRUEB (%rdi,%rcx),%xmm11,%xmm7
VPCOMTRUED (%r12),%xmm11,%xmm0
VPCOMTRUED (%r12),%xmm11,%xmm15
VPCOMTRUED %xmm15,%xmm11,%xmm0
VPCOMTRUED 0x6(%r9,%r11,2),%xmm0,%xmm7
VPCOMTRUED %xmm0,%xmm15,%xmm7
VPCOMTRUED %xmm0,%xmm11,%xmm7
VPCOMTRUED (%rbx,%rax,4),%xmm15,%xmm7
VPCOMTRUED (%r12),%xmm0,%xmm7
VPCOMTRUEQ %xmm15,%xmm0,%xmm7
VPCOMTRUEQ (%r12),%xmm15,%xmm7
VPCOMTRUEQ %xmm0,%xmm15,%xmm7
VPCOMTRUEQ 0x6(%r9,%r11,2),%xmm0,%xmm15
VPCOMTRUEQ %xmm15,%xmm11,%xmm0
VPCOMTRUEQ %xmm0,%xmm11,%xmm15
VPCOMTRUEQ (%rdi,%rcx),%xmm15,%xmm0
VPCOMTRUEQ 0x6(%r9,%r11,2),%xmm11,%xmm7
VPCOMTRUEUB %xmm0,%xmm15,%xmm15
VPCOMTRUEUB %xmm8,%xmm0,%xmm0
VPCOMTRUEUB %xmm15,%xmm0,%xmm0
VPCOMTRUEUB (%r12),%xmm15,%xmm7
VPCOMTRUEUB (%r12),%xmm0,%xmm7
VPCOMTRUEUB %xmm0,%xmm11,%xmm15
VPCOMTRUEUB 0x6(%r9,%r11,2),%xmm11,%xmm0
VPCOMTRUEUB %xmm15,%xmm11,%xmm15
VPCOMTRUEUD %xmm15,%xmm15,%xmm15
VPCOMTRUEUD 0x6(%r9,%r11,2),%xmm11,%xmm15
VPCOMTRUEUD %xmm0,%xmm15,%xmm15
VPCOMTRUEUD %xmm8,%xmm11,%xmm0
VPCOMTRUEUD 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMTRUEUD (%rbx,%rax,4),%xmm15,%xmm15
VPCOMTRUEUD %xmm15,%xmm0,%xmm7
VPCOMTRUEUD (%rdi,%rcx),%xmm15,%xmm15
VPCOMTRUEUQ (%rdi,%rcx),%xmm0,%xmm0
VPCOMTRUEUQ (%rbx,%rax,4),%xmm15,%xmm15
VPCOMTRUEUQ (%r12),%xmm11,%xmm0
VPCOMTRUEUQ (%rbx,%rax,4),%xmm0,%xmm7
VPCOMTRUEUQ %xmm8,%xmm11,%xmm15
VPCOMTRUEUQ 0x6(%r9,%r11,2),%xmm0,%xmm0
VPCOMTRUEUQ %xmm8,%xmm15,%xmm15
VPCOMTRUEUQ (%r12),%xmm15,%xmm15
VPCOMTRUEUW %xmm8,%xmm15,%xmm15
VPCOMTRUEUW 0x6(%r9,%r11,2),%xmm15,%xmm7
VPCOMTRUEUW (%r12),%xmm11,%xmm15
VPCOMTRUEUW (%rbx,%rax,4),%xmm11,%xmm15
VPCOMTRUEUW %xmm0,%xmm15,%xmm15
VPCOMTRUEUW %xmm8,%xmm0,%xmm7
VPCOMTRUEUW (%rbx,%rax,4),%xmm0,%xmm7
VPCOMTRUEUW (%rbx,%rax,4),%xmm15,%xmm7
VPCOMTRUEW (%r12),%xmm11,%xmm0
VPCOMTRUEW %xmm15,%xmm15,%xmm15
VPCOMTRUEW %xmm15,%xmm11,%xmm15
VPCOMTRUEW (%rbx,%rax,4),%xmm11,%xmm15
VPCOMTRUEW (%r12),%xmm15,%xmm7
VPCOMTRUEW %xmm8,%xmm15,%xmm0
VPCOMTRUEW %xmm0,%xmm0,%xmm7
VPCOMTRUEW (%rbx,%rax,4),%xmm0,%xmm15
|
tactcomplabs/xbgas-binutils-gdb
| 1,923
|
gas/testsuite/gas/i386/bundle-lock.s
|
# GAS testsuite input: exercises instruction bundling with 32-byte bundles
# (.bundle_align_mode 5 => 2^5).  A .bundle_lock'ed group must never straddle
# a bundle boundary; the assembler inserts padding so each group fits.
.bundle_align_mode 5
# We use these macros to test each pattern at every offset from
# bundle alignment, i.e. [0,31].
# Start a fresh 32-byte bundle, then place \offset bytes of 0xf4 filler so
# the locked group under test begins at a known offset within the bundle.
.macro offset_sequence size, offset
.p2align 5
sequence_\size\()_offset_\offset\():
.if \offset
.space \offset, 0xf4
.endif
test_sequence \size
.endm
# Drive offset_sequence over every possible in-bundle start offset [0,31].
.macro test_offsets size
offset_sequence \size, 0
offset_sequence \size, 1
offset_sequence \size, 2
offset_sequence \size, 3
offset_sequence \size, 4
offset_sequence \size, 5
offset_sequence \size, 6
offset_sequence \size, 7
offset_sequence \size, 8
offset_sequence \size, 9
offset_sequence \size, 10
offset_sequence \size, 11
offset_sequence \size, 12
offset_sequence \size, 13
offset_sequence \size, 14
offset_sequence \size, 15
offset_sequence \size, 16
offset_sequence \size, 17
offset_sequence \size, 18
offset_sequence \size, 19
offset_sequence \size, 20
offset_sequence \size, 21
offset_sequence \size, 22
offset_sequence \size, 23
offset_sequence \size, 24
offset_sequence \size, 25
offset_sequence \size, 26
offset_sequence \size, 27
offset_sequence \size, 28
offset_sequence \size, 29
offset_sequence \size, 30
offset_sequence \size, 31
.endm
# One locked group of \size one-byte instructions: a clc followed by
# \size - 1 cld's, wrapped in .bundle_lock/.bundle_unlock.
.macro test_sequence size
.bundle_lock
clc
.rept \size - 1
cld
.endr
.bundle_unlock
.endm
# Group sizes 1..32; 32 one-byte instructions exactly fill one bundle.
test_offsets 1
test_offsets 2
test_offsets 3
test_offsets 4
test_offsets 5
test_offsets 6
test_offsets 7
test_offsets 8
test_offsets 9
test_offsets 10
test_offsets 11
test_offsets 12
test_offsets 13
test_offsets 14
test_offsets 15
test_offsets 16
test_offsets 17
test_offsets 18
test_offsets 19
test_offsets 20
test_offsets 21
test_offsets 22
test_offsets 23
test_offsets 24
test_offsets 25
test_offsets 26
test_offsets 27
test_offsets 28
test_offsets 29
test_offsets 30
test_offsets 31
test_offsets 32
.p2align 5
# Nested .bundle_lock.
# Lock depth goes 1 -> 2 -> 1 -> 0; the whole nested region forms one group.
.bundle_lock
clc
.bundle_lock
cld
.bundle_unlock
clc
.bundle_unlock
.p2align 5
hlt
|
tactcomplabs/xbgas-binutils-gdb
| 1,762
|
gas/testsuite/gas/i386/x86-64-sib.s
|
#Test the special case of the index bits, 0x4, in SIB.
# GAS testsuite input: %riz / riz is the pseudo "no index" register, which
# forces emission of a SIB byte whose index field is 100b.  Each scale is
# tried with no base, %rbx, %rsp (base field 100b), and %r12 (REX.B + 100b),
# in both AT&T and Intel syntax.
.text
.allow_index_reg
foo:
mov -30,%ebx
mov -30(,%riz),%ebx
mov -30(,%riz,1),%eax
mov -30(,%riz,2),%eax
mov -30(,%riz,4),%eax
mov -30(,%riz,8),%eax
mov 30,%eax
mov 30(,%riz),%eax
mov 30(,%riz,1),%eax
mov 30(,%riz,2),%eax
mov 30(,%riz,4),%eax
mov 30(,%riz,8),%eax
mov (%rbx),%eax
mov (%rbx,%riz),%eax
mov (%rbx,%riz,1),%eax
mov (%rbx,%riz,2),%eax
mov (%rbx,%riz,4),%eax
mov (%rbx,%riz,8),%eax
mov (%rsp),%eax
mov (%rsp,%riz),%eax
mov (%rsp,%riz,1),%eax
mov (%rsp,%riz,2),%eax
mov (%rsp,%riz,4),%eax
mov (%rsp,%riz,8),%eax
mov (%r12),%eax
mov (%r12,%riz),%eax
mov (%r12,%riz,1),%eax
mov (%r12,%riz,2),%eax
mov (%r12,%riz,4),%eax
mov (%r12,%riz,8),%eax
# Same forms again in Intel syntax.
.intel_syntax noprefix
mov eax,DWORD PTR [riz*1-30]
mov eax,DWORD PTR [riz*2-30]
mov eax,DWORD PTR [riz*4-30]
mov eax,DWORD PTR [riz*8-30]
mov eax,DWORD PTR [riz*1+30]
mov eax,DWORD PTR [riz*2+30]
mov eax,DWORD PTR [riz*4+30]
mov eax,DWORD PTR [riz*8+30]
mov eax,DWORD PTR [rbx+riz]
mov eax,DWORD PTR [rbx+riz*1]
mov eax,DWORD PTR [rbx+riz*2]
mov eax,DWORD PTR [rbx+riz*4]
mov eax,DWORD PTR [rbx+riz*8]
mov eax,DWORD PTR [rsp]
mov eax,DWORD PTR [rsp+riz]
mov eax,DWORD PTR [rsp+riz*1]
mov eax,DWORD PTR [rsp+riz*2]
mov eax,DWORD PTR [rsp+riz*4]
mov eax,DWORD PTR [rsp+riz*8]
mov eax,DWORD PTR [r12]
mov eax,DWORD PTR [r12+riz]
mov eax,DWORD PTR [r12+riz*1]
mov eax,DWORD PTR [r12+riz*2]
mov eax,DWORD PTR [r12+riz*4]
mov eax,DWORD PTR [r12+riz*8]
|
tactcomplabs/xbgas-binutils-gdb
| 11,624
|
gas/testsuite/gas/i386/avx512f_vl-opts.s
|
# Check 32bit AVX512{F,VL} swap instructions
# GAS testsuite input: each reg-reg EVEX move is assembled both normally and
# with the .s mnemonic suffix, which selects the alternate (store-form)
# opcode with swapped ModRM operand roles.  Every form is tried with a
# merging mask {%k7} and a zeroing mask {%k7}{z}, for 128-bit (xmm) and
# 256-bit (ymm) vector lengths, in AT&T and then Intel syntax.
.allow_index_reg
.text
_start:
vmovapd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovapd.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovapd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovapd.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovapd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovapd.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovapd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovapd.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovapd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovapd.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovapd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovapd.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovapd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovapd.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovapd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovapd.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovaps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovaps.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovaps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovaps.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovaps %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovaps.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovaps %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovaps.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovaps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovaps.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovaps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovaps.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovaps %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovaps.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovaps %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovaps.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa32 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa32.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa32 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa32.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa32 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa32 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa32.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa32 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa32.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa32 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa32.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa64 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa64.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa64 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa64.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqa64 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqa64 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa64.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa64 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa64.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqa64 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqa64.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu32 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu32.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu32 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu32.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu32 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu32 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu32.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu32 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu32.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu32 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu32.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu64 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu64.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu64 %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu64.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovdqu64 %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovdqu64 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu64.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu64 %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu64.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovdqu64 %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovdqu64.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovupd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovupd.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovupd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovupd.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovupd %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovupd.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovupd %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovupd.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovupd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovupd.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovupd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovupd.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovupd %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovupd.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovupd %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovupd.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovups %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovups.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovups %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovups.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovups %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovups.s %xmm5, %xmm6{%k7} # AVX512{F,VL}
vmovups %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovups.s %xmm5, %xmm6{%k7}{z} # AVX512{F,VL}
vmovups %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovups.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovups %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovups.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovups %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovups.s %ymm5, %ymm6{%k7} # AVX512{F,VL}
vmovups %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
vmovups.s %ymm5, %ymm6{%k7}{z} # AVX512{F,VL}
# The same matrix of forms again, in Intel syntax (destination first).
.intel_syntax noprefix
vmovapd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovapd.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovapd.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovapd.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovapd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovapd.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovapd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovapd.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovapd.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovapd.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovapd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovapd.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovaps xmm6{k7}, xmm5 # AVX512{F,VL}
vmovaps.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovaps.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}, xmm5 # AVX512{F,VL}
vmovaps.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovaps xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovaps.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovaps ymm6{k7}, ymm5 # AVX512{F,VL}
vmovaps.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovaps.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}, ymm5 # AVX512{F,VL}
vmovaps.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovaps ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovaps.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa32.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa32.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa32.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa32.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa32.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa32.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa32.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa32.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa64.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa64.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa64.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqa64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa64.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa64.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa64.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa64.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqa64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqa64.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu32.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu32.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu32.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu32 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu32.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu32.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu32.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu32.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu32 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu32.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu64.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu64.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu64.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovdqu64 xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu64.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu64.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu64.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu64.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovdqu64 ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovdqu64.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovupd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovupd.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovupd.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}, xmm5 # AVX512{F,VL}
vmovupd.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovupd xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovupd.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovupd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovupd.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovupd.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}, ymm5 # AVX512{F,VL}
vmovupd.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovupd ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovupd.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovups xmm6{k7}, xmm5 # AVX512{F,VL}
vmovups.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovups.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}, xmm5 # AVX512{F,VL}
vmovups.s xmm6{k7}, xmm5 # AVX512{F,VL}
vmovups xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovups.s xmm6{k7}{z}, xmm5 # AVX512{F,VL}
vmovups ymm6{k7}, ymm5 # AVX512{F,VL}
vmovups.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovups.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}, ymm5 # AVX512{F,VL}
vmovups.s ymm6{k7}, ymm5 # AVX512{F,VL}
vmovups ymm6{k7}{z}, ymm5 # AVX512{F,VL}
vmovups.s ymm6{k7}{z}, ymm5 # AVX512{F,VL}
|
tactcomplabs/xbgas-binutils-gdb
| 5,564
|
gas/testsuite/gas/i386/x86-64-opts.s
|
# Check 64bit instructions with encoding options
# GAS testsuite input: each reg-reg instruction is assembled both normally
# and with the .s mnemonic suffix, which selects the alternate opcode with
# swapped ModRM reg/rm operand roles.  Integer ops are covered at 8/16/32/64
# bits with and without explicit size suffixes, then SSE/AVX/MMX reg-reg
# moves; the whole matrix repeats in Intel syntax.
.allow_index_reg
.text
_start:
# Tests for op reg, reg
add %dl,%cl
add.s %dl,%cl
add %dx,%cx
add.s %dx,%cx
add %edx,%ecx
add.s %edx,%ecx
addb %dl,%cl
addb.s %dl,%cl
addw %dx,%cx
addw.s %dx,%cx
addl %edx,%ecx
addl.s %edx,%ecx
add %rdx,%rcx
add.s %rdx,%rcx
addq %rdx,%rcx
addq.s %rdx,%rcx
adc %dl,%cl
adc.s %dl,%cl
adc %dx,%cx
adc.s %dx,%cx
adc %edx,%ecx
adc.s %edx,%ecx
adcb %dl,%cl
adcb.s %dl,%cl
adcw %dx,%cx
adcw.s %dx,%cx
adcl %edx,%ecx
adcl.s %edx,%ecx
adc %rdx,%rcx
adc.s %rdx,%rcx
adcq %rdx,%rcx
adcq.s %rdx,%rcx
and %dl,%cl
and.s %dl,%cl
and %dx,%cx
and.s %dx,%cx
and %edx,%ecx
and.s %edx,%ecx
andb %dl,%cl
andb.s %dl,%cl
andw %dx,%cx
andw.s %dx,%cx
andl %edx,%ecx
andl.s %edx,%ecx
and %rdx,%rcx
and.s %rdx,%rcx
andq %rdx,%rcx
andq.s %rdx,%rcx
cmp %dl,%cl
cmp.s %dl,%cl
cmp %dx,%cx
cmp.s %dx,%cx
cmp %edx,%ecx
cmp.s %edx,%ecx
cmpb %dl,%cl
cmpb.s %dl,%cl
cmpw %dx,%cx
cmpw.s %dx,%cx
cmpl %edx,%ecx
cmpl.s %edx,%ecx
cmp %rdx,%rcx
cmp.s %rdx,%rcx
cmpq %rdx,%rcx
cmpq.s %rdx,%rcx
mov %dl,%cl
mov.s %dl,%cl
mov %dx,%cx
mov.s %dx,%cx
mov %edx,%ecx
mov.s %edx,%ecx
movb %dl,%cl
movb.s %dl,%cl
movw %dx,%cx
movw.s %dx,%cx
movl %edx,%ecx
movl.s %edx,%ecx
mov %rdx,%rcx
mov.s %rdx,%rcx
movq %rdx,%rcx
movq.s %rdx,%rcx
or %dl,%cl
or.s %dl,%cl
or %dx,%cx
or.s %dx,%cx
or %edx,%ecx
or.s %edx,%ecx
orb %dl,%cl
orb.s %dl,%cl
orw %dx,%cx
orw.s %dx,%cx
orl %edx,%ecx
orl.s %edx,%ecx
or %rdx,%rcx
or.s %rdx,%rcx
orq %rdx,%rcx
orq.s %rdx,%rcx
sbb %dl,%cl
sbb.s %dl,%cl
sbb %dx,%cx
sbb.s %dx,%cx
sbb %edx,%ecx
sbb.s %edx,%ecx
sbbb %dl,%cl
sbbb.s %dl,%cl
sbbw %dx,%cx
sbbw.s %dx,%cx
sbbl %edx,%ecx
sbbl.s %edx,%ecx
sbb %rdx,%rcx
sbb.s %rdx,%rcx
sbbq %rdx,%rcx
sbbq.s %rdx,%rcx
sub %dl,%cl
sub.s %dl,%cl
sub %dx,%cx
sub.s %dx,%cx
sub %edx,%ecx
sub.s %edx,%ecx
subb %dl,%cl
subb.s %dl,%cl
subw %dx,%cx
subw.s %dx,%cx
subl %edx,%ecx
subl.s %edx,%ecx
sub %rdx,%rcx
sub.s %rdx,%rcx
subq %rdx,%rcx
subq.s %rdx,%rcx
xor %dl,%cl
xor.s %dl,%cl
xor %dx,%cx
xor.s %dx,%cx
xor %edx,%ecx
xor.s %edx,%ecx
xorb %dl,%cl
xorb.s %dl,%cl
xorw %dx,%cx
xorw.s %dx,%cx
xorl %edx,%ecx
xorl.s %edx,%ecx
xor %rdx,%rcx
xor.s %rdx,%rcx
xorq %rdx,%rcx
xorq.s %rdx,%rcx
# Tests for op ymm, ymm
vmovapd %ymm4,%ymm6
vmovapd.s %ymm4,%ymm6
vmovaps %ymm4,%ymm6
vmovaps.s %ymm4,%ymm6
vmovdqa %ymm4,%ymm6
vmovdqa.s %ymm4,%ymm6
vmovdqu %ymm4,%ymm6
vmovdqu.s %ymm4,%ymm6
vmovupd %ymm4,%ymm6
vmovupd.s %ymm4,%ymm6
vmovups %ymm4,%ymm6
vmovups.s %ymm4,%ymm6
# Tests for op xmm, xmm
movapd %xmm4,%xmm6
movapd.s %xmm4,%xmm6
movaps %xmm4,%xmm6
movaps.s %xmm4,%xmm6
movdqa %xmm4,%xmm6
movdqa.s %xmm4,%xmm6
movdqu %xmm4,%xmm6
movdqu.s %xmm4,%xmm6
movq %xmm4,%xmm6
movq.s %xmm4,%xmm6
movsd %xmm4,%xmm6
movsd.s %xmm4,%xmm6
movss %xmm4,%xmm6
movss.s %xmm4,%xmm6
movupd %xmm4,%xmm6
movupd.s %xmm4,%xmm6
movups %xmm4,%xmm6
movups.s %xmm4,%xmm6
vmovapd %xmm4,%xmm6
vmovapd.s %xmm4,%xmm6
vmovaps %xmm4,%xmm6
vmovaps.s %xmm4,%xmm6
vmovdqa %xmm4,%xmm6
vmovdqa.s %xmm4,%xmm6
vmovdqu %xmm4,%xmm6
vmovdqu.s %xmm4,%xmm6
vmovq %xmm4,%xmm6
vmovq.s %xmm4,%xmm6
vmovupd %xmm4,%xmm6
vmovupd.s %xmm4,%xmm6
vmovups %xmm4,%xmm6
vmovups.s %xmm4,%xmm6
# Tests for op xmm, xmm, xmm
vmovsd %xmm4,%xmm6,%xmm2
vmovsd.s %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
vmovss.s %xmm4,%xmm6,%xmm2
# Tests for op mm, mm
movq %mm0,%mm4
movq.s %mm0,%mm4
# Same matrix again in Intel syntax (destination first; sizes implied
# by register operands, so no suffixed forms here).
.intel_syntax noprefix
# Tests for op reg, reg
add cl,dl
add.s cl,dl
add cx,dx
add.s cx,dx
add ecx,edx
add.s ecx,edx
add rcx,rdx
add.s rcx,rdx
adc cl,dl
adc.s cl,dl
adc cx,dx
adc.s cx,dx
adc ecx,edx
adc.s ecx,edx
adc rcx,rdx
adc.s rcx,rdx
and cl,dl
and.s cl,dl
and cx,dx
and.s cx,dx
and ecx,edx
and.s ecx,edx
and rcx,rdx
and.s rcx,rdx
cmp cl,dl
cmp.s cl,dl
cmp cx,dx
cmp.s cx,dx
cmp ecx,edx
cmp.s ecx,edx
cmp rcx,rdx
cmp.s rcx,rdx
mov cl,dl
mov.s cl,dl
mov cx,dx
mov.s cx,dx
mov ecx,edx
mov.s ecx,edx
mov rcx,rdx
mov.s rcx,rdx
or cl,dl
or.s cl,dl
or cx,dx
or.s cx,dx
or ecx,edx
or.s ecx,edx
or rcx,rdx
or.s rcx,rdx
sbb cl,dl
sbb.s cl,dl
sbb cx,dx
sbb.s cx,dx
sbb ecx,edx
sbb.s ecx,edx
sbb rcx,rdx
sbb.s rcx,rdx
sub cl,dl
sub.s cl,dl
sub cx,dx
sub.s cx,dx
sub ecx,edx
sub.s ecx,edx
sub rcx,rdx
sub.s rcx,rdx
xor cl,dl
xor.s cl,dl
xor cx,dx
xor.s cx,dx
xor ecx,edx
xor.s ecx,edx
xor rcx,rdx
xor.s rcx,rdx
# Tests for op ymm, ymm
vmovapd ymm6,ymm4
vmovapd.s ymm6,ymm4
vmovaps ymm6,ymm4
vmovaps.s ymm6,ymm4
vmovdqa ymm6,ymm4
vmovdqa.s ymm6,ymm4
vmovdqu ymm6,ymm4
vmovdqu.s ymm6,ymm4
vmovupd ymm6,ymm4
vmovupd.s ymm6,ymm4
vmovups ymm6,ymm4
vmovups.s ymm6,ymm4
# Tests for op xmm, xmm
movapd xmm6,xmm4
movapd.s xmm6,xmm4
movaps xmm6,xmm4
movaps.s xmm6,xmm4
movdqa xmm6,xmm4
movdqa.s xmm6,xmm4
movdqu xmm6,xmm4
movdqu.s xmm6,xmm4
movq xmm6,xmm4
movq.s xmm6,xmm4
movsd xmm6,xmm4
movsd.s xmm6,xmm4
movss xmm6,xmm4
movss.s xmm6,xmm4
movupd xmm6,xmm4
movupd.s xmm6,xmm4
movups xmm6,xmm4
movups.s xmm6,xmm4
vmovapd xmm6,xmm4
vmovapd.s xmm6,xmm4
vmovaps xmm6,xmm4
vmovaps.s xmm6,xmm4
vmovdqa xmm6,xmm4
vmovdqa.s xmm6,xmm4
vmovdqu xmm6,xmm4
vmovdqu.s xmm6,xmm4
vmovq xmm6,xmm4
vmovq.s xmm6,xmm4
vmovupd xmm6,xmm4
vmovupd.s xmm6,xmm4
vmovups xmm6,xmm4
vmovups.s xmm6,xmm4
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm4
vmovsd.s xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
vmovss.s xmm2,xmm6,xmm4
# Tests for op mm, mm
movq mm4,mm0
movq.s mm4,mm0
|
tactcomplabs/xbgas-binutils-gdb
| 2,429
|
gas/testsuite/gas/i386/unspec.s
|
.text
# GAS testsuite input: AVX/XOP/gather forms with deliberately mismatched
# xmm/ymm operand combinations, so the vector length is not uniquely
# specified by the operands.  NOTE(review): presumably checks how the
# assembler resolves or rejects these — confirm against the matching
# expectation files in the testsuite.
unspec:
vblendvpd %xmm0, (%eax), %ymm0, %ymm0
vblendvpd %ymm0, (%eax), %xmm0, %xmm0
vblendvps %xmm0, (%eax), %ymm0, %ymm0
vblendvps %ymm0, (%eax), %xmm0, %xmm0
vfmaddpd %xmm0, (%eax), %ymm0, %ymm0
vfmaddpd %ymm0, (%eax), %xmm0, %xmm0
vfmaddps %xmm0, (%eax), %ymm0, %ymm0
vfmaddps %ymm0, (%eax), %xmm0, %xmm0
vgatherdpd %xmm0, (%eax,%xmm1), %ymm2
vgatherdpd %ymm0, (%eax,%xmm1), %xmm2
vgatherdps %xmm0, (%eax,%xmm1), %ymm2
vgatherdps %ymm0, (%eax,%ymm1), %xmm2
vgatherqpd %xmm0, (%eax,%xmm1), %ymm2
vgatherqpd %ymm0, (%eax,%ymm1), %xmm2
vgatherqps %xmm0, (%eax,%xmm1), %ymm2
vgatherqps %xmm0, (%eax,%ymm1), %ymm2
vpblendvb %xmm0, (%eax), %ymm0, %ymm0
vpblendvb %ymm0, (%eax), %xmm0, %xmm0
vpcmov %xmm0, (%eax), %ymm0, %ymm0
vpcmov %ymm0, (%eax), %xmm0, %xmm0
vpermil2pd $0, %xmm0, (%eax), %ymm0, %ymm0
vpermil2pd $0, %ymm0, (%eax), %xmm0, %xmm0
vpermil2ps $0, %xmm0, (%eax), %ymm0, %ymm0
vpermil2ps $0, %ymm0, (%eax), %xmm0, %xmm0
vpgatherdd %xmm0, (%eax,%xmm1), %ymm2
vpgatherdd %ymm0, (%eax,%ymm1), %xmm2
vpgatherdq %xmm0, (%eax,%xmm1), %ymm2
vpgatherdq %ymm0, (%eax,%xmm1), %xmm2
vpgatherqd %xmm0, (%eax,%xmm1), %ymm2
vpgatherqd %xmm0, (%eax,%ymm1), %ymm2
vpgatherqq %xmm0, (%eax,%xmm1), %ymm2
vpgatherqq %ymm0, (%eax,%ymm1), %xmm2
# Same mismatched combinations again, in Intel syntax.
.intel_syntax noprefix
vblendvpd xmm0, xmm0, [eax], ymm0
vblendvpd ymm0, ymm0, [eax], xmm0
vblendvps xmm0, xmm0, [eax], ymm0
vblendvps ymm0, ymm0, [eax], xmm0
vfmaddpd xmm0, xmm0, [eax], ymm0
vfmaddpd ymm0, ymm0, [eax], xmm0
vfmaddps xmm0, xmm0, [eax], ymm0
vfmaddps ymm0, ymm0, [eax], xmm0
vgatherdpd xmm0, [eax+xmm1], ymm2
vgatherdpd ymm0, [eax+xmm1], xmm2
vgatherdps xmm0, [eax+xmm1], ymm2
vgatherdps ymm0, [eax+ymm1], xmm2
vgatherqpd xmm0, [eax+xmm1], ymm2
vgatherqpd ymm0, [eax+ymm1], xmm2
vgatherqps xmm0, [eax+xmm1], ymm2
vgatherqps xmm0, [eax+ymm1], ymm2
vpblendvb xmm0, xmm0, [eax], ymm0
vpblendvb ymm0, ymm0, [eax], xmm0
vpcmov xmm0, xmm0, [eax], ymm0
vpcmov ymm0, ymm0, [eax], xmm0
vpermil2pd xmm0, xmm0, [eax], ymm0, 0
vpermil2pd ymm0, ymm0, [eax], xmm0, 0
vpermil2ps xmm0, xmm0, [eax], ymm0, 0
vpermil2ps ymm0, ymm0, [eax], xmm0, 0
vpgatherdd xmm0, [eax+xmm1], ymm2
vpgatherdd ymm0, [eax+ymm1], xmm2
vpgatherdq xmm0, [eax+xmm1], ymm2
vpgatherdq ymm0, [eax+xmm1], xmm2
vpgatherqd xmm0, [eax+xmm1], ymm2
vpgatherqd xmm0, [eax+ymm1], ymm2
vpgatherqq xmm0, [eax+xmm1], ymm2
vpgatherqq ymm0, [eax+ymm1], xmm2
|
tactcomplabs/xbgas-binutils-gdb
| 3,832
|
gas/testsuite/gas/i386/intelbad.s
|
# GAS testsuite input of deliberately INVALID Intel-syntax operands: wrong
# size overrides, bad expressions, and malformed memory references.  Every
# instruction here is expected to be rejected by the assembler; NOTE(review):
# presumably matched against an expected-errors listing — do not "fix" lines.
.intel_syntax noprefix
.text
start:
# Size override disagrees with what the instruction accepts.
add eax, byte ptr [eax]
add eax, qword ptr [eax]
add [eax], 1
add qword ptr [eax], 1
addpd xmm0, dword ptr [eax]
addpd xmm0, qword ptr [eax]
addpd xmm0, tbyte ptr [eax]
addps xmm0, dword ptr [eax]
addps xmm0, qword ptr [eax]
addps xmm0, tbyte ptr [eax]
addsd xmm0, dword ptr [eax]
addsd xmm0, tbyte ptr [eax]
addsd xmm0, xmmword ptr [eax]
addss xmm0, qword ptr [eax]
addss xmm0, tbyte ptr [eax]
addss xmm0, xmmword ptr [eax]
bound eax, dword ptr [ebx]
bound ax, word ptr [ebx]
call byte ptr [eax]
call qword ptr [eax]
call tbyte ptr [eax]
call xword ptr [eax]
cmps [esi], es:[edi]
cmps dword ptr [esi], word ptr es:[edi]
cmpxchg8b dword ptr [eax]
fadd [eax]
fadd word ptr [eax]
fadd tbyte ptr [eax]
fbld byte ptr [eax]
fbld word ptr [eax]
fbstp dword ptr [eax]
fbstp qword ptr [eax]
fiadd [eax]
fiadd byte ptr [eax]
fild [eax]
fild byte ptr [eax]
fild tbyte ptr [eax]
fist [eax]
fist byte ptr [eax]
fist qword ptr [eax]
fistp [eax]
fistp byte ptr [eax]
fisttp [eax]
fisttp byte ptr [eax]
fld [eax]
fld word ptr [eax]
fldcw dword ptr [eax]
fst [eax]
fst word ptr [eax]
fst tbyte ptr [eax]
fstp [eax]
fstp word ptr [eax]
ins es:[edi], dx
lds ax, word ptr [eax]
lds eax, dword ptr [eax]
lods [esi]
movs es:[edi], [esi]
movs dword ptr es:[edi], word ptr [esi]
movsx eax, [eax]
movsx eax, dword ptr [eax]
outs dx, [esi]
paddb mm0, dword ptr [eax]
paddb mm0, xmmword ptr [eax]
paddb xmm0, dword ptr [eax]
paddb xmm0, qword ptr [eax]
pinsrw mm0, byte ptr [eax], 3
pinsrw mm0, dword ptr [eax], 3
pinsrw mm0, qword ptr [eax], 3
pinsrw xmm0, dword ptr [eax], 7
pinsrw xmm0, qword ptr [eax], 7
pinsrw xmm0, xmmword ptr [eax], 7
push byte ptr [eax]
push qword ptr [eax]
scas es:[edi]
#XXX? shl eax
stos es:[edi]
xlat word ptr [ebx]
#XXX? xlatb [ebx]
# expressions
#XXX? push ~ 1
#XXX? push 1 % 1
#XXX? push 1 << 1
#XXX? push 1 >> 1
#XXX? push 1 & 1
#XXX? push 1 ^ 1
#XXX? push 1 | 1
push 1 1
push 1 +
push 1 * * 1
# memory references
mov eax, [ecx*3]
mov eax, [3*ecx]
mov eax, [-1*ecx + 1]
mov eax, [esp + esp]
mov eax, [eax - 1*ecx + 1]
mov eax, [(eax-1) * (eax-1)]
mov eax, [eax-1 xor eax-1]
mov eax, [(eax-1) xor (eax-1)]
mov eax, [not eax + 1]
mov eax, [ecx*2 + edx*4]
mov eax, [2*ecx + 4*edx]
mov eax, [eax]1[ecx] # ugly diag
mov eax, [eax][ecx]1 # ugly diag
mov eax, eax[ecx] # ugly diag
mov eax, es[ecx]
mov eax, cr0[ecx]
mov eax, [eax]ecx
mov eax, [eax]+ecx
mov eax, [eax]+ecx*2
mov eax, [eax]+2*ecx
mov eax, [[eax]ecx]
mov eax, eax:[ecx]
# Non-address registers used as memory operands.
mov eax, [ss]
mov eax, [st]
mov eax, [mm0]
mov eax, [xmm0]
mov eax, [cr0]
mov eax, [dr7]
mov eax, [ss+edx]
mov eax, [st+edx]
mov eax, [mm0+edx]
mov eax, [xmm0+edx]
mov eax, [cr0+edx]
mov eax, [dr7+edx]
mov eax, [edx+ss]
mov eax, [edx+st]
mov eax, [edx+cr0]
mov eax, [edx+dr7]
mov eax, [edx+mm0]
mov eax, [edx+xmm0]
# Scaled index is invalid with 16-bit addressing registers.
lea eax, [bx+si*1]
lea eax, [bp+si*2]
lea eax, [bx+di*4]
lea eax, [bp+di*8]
lea eax, [bx+1*si]
lea eax, [bp+2*si]
lea eax, [bx+4*di]
lea eax, [bp+8*di]
mov eax, [ah]
mov eax, [ax]
mov eax, [eax+bx]
mov eax, offset [eax]
mov eax, offset eax
mov eax, offset offset eax
mov eax, offset [1*eax]
mov eax, offset 1*eax
#XXX? mov eax, offset x[eax]
#XXX? mov eax, offset [x][eax]
mov eax, flat x
mov eax, flat [x]
mov eax, es:eax
mov eax, eax[ebp]
movzx eax, 1 ptr [eax]
movzx eax, byte word ptr [eax]
movzx eax, [byte ptr eax]
movzx eax, byte [ptr [eax]]
movzx eax, byte ptr [gs:eax]
movzx eax, byte gs:ptr [eax]
movzx eax, byte ptr 1
#XXX? movzx eax, byte ptr [1]
mov eax, 3:5
# Far-pointer loads with wrong operand sizes.
lds eax, byte ptr [eax]
les eax, word ptr [eax]
lfs eax, dword ptr [eax]
lgs eax, qword ptr [eax]
lss eax, tbyte ptr [eax]
# near/far overrides are meaningless on x87 memory operands.
fld near ptr [ebx]
fst far ptr [ebx]
fild far ptr [ebx]
fist near ptr [ebx]
# --- file boundary (extraction artifact, neutralized as comments) ---
# repo: tactcomplabs/xbgas-binutils-gdb (5,559 bytes)
# file: gas/testsuite/gas/i386/avx512cd.s
# Check 32bit AVX512CD instructions
.allow_index_reg
.text
_start:
	# Each instruction is exercised with: register source, merge-masking
	# ({%k7}), zero-masking ({%k7}{z}), plain/indexed memory, and element
	# broadcast ({1toN}).  "Disp8" marks displacements encodable via the
	# EVEX compressed disp8*N form: +/-8128 = 127*64 (full 64-byte zmm
	# load), +/-508 = 127*4 (dword broadcast), +/-1016 = 127*8 (qword
	# broadcast); the paired next-larger value forces a full disp32.
	vpconflictd %zmm5, %zmm6	 # AVX512CD
	vpconflictd %zmm5, %zmm6{%k7}	 # AVX512CD
	vpconflictd %zmm5, %zmm6{%k7}{z}	 # AVX512CD
	vpconflictd (%ecx), %zmm6	 # AVX512CD
	vpconflictd -123456(%esp,%esi,8), %zmm6	 # AVX512CD
	vpconflictd (%eax){1to16}, %zmm6	 # AVX512CD
	vpconflictd 8128(%edx), %zmm6	 # AVX512CD Disp8
	vpconflictd 8192(%edx), %zmm6	 # AVX512CD
	vpconflictd -8192(%edx), %zmm6	 # AVX512CD Disp8
	vpconflictd -8256(%edx), %zmm6	 # AVX512CD
	vpconflictd 508(%edx){1to16}, %zmm6	 # AVX512CD Disp8
	vpconflictd 512(%edx){1to16}, %zmm6	 # AVX512CD
	vpconflictd -512(%edx){1to16}, %zmm6	 # AVX512CD Disp8
	vpconflictd -516(%edx){1to16}, %zmm6	 # AVX512CD
	vpconflictq %zmm5, %zmm6	 # AVX512CD
	vpconflictq %zmm5, %zmm6{%k7}	 # AVX512CD
	vpconflictq %zmm5, %zmm6{%k7}{z}	 # AVX512CD
	vpconflictq (%ecx), %zmm6	 # AVX512CD
	vpconflictq -123456(%esp,%esi,8), %zmm6	 # AVX512CD
	vpconflictq (%eax){1to8}, %zmm6	 # AVX512CD
	vpconflictq 8128(%edx), %zmm6	 # AVX512CD Disp8
	vpconflictq 8192(%edx), %zmm6	 # AVX512CD
	vpconflictq -8192(%edx), %zmm6	 # AVX512CD Disp8
	vpconflictq -8256(%edx), %zmm6	 # AVX512CD
	vpconflictq 1016(%edx){1to8}, %zmm6	 # AVX512CD Disp8
	vpconflictq 1024(%edx){1to8}, %zmm6	 # AVX512CD
	vpconflictq -1024(%edx){1to8}, %zmm6	 # AVX512CD Disp8
	vpconflictq -1032(%edx){1to8}, %zmm6	 # AVX512CD
	vplzcntd %zmm5, %zmm6	 # AVX512CD
	vplzcntd %zmm5, %zmm6{%k7}	 # AVX512CD
	vplzcntd %zmm5, %zmm6{%k7}{z}	 # AVX512CD
	vplzcntd (%ecx), %zmm6	 # AVX512CD
	vplzcntd -123456(%esp,%esi,8), %zmm6	 # AVX512CD
	vplzcntd (%eax){1to16}, %zmm6	 # AVX512CD
	vplzcntd 8128(%edx), %zmm6	 # AVX512CD Disp8
	vplzcntd 8192(%edx), %zmm6	 # AVX512CD
	vplzcntd -8192(%edx), %zmm6	 # AVX512CD Disp8
	vplzcntd -8256(%edx), %zmm6	 # AVX512CD
	vplzcntd 508(%edx){1to16}, %zmm6	 # AVX512CD Disp8
	vplzcntd 512(%edx){1to16}, %zmm6	 # AVX512CD
	vplzcntd -512(%edx){1to16}, %zmm6	 # AVX512CD Disp8
	vplzcntd -516(%edx){1to16}, %zmm6	 # AVX512CD
	vplzcntq %zmm5, %zmm6	 # AVX512CD
	vplzcntq %zmm5, %zmm6{%k7}	 # AVX512CD
	vplzcntq %zmm5, %zmm6{%k7}{z}	 # AVX512CD
	vplzcntq (%ecx), %zmm6	 # AVX512CD
	vplzcntq -123456(%esp,%esi,8), %zmm6	 # AVX512CD
	vplzcntq (%eax){1to8}, %zmm6	 # AVX512CD
	vplzcntq 8128(%edx), %zmm6	 # AVX512CD Disp8
	vplzcntq 8192(%edx), %zmm6	 # AVX512CD
	vplzcntq -8192(%edx), %zmm6	 # AVX512CD Disp8
	vplzcntq -8256(%edx), %zmm6	 # AVX512CD
	vplzcntq 1016(%edx){1to8}, %zmm6	 # AVX512CD Disp8
	vplzcntq 1024(%edx){1to8}, %zmm6	 # AVX512CD
	vplzcntq -1024(%edx){1to8}, %zmm6	 # AVX512CD Disp8
	vplzcntq -1032(%edx){1to8}, %zmm6	 # AVX512CD
	vpbroadcastmw2d %k6, %zmm6	 # AVX512CD
	vpbroadcastmb2q %k6, %zmm6	 # AVX512CD
	# Same coverage repeated in Intel syntax: masking is written {k7}/{z}
	# on the destination and broadcast as a {1toN} suffix on the memory
	# operand; Disp8 markers match the AT&T half above.
	.intel_syntax noprefix
	vpconflictd zmm6, zmm5	 # AVX512CD
	vpconflictd zmm6{k7}, zmm5	 # AVX512CD
	vpconflictd zmm6{k7}{z}, zmm5	 # AVX512CD
	vpconflictd zmm6, ZMMWORD PTR [ecx]	 # AVX512CD
	vpconflictd zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512CD
	vpconflictd zmm6, [eax]{1to16}	 # AVX512CD
	vpconflictd zmm6, ZMMWORD PTR [edx+8128]	 # AVX512CD Disp8
	vpconflictd zmm6, ZMMWORD PTR [edx+8192]	 # AVX512CD
	vpconflictd zmm6, ZMMWORD PTR [edx-8192]	 # AVX512CD Disp8
	vpconflictd zmm6, ZMMWORD PTR [edx-8256]	 # AVX512CD
	vpconflictd zmm6, [edx+508]{1to16}	 # AVX512CD Disp8
	vpconflictd zmm6, [edx+512]{1to16}	 # AVX512CD
	vpconflictd zmm6, [edx-512]{1to16}	 # AVX512CD Disp8
	vpconflictd zmm6, [edx-516]{1to16}	 # AVX512CD
	vpconflictq zmm6, zmm5	 # AVX512CD
	vpconflictq zmm6{k7}, zmm5	 # AVX512CD
	vpconflictq zmm6{k7}{z}, zmm5	 # AVX512CD
	vpconflictq zmm6, ZMMWORD PTR [ecx]	 # AVX512CD
	vpconflictq zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512CD
	vpconflictq zmm6, [eax]{1to8}	 # AVX512CD
	vpconflictq zmm6, ZMMWORD PTR [edx+8128]	 # AVX512CD Disp8
	vpconflictq zmm6, ZMMWORD PTR [edx+8192]	 # AVX512CD
	vpconflictq zmm6, ZMMWORD PTR [edx-8192]	 # AVX512CD Disp8
	vpconflictq zmm6, ZMMWORD PTR [edx-8256]	 # AVX512CD
	vpconflictq zmm6, [edx+1016]{1to8}	 # AVX512CD Disp8
	vpconflictq zmm6, [edx+1024]{1to8}	 # AVX512CD
	vpconflictq zmm6, [edx-1024]{1to8}	 # AVX512CD Disp8
	vpconflictq zmm6, [edx-1032]{1to8}	 # AVX512CD
	vplzcntd zmm6, zmm5	 # AVX512CD
	vplzcntd zmm6{k7}, zmm5	 # AVX512CD
	vplzcntd zmm6{k7}{z}, zmm5	 # AVX512CD
	vplzcntd zmm6, ZMMWORD PTR [ecx]	 # AVX512CD
	vplzcntd zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512CD
	vplzcntd zmm6, [eax]{1to16}	 # AVX512CD
	vplzcntd zmm6, ZMMWORD PTR [edx+8128]	 # AVX512CD Disp8
	vplzcntd zmm6, ZMMWORD PTR [edx+8192]	 # AVX512CD
	vplzcntd zmm6, ZMMWORD PTR [edx-8192]	 # AVX512CD Disp8
	vplzcntd zmm6, ZMMWORD PTR [edx-8256]	 # AVX512CD
	vplzcntd zmm6, [edx+508]{1to16}	 # AVX512CD Disp8
	vplzcntd zmm6, [edx+512]{1to16}	 # AVX512CD
	vplzcntd zmm6, [edx-512]{1to16}	 # AVX512CD Disp8
	vplzcntd zmm6, [edx-516]{1to16}	 # AVX512CD
	vplzcntq zmm6, zmm5	 # AVX512CD
	vplzcntq zmm6{k7}, zmm5	 # AVX512CD
	vplzcntq zmm6{k7}{z}, zmm5	 # AVX512CD
	vplzcntq zmm6, ZMMWORD PTR [ecx]	 # AVX512CD
	vplzcntq zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512CD
	vplzcntq zmm6, [eax]{1to8}	 # AVX512CD
	vplzcntq zmm6, ZMMWORD PTR [edx+8128]	 # AVX512CD Disp8
	vplzcntq zmm6, ZMMWORD PTR [edx+8192]	 # AVX512CD
	vplzcntq zmm6, ZMMWORD PTR [edx-8192]	 # AVX512CD Disp8
	vplzcntq zmm6, ZMMWORD PTR [edx-8256]	 # AVX512CD
	vplzcntq zmm6, [edx+1016]{1to8}	 # AVX512CD Disp8
	vplzcntq zmm6, [edx+1024]{1to8}	 # AVX512CD
	vplzcntq zmm6, [edx-1024]{1to8}	 # AVX512CD Disp8
	vplzcntq zmm6, [edx-1032]{1to8}	 # AVX512CD
	vpbroadcastmw2d zmm6, k6	 # AVX512CD
	vpbroadcastmb2q zmm6, k6	 # AVX512CD
# --- end of file (trailing extraction artifact neutralized as a comment) ---