| repo_id (string, 5–115 chars) | size (int64, 590–5.01M) | file_path (string, 4–212 chars) | content (string, 590–5.01M chars) |
|---|---|---|---|
stsp/binutils-ia16
| 4,042
|
gas/testsuite/gas/ft32/insn.s
|
# Used for all instructions that have a 3-address form
.macro TERNARY insn
# reg-reg
\insn $r31, $r0, $r0
\insn $r0, $r31, $r0
\insn $r0, $r0, $r31
\insn $r1, $r2, $r4
\insn $r8, $r16, $r0
# immediate
\insn $r31, $r0, -512
\insn $r0, $r31, 0
\insn $r0, $r31, 1
\insn $r0, $r31, 511
# short and byte
\insn\().s $r0, $r31, $r1
\insn\().s $r0, $r31, 77
\insn\().b $r0, $r31, $r1
\insn\().b $r0, $r31, 77
.endm
.macro RegUImm insn
\insn r0, r0, 0
\insn r0, r0, 65535
\insn r0, r31, 0
\insn r0, r31, 65535
\insn r31, r0, 0
\insn r31, r0, 65535
.endm
.macro CMPOP insn
# reg-reg
\insn $r0, $r0
\insn $r31, $r0
\insn $r0, $r31
# immediate
\insn $r0, -512
\insn $r31, 0
\insn $r31, 1
\insn $r31, 511
# short and byte
\insn\().s $r31, $r1
\insn\().s $r31, 77
\insn\().b $r31, $r1
\insn\().b $r31, 77
.endm
.section .data
dalabel:
.long 0
.section .text
pmlabel:
TERNARY add
TERNARY sub
TERNARY and
TERNARY or
TERNARY xor
TERNARY xnor
TERNARY ashl
TERNARY lshr
TERNARY ashr
TERNARY ror
TERNARY ldl
TERNARY bins
TERNARY bexts
TERNARY bextu
TERNARY flip
CMPOP addcc
CMPOP cmp
CMPOP tst
CMPOP btst
# LDI, STI, EXI
ldi.l $r0,$r31,-128
ldi.l $r31,$r0,127
ldi.s $r0,$r31,-128
ldi.s $r0,$r31,127
ldi.b $r31,$r0,-128
ldi.b $r31,$r0,127
sti.l $r31,-128,$r0
sti.l $r0,127,$r31
sti.s $r31,-128,$r0
sti.s $r31,127,$r0
sti.b $r0,-128,$r31
sti.b $r0,127,$r31
exi.l $r0,$r31,-128
exi.l $r31,$r0,127
exi.s $r0,$r31,-128
exi.s $r0,$r31,127
exi.b $r31,$r0,-128
exi.b $r31,$r0,127
# LPM, LPMI
lpm.l $r0,pmlabel
lpm.s $r16,pmlabel
lpm.b $r31,pmlabel
lpmi.l $r0,$r1,-128
lpmi.s $r16,$r1,127
lpmi.b $r31,$r1,-128
# JMP
jmp pmlabel
jmpi $r16
jmpx 31,$r28,1,pmlabel
jmpc nz,pmlabel
# CALL
call pmlabel
calli $r16
callx 31,$r28,1,pmlabel
callc nz,pmlabel
# PUSH, POP
push $r0
push $r16
push $r31
pop $r0
pop $r16
pop $r31
# LINK,UNLINK
link $r0,0
link $r16,65535
link $r31,1017
unlink $r0
unlink $r16
unlink $r31
# RETURN,RETI
return
reti
# LDA,STA,EXA
lda.l $r0,dalabel
lda.s $r16,dalabel
lda.b $r31,dalabel
sta.l dalabel,$r0
sta.s dalabel,$r16
sta.b dalabel,$r31
exa.l $r0,dalabel
exa.s $r16,dalabel
exa.b $r31,dalabel
# LDK
ldk $r0,-524288
ldk $r0,524287
ldk $r0,0
move $r0,$r31
move $r31,$r0
TERNARY udiv
TERNARY umod
TERNARY div
TERNARY mod
TERNARY strcmp
TERNARY memcpy
TERNARY memset
TERNARY mul
TERNARY muluh
TERNARY streamin
TERNARY streamini
TERNARY streamout
TERNARY streamouti
strlen.l $r0,$r31
strlen.l $r31,$r0
strlen.s $r0,$r31
strlen.s $r31,$r0
strlen.b $r0,$r31
strlen.b $r31,$r0
stpcpy.l $r0,$r31
stpcpy.l $r31,$r0
stpcpy.s $r0,$r31
stpcpy.s $r31,$r0
stpcpy.b $r0,$r31
stpcpy.b $r31,$r0
|
stsp/binutils-ia16
| 1,935
|
gas/testsuite/gas/ppc/xcoff-visibility-1.s
|
# Tests every possible visibility using XCOFF format.
# Ensure that the visibility field is left empty if no
# visibility is provided.
# Ensure that only the last visibility is taken into
# account when several are provided.
# Csect visibility
.globl globl_novisibility[RW]
.csect globl_novisibility[RW]
.globl globl_internal[RW], internal
.csect globl_internal[RW]
.globl globl_hidden[RW], hidden
.csect globl_hidden[RW]
.globl globl_protected[RW], protected
.csect globl_protected[RW]
.globl globl_exported[RW], exported
.csect globl_exported[RW]
.globl globl_dual[RW], exported
.globl globl_dual[RW], internal
.csect globl_dual[RW]
# Weak csect visibility
.weak weak_novisibility[RW]
.csect weak_novisibility[RW]
.weak weak_internal[RW], internal
.csect weak_internal[RW]
.weak weak_hidden[RW], hidden
.csect weak_hidden[RW]
.weak weak_protected[RW], protected
.csect weak_protected[RW]
.weak weak_exported[RW], exported
.csect weak_exported[RW]
.weak weak_dual[RW], exported
.weak weak_dual[RW], internal
.csect weak_dual[RW]
# Comm visibility
.comm comm_novisibility[RW], 8, 4
.comm comm_internal[RW], 8, 4, internal
.comm comm_hidden[RW], 8, 4, hidden
.comm comm_protected[RW], 8, 4, protected
.comm comm_exported[RW], 8, 4, exported
# Extern visibility
.extern extern_novisibility[RW]
.extern extern_internal[RW], internal
.extern extern_hidden[RW], hidden
.extern extern_protected[RW], protected
.extern extern_exported[RW], exported
.extern extern_dual[RW], exported
.extern extern_dual[RW], internal
# Label visibility
.csect .text[PR]
.globl l_novisibility
l_novisibility:
blr
.globl l_internal, internal
l_internal:
blr
.globl l_hidden, hidden
l_hidden:
blr
.globl l_protected, protected
l_protected:
blr
.globl l_exported, exported
l_exported:
blr
.globl l_dual, exported
.globl l_dual, internal
l_dual:
blr
|
stsp/binutils-ia16
| 3,545
|
gas/testsuite/gas/ppc/altivec.s
|
# PowerPC AltiVec tests
#as: -m601 -maltivec
.text
start:
dss 3
dssall
dst 5,4,1
dstt 8,7,0
dstst 5,6,3
dststt 4,5,2
lvebx 30,22,24
lvebx 21,0,24
lvehx 10,16,2
lvehx 20,0,23
lvewx 17,4,18
lvewx 23,0,8
lvsl 6,0,25
lvsl 2,0,6
lvsr 22,16,12
lvsr 0,0,29
lvxl 15,5,13
lvxl 19,0,23
lvx 22,1,2
lvx 18,0,17
mfvrsave 31
mfvscr 24
mtvrsave 10
mtvscr 25
stvebx 18,27,10
stvebx 16,0,6
stvehx 17,13,16
stvehx 23,0,20
stvewx 11,19,31
stvewx 31,0,1
stvxl 26,21,17
stvxl 13,0,22
stvx 11,31,31
stvx 30,0,16
vaddcuw 24,7,28
vaddfp 3,30,11
vaddsbs 8,28,9
vaddshs 7,5,4
vaddsws 22,26,27
vaddubm 16,14,28
vaddubs 6,1,25
vadduhm 2,4,6
vadduhs 26,21,8
vadduwm 29,31,1
vadduws 23,13,4
vandc 30,16,9
vand 3,13,27
vavgsb 4,6,17
vavgsh 23,28,19
vavgsw 8,15,31
vavgub 6,7,25
vavguh 25,22,10
vavguw 3,23,29
vcfpsxws 14,2,6
vcfpuxws 9,31,20
vcfsx 24,30,3
vcfux 17,21,29
vcmpbfp 18,28,0
vcmpbfp. 19,26,3
vcmpeqfp 16,2,11
vcmpeqfp. 23,13,13
vcmpequb 25,19,10
vcmpequb. 18,11,2
vcmpequh 9,25,7
vcmpequh. 14,24,21
vcmpequw 24,12,5
vcmpequw. 19,16,1
vcmpgefp 23,17,16
vcmpgefp. 19,29,17
vcmpgtfp 16,28,13
vcmpgtfp. 14,24,7
vcmpgtsb 16,22,6
vcmpgtsb. 2,12,14
vcmpgtsh 28,3,29
vcmpgtsh. 16,19,13
vcmpgtsw 15,0,5
vcmpgtsw. 21,13,0
vcmpgtub 5,10,30
vcmpgtub. 7,13,10
vcmpgtuh 24,15,16
vcmpgtuh. 25,21,27
vcmpgtuw 17,27,6
vcmpgtuw. 8,21,27
vcsxwfp 1,1,14
vctsxs 4,15,25
vctuxs 28,23,14
vcuxwfp 6,6,0
vexptefp 0,8
vlogefp 22,27
vmaddfp 23,18,5,18
vmaxfp 13,13,27
vmaxsb 8,23,14
vmaxsh 19,17,0
vmaxsw 19,3,22
vmaxub 23,30,28
vmaxuh 9,20,23
vmaxuw 21,19,1
vmhaddshs 22,13,5,22
vmhraddshs 31,0,3,18
vminfp 2,21,24
vminsb 20,6,10
vminsh 18,27,26
vminsw 3,4,1
vminub 7,0,13
vminuh 0,12,6
vminuw 6,3,1
vmladduhm 3,29,3,26
vmrghb 21,5,31
vmrghh 21,24,0
vmrghw 16,0,22
vmrglb 1,17,16
vmrglh 14,8,15
vmrglw 31,21,5
vmr 24,9
vor 24,9,9
vmsummbm 0,24,15,23
vmsumshm 1,4,7,25
vmsumshs 9,8,13,31
vmsumubm 23,31,12,30
vmsumuhm 29,0,26,21
vmsumuhs 27,14,25,5
vmulesb 10,25,14
vmulesh 1,18,8
vmuleub 17,14,9
vmuleuh 5,26,9
vmulosb 21,18,6
vmulosh 4,5,8
vmuloub 2,9,19
vmulouh 29,5,4
vnmsubfp 8,2,6,5
vnor 31,9,10
vnor 25,31,31
vnot 25,31
vor 23,7,2
vperm 0,28,22,25
vpkpx 16,25,17
vpkshss 12,16,17
vpkshus 1,19,23
vpkswss 25,7,13
vpkswus 4,24,10
vpkuhum 9,27,12
vpkuhus 22,10,25
vpkuwum 30,18,0
vpkuwus 7,3,22
vrefp 24,28
vrfim 17,19
vrfin 24,25
vrfip 3,5
vrfiz 8,10
vrlb 26,18,30
vrlh 16,17,25
vrlw 23,30,9
vrsqrtefp 2,18
vsel 20,14,18,10
vslb 25,25,12
vsldoi 9,9,12,7
vslh 14,2,11
vslo 30,5,6
vsl 22,30,9
vslw 26,26,3
vspltb 1,20,6
vsplth 16,18,3
vspltisb 25,-13
vspltish 22,10
vspltisw 13,13
vspltw 9,18,2
vsrab 14,22,0
vsrah 12,12,18
vsraw 2,2,13
vsrb 7,27,5
vsrh 7,11,29
vsro 18,30,31
vsr 2,9,28
vsrw 0,25,0
vsubcuw 24,2,10
vsubfp 22,24,20
vsubsbs 10,22,13
vsubshs 24,17,28
vsubsws 10,26,0
vsububm 16,11,24
vsububs 11,21,1
vsubuhm 6,12,24
vsubuhs 30,11,9
vsubuwm 19,20,13
vsubuws 18,25,6
vsum2sws 25,10,18
vsum4sbs 13,16,21
vsum4shs 23,8,4
vsum4ubs 28,13,30
vsumsws 22,10,8
vupkhpx 24,14
vupkhsb 2,22
vupkhsh 16,2
vupklpx 10,26
vupklsb 15,28
vupklsh 8,8
vxor 25,0,3
|
stsp/binutils-ia16
| 1,222
|
gas/testsuite/gas/ppc/test1xcoff32.s
|
.csect [RW]
dsym0: .long 0xdeadbeef
dsym1:
.toc
.L_tsym0:
.tc ignored0[TC],dsym0
.L_tsym1:
.tc ignored1[TC],dsym1
.L_tsym2:
.tc ignored2[TC],usym0
.L_tsym3:
.tc ignored3[TC],usym1
.L_tsym4:
.tc ignored4[TC],esym0
.L_tsym5:
.tc ignored5[TC],esym1
.L_tsym6:
.tc ignored6[TC],.text
.csect .crazy_table[RO]
xdsym0: .long 0xbeefed
xdsym1:
.csect [PR]
.lglobl reference_csect_relative_symbols
reference_csect_relative_symbols:
lwz 3,xdsym0(3)
lwz 3,xdsym1(3)
lwz 3,xusym0(3)
lwz 3,xusym1(3)
.lglobl dubious_references_to_default_RW_csect
dubious_references_to_default_RW_csect:
lwz 3,dsym0(3)
lwz 3,dsym1(3)
lwz 3,usym0(3)
lwz 3,usym1(3)
.lglobl reference_via_toc
reference_via_toc:
lwz 3,.L_tsym0(2)
lwz 3,.L_tsym1(2)
lwz 3,.L_tsym2(2)
lwz 3,.L_tsym3(2)
lwz 3,.L_tsym4(2)
lwz 3,.L_tsym5(2)
.lglobl subtract_symbols
subtract_symbols:
li 3,dsym1-dsym0
li 3,dsym0-dsym1
li 3,usym1-usym0
li 3,usym0-usym1
li 3,dsym0-usym0
li 3,usym0-dsym0
lwz 3,dsym1-dsym0(4)
.lglobl load_addresses
load_addresses:
la 3,xdsym0(0)
la 3,xusym0(0)
la 3,.L_tsym6(2)
.csect [RW]
usym0: .long 0xcafebabe
usym1: .long 0xbaad
.csect .crazy_table[RO]
xusym0: .long 0xbeefed
xusym1:
|
stsp/binutils-ia16
| 27,621
|
gas/testsuite/gas/ppc/spe2.s
|
# PA SPE2 instructions
# Testcase for CMPE200GCC-5, CMPE200GCC-62
.section ".text"
.equ rA,1
.equ rB,2
.equ rD,0
.equ rS,0
.equ UIMM, 31
.equ UIMM_LT8, 7
.equ UIMM_LT16, 15
.equ UIMM_1, 1
.equ UIMM_2, 2
.equ UIMM_4, 4
.equ UIMM_8, 8
.equ SIMM, -16
.equ crD, 0
.equ nnn, 7
.equ bbb, 7
.equ dd, 3
.equ Ddd, 7
.equ hh, 3
.equ mask, 15
.equ offset, 7
evdotpwcssi rD, rA, rB
evdotpwcsmi rD, rA, rB
evdotpwcssfr rD, rA, rB
evdotpwcssf rD, rA, rB
evdotpwgasmf rD, rA, rB
evdotpwxgasmf rD, rA, rB
evdotpwgasmfr rD, rA, rB
evdotpwxgasmfr rD, rA, rB
evdotpwgssmf rD, rA, rB
evdotpwxgssmf rD, rA, rB
evdotpwgssmfr rD, rA, rB
evdotpwxgssmfr rD, rA, rB
evdotpwcssiaaw3 rD, rA, rB
evdotpwcsmiaaw3 rD, rA, rB
evdotpwcssfraaw3 rD, rA, rB
evdotpwcssfaaw3 rD, rA, rB
evdotpwgasmfaa3 rD, rA, rB
evdotpwxgasmfaa3 rD, rA, rB
evdotpwgasmfraa3 rD, rA, rB
evdotpwxgasmfraa3 rD, rA, rB
evdotpwgssmfaa3 rD, rA, rB
evdotpwxgssmfaa3 rD, rA, rB
evdotpwgssmfraa3 rD, rA, rB
evdotpwxgssmfraa3 rD, rA, rB
evdotpwcssia rD, rA, rB
evdotpwcsmia rD, rA, rB
evdotpwcssfra rD, rA, rB
evdotpwcssfa rD, rA, rB
evdotpwgasmfa rD, rA, rB
evdotpwxgasmfa rD, rA, rB
evdotpwgasmfra rD, rA, rB
evdotpwxgasmfra rD, rA, rB
evdotpwgssmfa rD, rA, rB
evdotpwxgssmfa rD, rA, rB
evdotpwgssmfra rD, rA, rB
evdotpwxgssmfra rD, rA, rB
evdotpwcssiaaw rD, rA, rB
evdotpwcsmiaaw rD, rA, rB
evdotpwcssfraaw rD, rA, rB
evdotpwcssfaaw rD, rA, rB
evdotpwgasmfaa rD, rA, rB
evdotpwxgasmfaa rD, rA, rB
evdotpwgasmfraa rD, rA, rB
evdotpwxgasmfraa rD, rA, rB
evdotpwgssmfaa rD, rA, rB
evdotpwxgssmfaa rD, rA, rB
evdotpwgssmfraa rD, rA, rB
evdotpwxgssmfraa rD, rA, rB
evdotphihcssi rD, rA, rB
evdotplohcssi rD, rA, rB
evdotphihcssf rD, rA, rB
evdotplohcssf rD, rA, rB
evdotphihcsmi rD, rA, rB
evdotplohcsmi rD, rA, rB
evdotphihcssfr rD, rA, rB
evdotplohcssfr rD, rA, rB
evdotphihcssiaaw3 rD, rA, rB
evdotplohcssiaaw3 rD, rA, rB
evdotphihcssfaaw3 rD, rA, rB
evdotplohcssfaaw3 rD, rA, rB
evdotphihcsmiaaw3 rD, rA, rB
evdotplohcsmiaaw3 rD, rA, rB
evdotphihcssfraaw3 rD, rA, rB
evdotplohcssfraaw3 rD, rA, rB
evdotphihcssia rD, rA, rB
evdotplohcssia rD, rA, rB
evdotphihcssfa rD, rA, rB
evdotplohcssfa rD, rA, rB
evdotphihcsmia rD, rA, rB
evdotplohcsmia rD, rA, rB
evdotphihcssfra rD, rA, rB
evdotplohcssfra rD, rA, rB
evdotphihcssiaaw rD, rA, rB
evdotplohcssiaaw rD, rA, rB
evdotphihcssfaaw rD, rA, rB
evdotplohcssfaaw rD, rA, rB
evdotphihcsmiaaw rD, rA, rB
evdotplohcsmiaaw rD, rA, rB
evdotphihcssfraaw rD, rA, rB
evdotplohcssfraaw rD, rA, rB
evdotphausi rD, rA, rB
evdotphassi rD, rA, rB
evdotphasusi rD, rA, rB
evdotphassf rD, rA, rB
evdotphsssf rD, rA, rB
evdotphaumi rD, rA, rB
evdotphasmi rD, rA, rB
evdotphasumi rD, rA, rB
evdotphassfr rD, rA, rB
evdotphssmi rD, rA, rB
evdotphsssfr rD, rA, rB
evdotphausiaaw3 rD, rA, rB
evdotphassiaaw3 rD, rA, rB
evdotphasusiaaw3 rD, rA, rB
evdotphassfaaw3 rD, rA, rB
evdotphsssiaaw3 rD, rA, rB
evdotphsssfaaw3 rD, rA, rB
evdotphaumiaaw3 rD, rA, rB
evdotphasmiaaw3 rD, rA, rB
evdotphasumiaaw3 rD, rA, rB
evdotphassfraaw3 rD, rA, rB
evdotphssmiaaw3 rD, rA, rB
evdotphsssfraaw3 rD, rA, rB
evdotphausia rD, rA, rB
evdotphassia rD, rA, rB
evdotphasusia rD, rA, rB
evdotphassfa rD, rA, rB
evdotphsssfa rD, rA, rB
evdotphaumia rD, rA, rB
evdotphasmia rD, rA, rB
evdotphasumia rD, rA, rB
evdotphassfra rD, rA, rB
evdotphssmia rD, rA, rB
evdotphsssfra rD, rA, rB
evdotphausiaaw rD, rA, rB
evdotphassiaaw rD, rA, rB
evdotphasusiaaw rD, rA, rB
evdotphassfaaw rD, rA, rB
evdotphsssiaaw rD, rA, rB
evdotphsssfaaw rD, rA, rB
evdotphaumiaaw rD, rA, rB
evdotphasmiaaw rD, rA, rB
evdotphasumiaaw rD, rA, rB
evdotphassfraaw rD, rA, rB
evdotphssmiaaw rD, rA, rB
evdotphsssfraaw rD, rA, rB
evdotp4hgaumi rD, rA, rB
evdotp4hgasmi rD, rA, rB
evdotp4hgasumi rD, rA, rB
evdotp4hgasmf rD, rA, rB
evdotp4hgssmi rD, rA, rB
evdotp4hgssmf rD, rA, rB
evdotp4hxgasmi rD, rA, rB
evdotp4hxgasmf rD, rA, rB
evdotpbaumi rD, rA, rB
evdotpbasmi rD, rA, rB
evdotpbasumi rD, rA, rB
evdotp4hxgssmi rD, rA, rB
evdotp4hxgssmf rD, rA, rB
evdotp4hgaumiaa3 rD, rA, rB
evdotp4hgasmiaa3 rD, rA, rB
evdotp4hgasumiaa3 rD, rA, rB
evdotp4hgasmfaa3 rD, rA, rB
evdotp4hgssmiaa3 rD, rA, rB
evdotp4hgssmfaa3 rD, rA, rB
evdotp4hxgasmiaa3 rD, rA, rB
evdotp4hxgasmfaa3 rD, rA, rB
evdotpbaumiaaw3 rD, rA, rB
evdotpbasmiaaw3 rD, rA, rB
evdotpbasumiaaw3 rD, rA, rB
evdotp4hxgssmiaa3 rD, rA, rB
evdotp4hxgssmfaa3 rD, rA, rB
evdotp4hgaumia rD, rA, rB
evdotp4hgasmia rD, rA, rB
evdotp4hgasumia rD, rA, rB
evdotp4hgasmfa rD, rA, rB
evdotp4hgssmia rD, rA, rB
evdotp4hgssmfa rD, rA, rB
evdotp4hxgasmia rD, rA, rB
evdotp4hxgasmfa rD, rA, rB
evdotpbaumia rD, rA, rB
evdotpbasmia rD, rA, rB
evdotpbasumia rD, rA, rB
evdotp4hxgssmia rD, rA, rB
evdotp4hxgssmfa rD, rA, rB
evdotp4hgaumiaa rD, rA, rB
evdotp4hgasmiaa rD, rA, rB
evdotp4hgasumiaa rD, rA, rB
evdotp4hgasmfaa rD, rA, rB
evdotp4hgssmiaa rD, rA, rB
evdotp4hgssmfaa rD, rA, rB
evdotp4hxgasmiaa rD, rA, rB
evdotp4hxgasmfaa rD, rA, rB
evdotpbaumiaaw rD, rA, rB
evdotpbasmiaaw rD, rA, rB
evdotpbasumiaaw rD, rA, rB
evdotp4hxgssmiaa rD, rA, rB
evdotp4hxgssmfaa rD, rA, rB
evdotpwausi rD, rA, rB
evdotpwassi rD, rA, rB
evdotpwasusi rD, rA, rB
evdotpwaumi rD, rA, rB
evdotpwasmi rD, rA, rB
evdotpwasumi rD, rA, rB
evdotpwssmi rD, rA, rB
evdotpwausiaa3 rD, rA, rB
evdotpwassiaa3 rD, rA, rB
evdotpwasusiaa3 rD, rA, rB
evdotpwsssiaa3 rD, rA, rB
evdotpwaumiaa3 rD, rA, rB
evdotpwasmiaa3 rD, rA, rB
evdotpwasumiaa3 rD, rA, rB
evdotpwssmiaa3 rD, rA, rB
evdotpwausia rD, rA, rB
evdotpwassia rD, rA, rB
evdotpwasusia rD, rA, rB
evdotpwaumia rD, rA, rB
evdotpwasmia rD, rA, rB
evdotpwasumia rD, rA, rB
evdotpwssmia rD, rA, rB
evdotpwausiaa rD, rA, rB
evdotpwassiaa rD, rA, rB
evdotpwasusiaa rD, rA, rB
evdotpwsssiaa rD, rA, rB
evdotpwaumiaa rD, rA, rB
evdotpwasmiaa rD, rA, rB
evdotpwasumiaa rD, rA, rB
evdotpwssmiaa rD, rA, rB
evaddib rD, rB, UIMM
evaddih rD, rB, UIMM
evsubifh rD, UIMM, rB
evsubifb rD, UIMM, rB
evabsb rD, rA
evabsh rD, rA
evabsd rD, rA
evabss rD, rA
evabsbs rD, rA
evabshs rD, rA
evabsds rD, rA
evnegwo rD, rA
evnegb rD, rA
evnegbo rD, rA
evnegh rD, rA
evnegho rD, rA
evnegd rD, rA
evnegs rD, rA
evnegwos rD, rA
evnegbs rD, rA
evnegbos rD, rA
evneghs rD, rA
evneghos rD, rA
evnegds rD, rA
evextzb rD, rA
evextsbh rD, rA
evextsw rD, rA
evrndwh rD, rA
evrndhb rD, rA
evrnddw rD, rA
evrndwhus rD, rA
evrndwhss rD, rA
evrndhbus rD, rA
evrndhbss rD, rA
evrnddwus rD, rA
evrnddwss rD, rA
evrndwnh rD, rA
evrndhnb rD, rA
evrnddnw rD, rA
evrndwnhus rD, rA
evrndwnhss rD, rA
evrndhnbus rD, rA
evrndhnbss rD, rA
evrnddnwus rD, rA
evrnddnwss rD, rA
evcntlzh rD, rA
evcntlsh rD, rA
evpopcntb rD, rA
circinc rD, rA, rB
evunpkhibui rD, rA
evunpkhibsi rD, rA
evunpkhihui rD, rA
evunpkhihsi rD, rA
evunpklobui rD, rA
evunpklobsi rD, rA
evunpklohui rD, rA
evunpklohsi rD, rA
evunpklohf rD, rA
evunpkhihf rD, rA
evunpklowgsf rD, rA
evunpkhiwgsf rD, rA
evsatsduw rD, rA
evsatsdsw rD, rA
evsatshub rD, rA
evsatshsb rD, rA
evsatuwuh rD, rA
evsatswsh rD, rA
evsatswuh rD, rA
evsatuhub rD, rA
evsatuduw rD, rA
evsatuwsw rD, rA
evsatshuh rD, rA
evsatuhsh rD, rA
evsatswuw rD, rA
evsatswgsdf rD, rA
evsatsbub rD, rA
evsatubsb rD, rA
evmaxhpuw rD, rA
evmaxhpsw rD, rA
evmaxbpuh rD, rA
evmaxbpsh rD, rA
evmaxwpud rD, rA
evmaxwpsd rD, rA
evminhpuw rD, rA
evminhpsw rD, rA
evminbpuh rD, rA
evminbpsh rD, rA
evminwpud rD, rA
evminwpsd rD, rA
evmaxmagws rD, rA, rB
evsl rD, rA, rB
evsli rD, rA, UIMM
evsplatie rD, SIMM
evsplatib rD, SIMM
evsplatibe rD, SIMM
evsplatih rD, SIMM
evsplatihe rD, SIMM
evsplatid rD, SIMM
evsplatia rD, SIMM
evsplatiea rD, SIMM
evsplatiba rD, SIMM
evsplatibea rD, SIMM
evsplatiha rD, SIMM
evsplatihea rD, SIMM
evsplatida rD, SIMM
evsplatfio rD, SIMM
evsplatfib rD, SIMM
evsplatfibo rD, SIMM
evsplatfih rD, SIMM
evsplatfiho rD, SIMM
evsplatfid rD, SIMM
evsplatfia rD, SIMM
evsplatfioa rD, SIMM
evsplatfiba rD, SIMM
evsplatfiboa rD, SIMM
evsplatfiha rD, SIMM
evsplatfihoa rD, SIMM
evsplatfida rD, SIMM
evcmpgtdu crD, rA, rB
evcmpgtds crD, rA, rB
evcmpltdu crD, rA, rB
evcmpltds crD, rA, rB
evcmpeqd crD, rA, rB
evswapbhilo rD, rA, rB
evswapblohi rD, rA, rB
evswaphhilo rD, rA, rB
evswaphlohi rD, rA, rB
evswaphe rD, rA, rB
evswaphhi rD, rA, rB
evswaphlo rD, rA, rB
evswapho rD, rA, rB
evinsb rD, rA, Ddd, bbb
evxtrb rD, rA, Ddd, bbb
evsplath rD, rA, hh
evsplatb rD, rA, bbb
evinsh rD, rA, dd, hh
evclrbe rD, rA, mask
evclrbo rD, rA, mask
evclrh rD, rA, mask
evxtrh rD, rA, dd, hh
evselbitm0 rD, rA, rB
evselbitm1 rD, rA, rB
evselbit rD, rA, rB
evperm rD, rA, rB
evperm2 rD, rA, rB
evperm3 rD, rA, rB
evxtrd rD, rA, rB, offset
evsrbu rD, rA, rB
evsrbs rD, rA, rB
evsrbiu rD, rA, UIMM_LT8
evsrbis rD, rA, UIMM_LT8
evslb rD, rA, rB
evrlb rD, rA, rB
evslbi rD, rA, UIMM_LT8
evrlbi rD, rA, UIMM_LT8
evsrhu rD, rA, rB
evsrhs rD, rA, rB
evsrhiu rD, rA, UIMM_LT16
evsrhis rD, rA, UIMM_LT16
evslh rD, rA, rB
evrlh rD, rA, rB
evslhi rD, rA, UIMM_LT16
evrlhi rD, rA, UIMM_LT16
evsru rD, rA, rB
evsrs rD, rA, rB
evsriu rD, rA, UIMM
evsris rD, rA, UIMM
evlvsl rD, rA, rB
evlvsr rD, rA, rB
evsroiu rD, rA, nnn
evsrois rD, rA, nnn
evsloi rD, rA, nnn
evfssqrt rD, rA
evfscfh rD, rB
evfscth rD, rB
evfsmax rD, rA, rB
evfsmin rD, rA, rB
evfsaddsub rD, rA, rB
evfssubadd rD, rA, rB
evfssum rD, rA, rB
evfsdiff rD, rA, rB
evfssumdiff rD, rA, rB
evfsdiffsum rD, rA, rB
evfsaddx rD, rA, rB
evfssubx rD, rA, rB
evfsaddsubx rD, rA, rB
evfssubaddx rD, rA, rB
evfsmulx rD, rA, rB
evfsmule rD, rA, rB
evfsmulo rD, rA, rB
evldbx rD, rA, rB
evldb rD, UIMM_8 (rA)
evlhhsplathx rD, rA, rB
evlhhsplath rD, UIMM_2 (rA)
evlwbsplatwx rD, rA, rB
evlwbsplatw rD, UIMM_4 (rA)
evlwhsplatwx rD, rA, rB
evlwhsplatw rD, UIMM_4 (rA)
evlbbsplatbx rD, rA, rB
evlbbsplatb rD, UIMM_1 (rA)
evstdbx rS, rA, rB
evstdb rS, UIMM_8 (rA)
evlwbex rD, rA, rB
evlwbe rD, UIMM_4 (rA)
evlwboux rD, rA, rB
evlwbou rD, UIMM_4 (rA)
evlwbosx rD, rA, rB
evlwbos rD, UIMM_4 (rA)
evstwbex rS, rA, rB
evstwbe rS, UIMM_4 (rA)
evstwbox rS, rA, rB
evstwbo rS, UIMM_4 (rA)
evstwbx rS, rA, rB
evstwb rS, UIMM_4 (rA)
evsthbx rS, rA, rB
evsthb rS, UIMM_2 (rA)
evlddmx rD, rA, rB
evlddu rD, UIMM_8 (rA)
evldwmx rD, rA, rB
evldwu rD, UIMM_8 (rA)
evldhmx rD, rA, rB
evldhu rD, UIMM_8 (rA)
evldbmx rD, rA, rB
evldbu rD, UIMM_8 (rA)
evlhhesplatmx rD, rA, rB
evlhhesplatu rD, UIMM_2 (rA)
evlhhsplathmx rD, rA, rB
evlhhsplathu rD, UIMM_2 (rA)
evlhhousplatmx rD, rA, rB
evlhhousplatu rD, UIMM_2 (rA)
evlhhossplatmx rD, rA, rB
evlhhossplatu rD, UIMM_2 (rA)
evlwhemx rD, rA, rB
evlwheu rD, UIMM_4 (rA)
evlwbsplatwmx rD, rA, rB
evlwbsplatwu rD, UIMM_4 (rA)
evlwhoumx rD, rA, rB
evlwhouu rD, UIMM_4 (rA)
evlwhosmx rD, rA, rB
evlwhosu rD, UIMM_4 (rA)
evlwwsplatmx rD, rA, rB
evlwwsplatu rD, UIMM_4 (rA)
evlwhsplatwmx rD, rA, rB
evlwhsplatwu rD, UIMM_4 (rA)
evlwhsplatmx rD, rA, rB
evlwhsplatu rD, UIMM_4 (rA)
evlbbsplatbmx rD, rA, rB
evlbbsplatbu rD, UIMM_1 (rA)
evstddmx rS, rA, rB
evstddu rS, UIMM_8 (rA)
evstdwmx rS, rA, rB
evstdwu rS, UIMM_8 (rA)
evstdhmx rS, rA, rB
evstdhu rS, UIMM_8 (rA)
evstdbmx rS, rA, rB
evstdbu rS, UIMM_8 (rA)
evlwbemx rD, rA, rB
evlwbeu rD, UIMM_4 (rA)
evlwboumx rD, rA, rB
evlwbouu rD, UIMM_4 (rA)
evlwbosmx rD, rA, rB
evlwbosu rD, UIMM_4 (rA)
evstwhemx rS, rA, rB
evstwheu rS, UIMM_4 (rA)
evstwbemx rS, rA, rB
evstwbeu rS, UIMM_4 (rA)
evstwhomx rS, rA, rB
evstwhou rS, UIMM_4 (rA)
evstwbomx rS, rA, rB
evstwbou rS, UIMM_4 (rA)
evstwwemx rS, rA, rB
evstwweu rS, UIMM_4 (rA)
evstwbmx rS, rA, rB
evstwbu rS, UIMM_4 (rA)
evstwwomx rS, rA, rB
evstwwou rS, UIMM_4 (rA)
evsthbmx rS, rA, rB
evsthbu rS, UIMM_2 (rA)
evmhusi rD, rA, rB
evmhssi rD, rA, rB
evmhsusi rD, rA, rB
evmhssf rD, rA, rB
evmhumi rD, rA, rB
evmhssfr rD, rA, rB
evmhesumi rD, rA, rB
evmhosumi rD, rA, rB
evmbeumi rD, rA, rB
evmbesmi rD, rA, rB
evmbesumi rD, rA, rB
evmboumi rD, rA, rB
evmbosmi rD, rA, rB
evmbosumi rD, rA, rB
evmhesumia rD, rA, rB
evmhosumia rD, rA, rB
evmbeumia rD, rA, rB
evmbesmia rD, rA, rB
evmbesumia rD, rA, rB
evmboumia rD, rA, rB
evmbosmia rD, rA, rB
evmbosumia rD, rA, rB
evmwusiw rD, rA, rB
evmwssiw rD, rA, rB
evmwhssfr rD, rA, rB
evmwehgsmfr rD, rA, rB
evmwehgsmf rD, rA, rB
evmwohgsmfr rD, rA, rB
evmwohgsmf rD, rA, rB
evmwhssfra rD, rA, rB
evmwehgsmfra rD, rA, rB
evmwehgsmfa rD, rA, rB
evmwohgsmfra rD, rA, rB
evmwohgsmfa rD, rA, rB
evaddusiaa rD, rA
evaddssiaa rD, rA
evsubfusiaa rD, rA
evsubfssiaa rD, rA
evaddsmiaa rD, rA
evsubfsmiaa rD, rA
evaddh rD, rA, rB
evaddhss rD, rA, rB
evsubfh rD, rA, rB
evsubfhss rD, rA, rB
evaddhx rD, rA, rB
evaddhxss rD, rA, rB
evsubfhx rD, rA, rB
evsubfhxss rD, rA, rB
evaddd rD, rA, rB
evadddss rD, rA, rB
evsubfd rD, rA, rB
evsubfdss rD, rA, rB
evaddb rD, rA, rB
evaddbss rD, rA, rB
evsubfb rD, rA, rB
evsubfbss rD, rA, rB
evaddsubfh rD, rA, rB
evaddsubfhss rD, rA, rB
evsubfaddh rD, rA, rB
evsubfaddhss rD, rA, rB
evaddsubfhx rD, rA, rB
evaddsubfhxss rD, rA, rB
evsubfaddhx rD, rA, rB
evsubfaddhxss rD, rA, rB
evadddus rD, rA, rB
evaddbus rD, rA, rB
evsubfdus rD, rA, rB
evsubfbus rD, rA, rB
evaddwus rD, rA, rB
evaddwxus rD, rA, rB
evsubfwus rD, rA, rB
evsubfwxus rD, rA, rB
evadd2subf2h rD, rA, rB
evadd2subf2hss rD, rA, rB
evsubf2add2h rD, rA, rB
evsubf2add2hss rD, rA, rB
evaddhus rD, rA, rB
evaddhxus rD, rA, rB
evsubfhus rD, rA, rB
evsubfhxus rD, rA, rB
evaddwss rD, rA, rB
evsubfwss rD, rA, rB
evaddwx rD, rA, rB
evaddwxss rD, rA, rB
evsubfwx rD, rA, rB
evsubfwxss rD, rA, rB
evaddsubfw rD, rA, rB
evaddsubfwss rD, rA, rB
evsubfaddw rD, rA, rB
evsubfaddwss rD, rA, rB
evaddsubfwx rD, rA, rB
evaddsubfwxss rD, rA, rB
evsubfaddwx rD, rA, rB
evsubfaddwxss rD, rA, rB
evmar rD
evsumwu rD, rA
evsumws rD, rA
evsum4bu rD, rA
evsum4bs rD, rA
evsum2hu rD, rA
evsum2hs rD, rA
evdiff2his rD, rA
evsum2his rD, rA
evsumwua rD, rA
evsumwsa rD, rA
evsum4bua rD, rA
evsum4bsa rD, rA
evsum2hua rD, rA
evsum2hsa rD, rA
evdiff2hisa rD, rA
evsum2hisa rD, rA
evsumwuaa rD, rA
evsumwsaa rD, rA
evsum4buaaw rD, rA
evsum4bsaaw rD, rA
evsum2huaaw rD, rA
evsum2hsaaw rD, rA
evdiff2hisaaw rD, rA
evsum2hisaaw rD, rA
evdivwsf rD, rA, rB
evdivwuf rD, rA, rB
evdivs rD, rA, rB
evdivu rD, rA, rB
evaddwegsi rD, rA, rB
evaddwegsf rD, rA, rB
evsubfwegsi rD, rA, rB
evsubfwegsf rD, rA, rB
evaddwogsi rD, rA, rB
evaddwogsf rD, rA, rB
evsubfwogsi rD, rA, rB
evsubfwogsf rD, rA, rB
evaddhhiuw rD, rA, rB
evaddhhisw rD, rA, rB
evsubfhhiuw rD, rA, rB
evsubfhhisw rD, rA, rB
evaddhlouw rD, rA, rB
evaddhlosw rD, rA, rB
evsubfhlouw rD, rA, rB
evsubfhlosw rD, rA, rB
evmhesusiaaw rD, rA, rB
evmhosusiaaw rD, rA, rB
evmhesumiaaw rD, rA, rB
evmhosumiaaw rD, rA, rB
evmbeusiaah rD, rA, rB
evmbessiaah rD, rA, rB
evmbesusiaah rD, rA, rB
evmbousiaah rD, rA, rB
evmbossiaah rD, rA, rB
evmbosusiaah rD, rA, rB
evmbeumiaah rD, rA, rB
evmbesmiaah rD, rA, rB
evmbesumiaah rD, rA, rB
evmboumiaah rD, rA, rB
evmbosmiaah rD, rA, rB
evmbosumiaah rD, rA, rB
evmwlusiaaw3 rD, rA, rB
evmwlssiaaw3 rD, rA, rB
evmwhssfraaw3 rD, rA, rB
evmwhssfaaw3 rD, rA, rB
evmwhssfraaw rD, rA, rB
evmwhssfaaw rD, rA, rB
evmwlumiaaw3 rD, rA, rB
evmwlsmiaaw3 rD, rA, rB
evmwusiaa rD, rA, rB
evmwssiaa rD, rA, rB
evmwehgsmfraa rD, rA, rB
evmwehgsmfaa rD, rA, rB
evmwohgsmfraa rD, rA, rB
evmwohgsmfaa rD, rA, rB
evmhesusianw rD, rA, rB
evmhosusianw rD, rA, rB
evmhesumianw rD, rA, rB
evmhosumianw rD, rA, rB
evmbeusianh rD, rA, rB
evmbessianh rD, rA, rB
evmbesusianh rD, rA, rB
evmbousianh rD, rA, rB
evmbossianh rD, rA, rB
evmbosusianh rD, rA, rB
evmbeumianh rD, rA, rB
evmbesmianh rD, rA, rB
evmbesumianh rD, rA, rB
evmboumianh rD, rA, rB
evmbosmianh rD, rA, rB
evmbosumianh rD, rA, rB
evmwlusianw3 rD, rA, rB
evmwlssianw3 rD, rA, rB
evmwhssfranw3 rD, rA, rB
evmwhssfanw3 rD, rA, rB
evmwhssfranw rD, rA, rB
evmwhssfanw rD, rA, rB
evmwlumianw3 rD, rA, rB
evmwlsmianw3 rD, rA, rB
evmwusian rD, rA, rB
evmwssian rD, rA, rB
evmwehgsmfran rD, rA, rB
evmwehgsmfan rD, rA, rB
evmwohgsmfran rD, rA, rB
evmwohgsmfan rD, rA, rB
evseteqb rD, rA, rB
evseteqb. rD, rA, rB
evseteqh rD, rA, rB
evseteqh. rD, rA, rB
evseteqw rD, rA, rB
evseteqw. rD, rA, rB
evsetgthu rD, rA, rB
evsetgthu. rD, rA, rB
evsetgths rD, rA, rB
evsetgths. rD, rA, rB
evsetgtwu rD, rA, rB
evsetgtwu. rD, rA, rB
evsetgtws rD, rA, rB
evsetgtws. rD, rA, rB
evsetgtbu rD, rA, rB
evsetgtbu. rD, rA, rB
evsetgtbs rD, rA, rB
evsetgtbs. rD, rA, rB
evsetltbu rD, rA, rB
evsetltbu. rD, rA, rB
evsetltbs rD, rA, rB
evsetltbs. rD, rA, rB
evsetlthu rD, rA, rB
evsetlthu. rD, rA, rB
evsetlths rD, rA, rB
evsetlths. rD, rA, rB
evsetltwu rD, rA, rB
evsetltwu. rD, rA, rB
evsetltws rD, rA, rB
evsetltws. rD, rA, rB
evsaduw rD, rA, rB
evsadsw rD, rA, rB
evsad4ub rD, rA, rB
evsad4sb rD, rA, rB
evsad2uh rD, rA, rB
evsad2sh rD, rA, rB
evsaduwa rD, rA, rB
evsadswa rD, rA, rB
evsad4uba rD, rA, rB
evsad4sba rD, rA, rB
evsad2uha rD, rA, rB
evsad2sha rD, rA, rB
evabsdifuw rD, rA, rB
evabsdifsw rD, rA, rB
evabsdifub rD, rA, rB
evabsdifsb rD, rA, rB
evabsdifuh rD, rA, rB
evabsdifsh rD, rA, rB
evsaduwaa rD, rA, rB
evsadswaa rD, rA, rB
evsad4ubaaw rD, rA, rB
evsad4sbaaw rD, rA, rB
evsad2uhaaw rD, rA, rB
evsad2shaaw rD, rA, rB
evpkshubs rD, rA, rB
evpkshsbs rD, rA, rB
evpkswuhs rD, rA, rB
evpkswshs rD, rA, rB
evpkuhubs rD, rA, rB
evpkuwuhs rD, rA, rB
evpkswshilvs rD, rA, rB
evpkswgshefrs rD, rA, rB
evpkswshfrs rD, rA, rB
evpkswshilvfrs rD, rA, rB
evpksdswfrs rD, rA, rB
evpksdshefrs rD, rA, rB
evpkuduws rD, rA, rB
evpksdsws rD, rA, rB
evpkswgswfrs rD, rA, rB
evilveh rD, rA, rB
evilveoh rD, rA, rB
evilvhih rD, rA, rB
evilvhiloh rD, rA, rB
evilvloh rD, rA, rB
evilvlohih rD, rA, rB
evilvoeh rD, rA, rB
evilvoh rD, rA, rB
evdlveb rD, rA, rB
evdlveh rD, rA, rB
evdlveob rD, rA, rB
evdlveoh rD, rA, rB
evdlvob rD, rA, rB
evdlvoh rD, rA, rB
evdlvoeb rD, rA, rB
evdlvoeh rD, rA, rB
evmaxbu rD, rA, rB
evmaxbs rD, rA, rB
evmaxhu rD, rA, rB
evmaxhs rD, rA, rB
evmaxwu rD, rA, rB
evmaxws rD, rA, rB
evmaxdu rD, rA, rB
evmaxds rD, rA, rB
evminbu rD, rA, rB
evminbs rD, rA, rB
evminhu rD, rA, rB
evminhs rD, rA, rB
evminwu rD, rA, rB
evminws rD, rA, rB
evmindu rD, rA, rB
evminds rD, rA, rB
evavgwu rD, rA, rB
evavgws rD, rA, rB
evavgbu rD, rA, rB
evavgbs rD, rA, rB
evavghu rD, rA, rB
evavghs rD, rA, rB
evavgdu rD, rA, rB
evavgds rD, rA, rB
evavgwur rD, rA, rB
evavgwsr rD, rA, rB
evavgbur rD, rA, rB
evavgbsr rD, rA, rB
evavghur rD, rA, rB
evavghsr rD, rA, rB
evavgdur rD, rA, rB
evavgdsr rD, rA, rB
;#SPE2 mapped by macro
evdotphsssi rD, rA, rB
evdotphsssia rD, rA, rB
evdotpwsssi rD, rA, rB
evdotpwsssia rD, rA, rB
|
stsp/binutils-ia16
| 1,477
|
gas/testsuite/gas/ppc/altivec3.s
|
.text
start:
vmul10cuq 11,30
vcmpneb 30,1,23
vpermr 30,19,30,29
vmul10ecuq 20,20,17
vcmpneh 27,19,31
vrlwmi 6,9,23
vcmpnew 22,26,1
vrldmi 24,30,25
vcmpnezb 19,29,22
vcmpnezh 8,23,19
vrlwnm 27,24,11
vcmpnezw 21,13,10
vrldnm 5,20,29
vmul10uq 30,19
vextractub 24,21,12
vmul10euq 0,19,28
vextractuh 10,3,12
vextractuw 28,12,7
vextractd 30,27,1
vinsertb 25,31,4
bcdcpsgn. 21,14,30
vinserth 22,18,5
vinsertw 29,22,1
vinsertd 29,13,7
vcmpneb. 22,25,8
vcmpneh. 16,15,21
bcdus. 22,21,31
vcmpnew. 1,12,12
bcds. 5,3,8,1
bcdtrunc. 27,22,1,0
vcmpnezb. 2,26,0
bcdutrunc. 26,14,7
vcmpnezh. 16,5,12
bcdctsq. 24,5
bcdcfsq. 7,0,0
bcdctz. 30,12,1
bcdctn. 17,23
bcdcfz. 4,15,1
bcdcfn. 29,5,1
bcdsetsgn. 27,12,0
vcmpnezw. 14,28,25
bcdsr. 2,2,6,1
vbpermd 25,0,5
vclzlsbb 28,25
vctzlsbb 2,24
vnegw 21,11
vnegd 17,27
vprtybw 31,23
vprtybd 21,23
vprtybq 21,18
vextsb2w 30,4
vextsh2w 3,26
vextsb2d 11,17
vextsh2d 5,10
vextsw2d 13,25
vctzb 25,2
vctzh 0,3
vctzw 22,6
vctzd 26,24
vextublx 6,31,2
vextuhlx 13,0,18
vextuwlx 14,30,31
vsrv 15,12,14
vextubrx 20,10,30
vslv 21,21,2
vextuhrx 15,9,1
vextuwrx 21,17,16
vmsumudm 20,21,22,23
|
stsp/binutils-ia16
| 3,331
|
gas/testsuite/gas/ppc/vsx.s
|
.text
start:
lxsdx 40,10,20
lxvd2x 40,10,20
lxvdsx 40,10,20
lxvw4x 40,10,20
stxsdx 40,10,20
stxvd2x 40,10,20
stxvw4x 40,10,20
xsabsdp 40,60
xsadddp 40,50,60
xscmpodp 1,50,60
xscmpudp 1,50,60
xscpsgndp 40,50,60
xscvdpsp 40,60
xscvdpsxds 40,60
xscvdpsxws 40,60
xscvdpuxds 40,60
xscvdpuxws 40,60
xscvspdp 40,60
xscvsxddp 40,60
xscvuxddp 40,60
xsdivdp 40,50,60
xsmaddadp 40,50,60
xsmaddmdp 40,50,60
xsmaxdp 40,50,60
xsmindp 40,50,60
xsmsubadp 40,50,60
xsmsubmdp 40,50,60
xsmuldp 40,50,60
xsnabsdp 40,60
xsnegdp 40,60
xsnmaddadp 40,50,60
xsnmaddmdp 40,50,60
xsnmsubadp 40,50,60
xsnmsubmdp 40,50,60
xsrdpi 40,60
xsrdpic 40,60
xsrdpim 40,60
xsrdpip 40,60
xsrdpiz 40,60
xsredp 40,60
xsrsqrtedp 40,60
xssqrtdp 40,60
xssubdp 40,50,60
xstdivdp 1,50,60
xstsqrtdp 1,60
xvabsdp 40,60
xvabssp 40,60
xvadddp 40,50,60
xvaddsp 40,50,60
xvcmpeqdp 40,50,60
xvcmpeqdp. 40,50,60
xvcmpeqsp 40,50,60
xvcmpeqsp. 40,50,60
xvcmpgedp 40,50,60
xvcmpgedp. 40,50,60
xvcmpgesp 40,50,60
xvcmpgesp. 40,50,60
xvcmpgtdp 40,50,60
xvcmpgtdp. 40,50,60
xvcmpgtsp 40,50,60
xvcmpgtsp. 40,50,60
xvcpsgndp 40,50,60
xvmovdp 40,60
xvcpsgndp 40,60,60
xvcpsgnsp 40,50,60
xvmovsp 40,60
xvcpsgnsp 40,60,60
xvcvdpsp 40,60
xvcvdpsxds 40,60
xvcvdpsxws 40,60
xvcvdpuxds 40,60
xvcvdpuxws 40,60
xvcvspdp 40,60
xvcvspsxds 40,60
xvcvspsxws 40,60
xvcvspuxds 40,60
xvcvspuxws 40,60
xvcvsxddp 40,60
xvcvsxdsp 40,60
xvcvsxwdp 40,60
xvcvsxwsp 40,60
xvcvuxddp 40,60
xvcvuxdsp 40,60
xvcvuxwdp 40,60
xvcvuxwsp 40,60
xvdivdp 40,50,60
xvdivsp 40,50,60
xvmaddadp 40,50,60
xvmaddmdp 40,50,60
xvmaddasp 40,50,60
xvmaddmsp 40,50,60
xvmaxdp 40,50,60
xvmaxsp 40,50,60
xvmindp 40,50,60
xvminsp 40,50,60
xvmsubadp 40,50,60
xvmsubmdp 40,50,60
xvmsubasp 40,50,60
xvmsubmsp 40,50,60
xvmuldp 40,50,60
xvmulsp 40,50,60
xvnabsdp 40,60
xvnabssp 40,60
xvnegdp 40,60
xvnegsp 40,60
xvnmaddadp 40,50,60
xvnmaddmdp 40,50,60
xvnmaddasp 40,50,60
xvnmaddmsp 40,50,60
xvnmsubadp 40,50,60
xvnmsubmdp 40,50,60
xvnmsubasp 40,50,60
xvnmsubmsp 40,50,60
xvrdpi 40,60
xvrdpic 40,60
xvrdpim 40,60
xvrdpip 40,60
xvrdpiz 40,60
xvredp 40,60
xvresp 40,60
xvrspi 40,60
xvrspic 40,60
xvrspim 40,60
xvrspip 40,60
xvrspiz 40,60
xvrsqrtedp 40,60
xvrsqrtesp 40,60
xvsqrtdp 40,60
xvsqrtsp 40,60
xvsubdp 40,50,60
xvsubsp 40,50,60
xvtdivdp 1,50,60
xvtdivsp 1,50,60
xvtsqrtdp 1,60
xvtsqrtsp 1,60
xxland 40,50,60
xxlandc 40,50,60
xxlnor 40,50,60
xxlor 40,50,60
xxlxor 40,50,60
xxmrghw 40,50,60
xxmrglw 40,50,60
xxpermdi 40,50,60,0b00
xxpermdi 40,50,60,0b01
xxpermdi 40,50,60,0b10
xxpermdi 40,50,60,0b11
xxspltd 40,50,0
xxpermdi 40,50,50,0b00
xxspltd 40,50,1
xxpermdi 40,50,50,0b11
xxmrghd 40,50,60
xxpermdi 40,50,60,0b00
xxmrgld 40,50,60
xxpermdi 40,50,50,0b10
xxswapd 40,50
xxsel 40,50,60,62
xxsldwi 40,50,60,2
xxspltw 40,60,2
lxvd2x 40,0,20
lxvd2x 40,10,20
stxvd2x 40,0,20
stxvd2x 40,10,20
xxlnot 40,50
xxlnor 40,50,50
xxmr 40,50
xxlor 40,50,50
|
stsp/binutils-ia16
| 2,623
|
gas/testsuite/gas/ppc/vsx3.s
|
# GAS testsuite input: VSX instruction forms (quad-precision, byte/halfword
# vector loads/stores, exponent/significand extract, etc.).  Every line is a
# fixed operand combination serving as an encoding test vector; do not change
# register numbers or immediates.
	.text
vsx3:
	lxvx 34,6,25
	lxvx 20,0,10
	lxvl 20,24,10
	lxvl 54,0,29
	lxvll 24,20,19
	lxvll 34,0,14
	mfvsrld 2,22
	lxvwsx 58,26,25
	lxvwsx 55,0,29
	stxvx 14,21,4
	stxvx 30,0,22
	stxvl 0,26,4
	stxvl 37,0,22
	mtvsrws 24,10
	stxvll 30,21,15
	stxvll 39,0,14
	mtvsrdd 12,6,5
	mtvsrdd 38,0,21
	lxsibzx 59,28,6
	lxsibzx 30,0,8
	lxvh8x 42,23,17
	lxvh8x 36,0,5
	lxsihzx 12,9,11
	lxsihzx 49,0,13
	lxvb16x 37,3,19
	lxvb16x 0,0,30
	stxsibx 2,30,6
	stxsibx 12,0,13
	stxvh8x 16,29,8
	stxvh8x 55,0,10
	stxsihx 34,2,23
	stxsihx 60,0,23
	stxvb16x 23,14,12
	stxvb16x 19,0,5
	lxsd 24,0(0)
	lxsd 15,16(21)
	lxssp 6,0(0)
	lxssp 23,16(9)
	xscmpeqdp 18,51,33
	xscmpgtdp 2,26,34
	xscmpgedp 5,26,20
	xxperm 44,10,43
	xxpermr 41,20,5
	xscmpexpdp 4,18,55
	xxextractuw 23,37,3
	xxspltib 54,235
	xxinsertw 15,30,4
	xsmaxcdp 12,11,7
	xsmincdp 32,25,24
	xsmaxjdp 25,53,12
	xststdcsp 2,36,127
	xsminjdp 32,21,45
	xsxexpdp 17,50
	xsxsigdp 7,40
	xscvhpdp 54,34
	xscvdphp 58,54
	xststdcdp 0,38,127
	xvtstdcsp 56,53,127
	xviexpsp 54,20,52
	xsiexpdp 57,28,29
	xvxexpdp 1,20
	xvxsigdp 54,59
	xxbrh 18,37
	xvxexpsp 14,1
	xvxsigsp 52,13
	xxbrw 19,5
	xxbrd 51,55
	xvcvhpsp 35,17
	xvcvsphp 15,45
	xxbrq 17,31
	xvtstdcdp 16,12,127
	xviexpdp 27,9,8
	lxv 4,0(0)
	lxv 40,16(20)
	stxv 50,0(0)
	stxv 8,16(16)
	stxsd 3,0(0)
	stxsd 17,16(2)
	stxssp 13,0(0)
	stxssp 17,16(13)
	xsaddqp 8,10,18
	xsaddqpo 5,1,29
	xsrqpi 0,12,18,3
	xsrqpix 1,31,19,0
	xsmulqp 14,1,6
	xsmulqpo 17,7,27
	xsrqpxp 0,4,11,0
	xscpsgnqp 29,23,28
	xscmpoqp 7,13,27
	xscmpexpqp 5,21,6
	xsmaddqp 2,19,4
	xsmaddqpo 30,7,16
	xsmsubqp 21,30,15
	xsmsubqpo 12,17,30
	xsnmaddqp 6,30,12
	xsnmaddqpo 12,22,12
	xsnmsubqp 10,29,27
	xsnmsubqpo 29,29,13
	xssubqp 19,27,4
	xssubqpo 13,8,1
	xsdivqp 8,3,27
	xsdivqpo 24,20,27
	xscmpuqp 7,14,4
	xststdcqp 4,2,127
	xsabsqp 31,22
	xsxexpqp 25,3
	xsnabsqp 10,28
	xsnegqp 19,31
	xsxsigqp 11,13
	xssqrtqp 13,14
	xssqrtqpo 1,27
	xscvqpuwz 3,7
	xscvudqp 20,18
	xscvqpswz 29,29
	xscvsdqp 2,28
	xscvqpudz 23,4
	xscvqpdp 3,20
	xscvqpdpo 1,3
	xscvdpqp 19,12
	xscvqpsdz 13,4
	xsiexpqp 7,24,7
# stsp/binutils-ia16 | 1,174 bytes
# gas/testsuite/gas/ppc/vsx2.s
# GAS testsuite input: VSX scalar single-precision and GPR<->VSR move
# instructions, including the extended mnemonics (mffprd/mfvrd etc.) that
# should produce the same encoding as the base form on the preceding line.
# All operand values are fixed test vectors; do not change them.
	.text
vsx2:
	lxsiwzx 62,14,26
	lxsiwzx 40,0,25
	lxsiwax 25,0,26
	lxsiwax 3,0,3
	mfvsrd 12,30
	mffprd 12,30
	mfvsrd 12,62
	mfvrd 12,30
	mfvsrwz 20,12
	mffprwz 20,12
	mfvsrwz 21,44
	mfvrwz 21,12
	stxsiwx 14,9,14
	stxsiwx 21,0,8
	mtvsrd 11,28
	mtfprd 11,28
	mtvsrd 43,29
	mtvrd 11,29
	mtvsrwa 24,22
	mtfprwa 24,22
	mtvsrwa 57,23
	mtvrwa 25,23
	mtvsrwz 26,27
	mtfprwz 26,27
	mtvsrwz 59,28
	mtvrwz 27,28
	lxsspx 13,19,13
	lxsspx 18,0,13
	stxsspx 43,2,4
	stxsspx 55,0,11
	xsaddsp 54,48,25
	xsmaddasp 14,50,1
	xssubsp 26,22,42
	xsmaddmsp 27,53,52
	xsrsqrtesp 8,59
	xssqrtsp 12,41
	xsmulsp 57,11,32
	xsmsubasp 38,20,26
	xsdivsp 26,19,6
	xsmsubmsp 35,37,55
	xsresp 59,8
	xsnmaddasp 44,33,33
	xsnmaddmsp 17,62,30
	xsnmsubasp 54,52,31
	xsnmsubmsp 37,5,58
	xxlorc 30,54,44
	xxlnand 49,14,29
	xxleqv 62,22,30
	xscvdpspn 60,54
	xsrsp 22,45
	xscvuxdsp 26,59
	xscvsxdsp 38,49
	xscvspdpn 59,26
	fmrgow 24,14,2
	fmrgew 22,7,5
# stsp/binutils-ia16 | 1,281 bytes
# gas/testsuite/gas/ppc/e6500.s
# Power E6500 tests
# GAS testsuite input: e6500-specific instructions (vector absolute-difference,
# external-PID and element loads/stores, sync variants, TMR access, and the
# load-and-reserve / store-conditional family).  Operand values are fixed
# encoding test vectors; do not change them.
	.text
start:
	vabsdub 0, 1, 2
	vabsduh 0, 1, 2
	vabsduw 0, 1, 2
	mvidsplt 0, 1, 2
	mviwsplt 0, 1, 2
	lvexbx 0, 0, 2
	lvexbx 0, 1, 2
	lvexhx 0, 0, 2
	lvexhx 0, 1, 2
	lvexwx 0, 0, 2
	lvexwx 0, 1, 2
	stvexbx 0, 0, 2
	stvexbx 0, 1, 2
	stvexhx 0, 0, 2
	stvexhx 0, 1, 2
	stvexwx 0, 0, 2
	stvexwx 0, 1, 2
	lvepx 0, 0, 2
	lvepx 0, 1, 2
	lvepxl 0, 0, 2
	lvepxl 0, 1, 2
	stvepx 0, 0, 2
	stvepx 0, 1, 2
	stvepxl 0, 0, 2
	stvepxl 0, 1, 2
	lvtlx 0, 0, 2
	lvtlx 0, 1, 2
	lvtlxl 0, 0, 2
	lvtlxl 0, 1, 2
	lvtrx 0, 0, 2
	lvtrx 0, 1, 2
	lvtrxl 0, 0, 2
	lvtrxl 0, 1, 2
	stvflx 0, 0, 2
	stvflx 0, 1, 2
	stvflxl 0, 0, 2
	stvflxl 0, 1, 2
	stvfrx 0, 0, 2
	stvfrx 0, 1, 2
	stvfrxl 0, 0, 2
	stvfrxl 0, 1, 2
	lvswx 0, 0, 2
	lvswx 0, 1, 2
	lvswxl 0, 0, 2
	lvswxl 0, 1, 2
	stvswx 0, 0, 2
	stvswx 0, 1, 2
	stvswxl 0, 0, 2
	stvswxl 0, 1, 2
	lvsm 0, 0, 2
	lvsm 0, 1, 2
	miso
	sync
	sync 0,0
	sync 1,0
	sync 1,1
	sync 0,7
	sync 1,8
	dni 0,0
	dni 31,31
	dcblq. 2,0,1
	dcblq. 2,3,1
	icblq. 2,0,1
	icblq. 2,3,1
	mftmr 0,16
	mttmr 16,0
	lbarx 20,0,7
	lbarx 20,1,7
	lharx 21,0,8
	lharx 21,1,8
	lwarx 22,0,9
	lwarx 22,1,9
	ldarx 23,0,10
	ldarx 23,1,10
	stbcx. 10,0,7
	stbcx. 10,1,7
	sthcx. 11,0,8
	sthcx. 11,1,8
	stwcx. 12,0,9
	stwcx. 12,1,9
	stdcx. 13,0,10
	stdcx. 13,1,10
# stsp/binutils-ia16 | 8,090 bytes
# gas/testsuite/gas/ppc/power9.s
# GAS testsuite input: POWER9 (ISA 3.0) instructions -- count-trailing-zeros,
# modulo, BCD, quad-precision VSX, vector insert/extract, prefixed-era setup
# mnemonics (addpcis/lnia), TLB/SLB management, atomics (ldat/stwat), darn,
# mffs variants, scv/rfscv and dcbf extended forms.  Every line is a fixed
# operand combination serving as an encoding test vector; do not change them.
	.text
power9:
	cnttzd 3,13
	cnttzd. 4,14
	cnttzw 5,15
	cnttzw. 6,16
	modsd 10,20,21
	modsw 11,21,22
	modud 12,22,23
	moduw 13,23,24
	bcdcfn. 3,4,0
	bcdcfn. 3,4,1
	bcdcfsq. 4,5,0
	bcdcfsq. 4,5,1
	bcdcfz. 5,6,0
	bcdcfz. 5,6,1
	bcdcpsgn. 6,7,8
	bcdctn. 7,8
	bcdctsq. 8,9
	bcdctz. 9,10,0
	bcdctz. 9,10,1
	bcdsetsgn. 10,11,0
	bcdsetsgn. 10,11,1
	bcdsr. 11,12,13,0
	bcdsr. 11,12,13,1
	bcds. 12,13,14,0
	bcds. 12,13,14,1
	bcdtrunc. 13,14,15,0
	bcdtrunc. 13,14,15,1
	bcdus. 14,15,16
	bcdutrunc. 15,16,17
	lxvll 20,0,21
	lxvll 20,10,21
	stxvll 21,0,11
	stxvll 21,10,11
	vmul10cuq 22,23
	vmul10ecuq 23,24,25
	vmul10euq 24,25,26
	vmul10uq 25,26
	xsaddqp 10,11,12
	xsaddqpo 11,12,12
	xsrqpi 0,20,30,0
	xsrqpi 1,20,30,0
	xsrqpi 0,20,30,3
	xsrqpi 1,20,30,3
	xsrqpix 0,21,31,0
	xsrqpix 1,21,31,0
	xsrqpix 0,21,31,3
	xsrqpix 1,21,31,3
	xsmulqp 12,13,14
	xsmulqpo 13,14,15
	xsrqpxp 0,22,23,0
	xsrqpxp 1,22,23,0
	xsrqpxp 0,22,23,3
	xsrqpxp 1,22,23,3
	xscpsgnqp 14,15,16
	xscmpoqp 0,15,16
	xscmpoqp 7,15,16
	xscmpexpqp 0,16,17
	xscmpexpqp 7,16,17
	xsmaddqp 17,18,19
	xsmaddqpo 18,19,20
	xsmsubqp 19,20,21
	xsmsubqpo 20,21,22
	xsnmaddqp 21,22,23
	xsnmaddqpo 22,23,24
	xsnmsubqp 23,24,25
	xsnmsubqpo 24,25,26
	xssubqp 25,26,27
	xssubqpo 26,27,28
	xsdivqp 27,28,29
	xsdivqpo 28,29,30
	xscmpuqp 0,29,30
	xscmpuqp 7,29,30
	xststdcqp 0,30,0
	xststdcqp 7,30,0
	xststdcqp 0,31,0x7f
	xststdcqp 7,31,0x7f
	xsabsqp 10,11
	xsxexpqp 11,12
	xsnabsqp 12,13
	xsnegqp 13,14
	xsxsigqp 14,15
	xssqrtqp 15,16
	xssqrtqpo 16,17
	xscvqpuwz 17,18
	xscvudqp 18,19
	xscvqpswz 19,20
	xscvsdqp 20,21
	xscvqpudz 21,22
	xscvqpdp 22,23
	xscvqpdpo 23,24
	xscvdpqp 24,25
	xscvqpsdz 25,26
	xsiexpqp 26,27,28
	vpermr 4,5,6,7
	vextractub 5,6,0
	vextractub 5,6,0xf
	vextractuh 6,7,0
	vextractuh 6,7,0xf
	vextractuw 7,8,0
	vextractuw 7,8,0xf
	vextractd 8,9,0
	vextractd 8,9,0xf
	vinsertb 9,10,0
	vinsertb 9,10,0xf
	vinserth 10,11,0
	vinserth 10,11,0xf
	vinsertw 11,12,0
	vinsertw 11,12,0xf
	vinsertd 12,13,0
	vinsertd 12,13,0xf
	mfvsrld 20,45
	mtvsrws 46,21
	mtvsrdd 47,0,23
	mtvsrdd 47,22,23
	lxvx 50,0,11
	lxvx 0,10,11
	lxvwsx 51,0,12
	lxvwsx 1,10,12
	lxvh8x 52,0,13
	lxvh8x 2,10,13
	lxvb16x 53,0,14
	lxvb16x 3,10,14
	stxvx 54,0,15
	stxvx 4,20,15
	stxvh8x 55,0,16
	stxvh8x 5,20,16
	stxvb16x 56,0,17
	stxvb16x 6,20,17
	xxextractuw 4,5,0x0
	xxextractuw 40,50,0xf
	xxspltib 4,0x0
	xxspltib 4,-128
	xxspltib 41,255
	xxspltib 41,-1
	xxinsertw 5,6,0
	xxinsertw 50,60,0xf
	xxbrh 6,7
	xxbrh 56,57
	xxbrw 7,8
	xxbrw 57,58
	xxbrd 8,9
	xxbrd 58,59
	xxbrq 9,10
	xxbrq 59,60
	lxsd 20,0(0)
	lxsd 20,0(10)
	lxsd 20,8(0)
	lxsd 20,8(10)
	lxsd 20,-8(0)
	lxsd 20,-8(10)
	lxsd 20,32764(0)
	lxsd 20,32764(10)
	lxsd 20,-32768(0)
	lxsd 20,-32768(10)
	lxssp 30,0(0)
	lxssp 30,0(11)
	lxssp 30,8(0)
	lxssp 30,8(11)
	lxssp 30,-8(0)
	lxssp 30,-8(11)
	lxssp 30,32764(0)
	lxssp 30,32764(11)
	lxssp 30,-32768(0)
	lxssp 30,-32768(11)
	lxv 40,0(0)
	lxv 40,0(12)
	lxv 40,16(0)
	lxv 40,16(12)
	lxv 40,-16(0)
	lxv 10,-16(12)
	lxv 10,32752(0)
	lxv 10,32752(12)
	lxv 10,-32768(0)
	lxv 10,-32768(12)
	stxsd 21,0(0)
	stxsd 21,0(10)
	stxsd 21,8(0)
	stxsd 21,8(10)
	stxsd 21,-8(0)
	stxsd 21,-8(10)
	stxsd 21,32764(0)
	stxsd 21,32764(10)
	stxsd 21,-32768(0)
	stxsd 21,-32768(10)
	stxssp 31,0(0)
	stxssp 31,0(11)
	stxssp 31,8(0)
	stxssp 31,8(11)
	stxssp 31,-8(0)
	stxssp 31,-8(11)
	stxssp 31,32764(0)
	stxssp 31,32764(11)
	stxssp 31,-32768(0)
	stxssp 31,-32768(11)
	stxv 41,0(0)
	stxv 41,0(12)
	stxv 41,16(0)
	stxv 41,16(12)
	stxv 41,-16(0)
	stxv 11,-16(12)
	stxv 11,32752(0)
	stxv 11,32752(12)
	stxv 11,-32768(0)
	stxv 11,-32768(12)
	xxperm 20,22,24
	xxperm 40,42,44
	xxpermr 21,23,25
	xxpermr 41,43,45
	extswsli 12,20,0
	extswsli 12,20,1
	extswsli 12,20,63
	extswsli. 13,21,0
	extswsli. 13,21,1
	extswsli. 13,21,63
	vrlwmi 14,22,23
	vrldmi 15,23,24
	vrlwnm 16,24,25
	vrldnm 17,25,26
	vbpermd 18,26,27
	vnegw 19,20
	vnegd 20,21
	vprtybw 21,22
	vprtybd 22,23
	vprtybq 23,24
	vextsb2w 24,25
	vextsh2w 25,26
	vextsb2d 26,27
	vextsh2d 27,28
	vextsw2d 28,29
	vctzb 29,30
	vctzh 30,31
	vctzw 31,30
	vctzd 30,29
	lxsibzx 10,0,20
	lxsibzx 50,10,20
	lxsihzx 11,0,21
	lxsihzx 51,11,21
	stxsibx 12,0,22
	stxsibx 52,12,22
	stxsihx 13,0,23
	stxsihx 53,13,23
	maddhd 10,11,12,13
	maddhdu 20,21,22,23
	maddld 2,3,4,5
	xscmpexpdp 0,10,20
	xscmpexpdp 7,40,50
	xsiexpdp 41,11,21
	xststdcdp 0,11,0x7f
	xststdcdp 7,41,0x7f
	xststdcsp 0,11,0x7f
	xststdcsp 7,41,0x7f
	xsxexpdp 13,43
	xsxsigdp 14,44
	xviexpdp 45,46,47
	xviexpsp 46,47,48
	xvtstdcdp 54,55,0
	xvtstdcdp 54,55,0x7f
	xvtstdcsp 55,56,0
	xvtstdcsp 55,56,0x7f
	xvxexpdp 57,58
	xvxexpsp 58,59
	xvxsigdp 59,60
	xvxsigsp 60,61
	cmpeqb 0,6,7
	cmpeqb 7,6,7
	cmprb 0,0,8,9
	cmprb 7,0,8,9
	cmprb 0,1,8,9
	cmprb 7,1,8,9
	setb 15,0
	setb 15,7
	lxvl 26,0,10
	lxvl 56,20,10
	stxvl 27,0,11
	stxvl 57,21,11
	vclzlsbb 20,30
	vctzlsbb 21,31
	vcmpneb 10,11,12
	vcmpneb. 20,21,22
	vcmpneh 11,12,13
	vcmpneh. 21,22,23
	vcmpnew 12,13,14
	vcmpnew. 22,23,24
	vcmpnezb 13,14,15
	vcmpnezb. 23,24,25
	vcmpnezh 14,15,16
	vcmpnezh. 24,25,26
	vcmpnezw 15,16,17
	vcmpnezw. 25,26,27
	vextublx 16,17,10
	vextubrx 17,18,11
	vextuhlx 18,19,12
	vextuhrx 19,20,13
	vextuwlx 20,21,14
	vextuwrx 21,22,15
	dtstsfi 0,0,3
	dtstsfi 7,0x3f,3
	dtstsfiq 0,0,4
	dtstsfiq 7,0x3f,4
	xscvhpdp 40,50
	xscvdphp 41,51
	xvcvhpsp 42,52
	xvcvsphp 43,53
	lnia 3
	addpcis 3,0
	subpcis 3,0
	addpcis 4,1
	subpcis 4,-1
	addpcis 5,-2
	subpcis 5,2
	addpcis 6,0x7fff
	subpcis 6,-0x7fff
	addpcis 7,-0x8000
	subpcis 7,0x8000
	slbsync
	slbiag 10
	slbieg 10,11
	slbmfee 3,4
	slbmfee 3,4,0
	slbmfee 3,4,1
	slbmfev 4,5
	slbmfev 4,5,0
	slbmfev 4,5,1
	tlbie 3,4
	tlbie 3,4,0,0,0
	tlbie 3,4,3,1,1
	tlbiel 3
	tlbiel 3,0,0,0,0
	tlbiel 3,4,3,1,1
	copy 12,13
	paste. 10,11
	cpabort
	hwsync
	sync
	sync 0
	lwsync
	sync 1
	ptesync
	sync 2
	ldat 20,0,0x0
	ldat 20,10,0x1c
	lwat 21,0,0x0
	lwat 21,11,0x1c
	stdat 22,0,0x0
	stdat 22,12,0x1c
	stwat 23,0,0x0
	stwat 23,13,0x1c
	urfid
	rmieg 30
	stop
	wait
	wait 0
	darn 3,0
	darn 3,1
	darn 3,2
	mcrxrx 0
	mcrxrx 7
	vslv 20,21,22
	vsrv 23,24,25
	msgsync
	xscmpeqdp 30,40,50
	xscmpgtdp 31,41,51
	xscmpgedp 32,42,52
	xsmincdp 34,44,54
	xsmaxcdp 35,45,55
	xsminjdp 36,46,56
	xsmaxjdp 37,47,57
	vmsumudm 20,21,22,23
	addex 11,12,13,0
	addex 11,12,13,1
	addex 11,12,13,2
	mffs 25
	mffs. 25
	mffsce 26
	mffscdrn 27,20
	mffscdrni 28,0
	mffscdrni 28,7
	mffscrn 29,21
	mffscrni 30,0
	mffscrni 30,3
	mffsl 31
	scv 0
	scv 127
	rfscv
	dcbf 0,3
	dcbf 0,3,0
	dcbfl 0,4
	dcbf 0,4,1
	dcbflp 0,5
	dcbf 0,5,3
	ori 31,31,0
	exser
	msgsndu 3
	msgclru 27
# stsp/binutils-ia16 | 4,175 bytes
# gas/testsuite/gas/ppc/lsp-checks.s
# Test PA LSP operands checks
# GAS testsuite input: deliberately ILLEGAL LSP operand combinations (odd
# GPR where an even pair is required, immediates that are not multiples of
# the access size, zero update-form offsets).  The assembler is expected to
# reject these lines; the .equ names encode which constraint each violates.
	.section ".text"
	.equ rA,1
	.equ rB,2
	.equ rD,0 ;# ok
	.equ rD_odd, 1 ;# GPR odd is illegal
	.equ rS,0 ;# ok
	.equ rS_odd, 1 ;# GPR odd is illegal
	.equ UIMM_GT15, 16 ;# UIMM values >15 are illegal
	.equ UIMM_2, 2 ;# ok
	.equ UIMM_2_ILL, 3 ;# 3 is not a multiple of 2
	.equ UIMM_2_ZERO, 0 ;# UIMM = 00000 is illegal if U=1
	.equ UIMM_4, 4 ;# ok
	.equ UIMM_4_ILL, 3 ;# 3 is not a multiple of 4
	.equ UIMM_4_ZERO, 0 ;# UIMM = 00000 is illegal if U=1
	.equ UIMM_8, 8 ;# ok
	.equ UIMM_8_ILL, 7 ;# 7 is not a multiple of 8
	.equ UIMM_8_ZERO, 0 ;# UIMM = 00000 is illegal if U=1
	.equ offset, 0 ;# invalid offset
	zxtrw rD, rA, rB, offset
	zvsrhiu rD, rA, UIMM_GT15
	zvsrhis rD, rA, UIMM_GT15
	zvslhi rD, rA, UIMM_GT15
	zvrlhi rD, rA, UIMM_GT15
	zvslhius rD, rA, UIMM_GT15
	zvslhiss rD, rA, UIMM_GT15
	zldd rD_odd, UIMM_8(rA)
	zldd rD, UIMM_8_ILL(rA)
	zldw rD_odd, UIMM_8(rA)
	zldw rD, UIMM_8_ILL(rA)
	zldh rD_odd, UIMM_8(rA)
	zldh rD, UIMM_8_ILL(rA)
	zlwgsfd rD_odd, UIMM_4(rA)
	zlwgsfd rD, UIMM_4_ILL(rA)
	zlwwosd rD_odd, UIMM_4(rA)
	zlwwosd rD, UIMM_4_ILL(rA)
	zlwhsplatwd rD_odd, UIMM_4(rA)
	zlwhsplatwd rD, UIMM_4_ILL(rA)
	zlwhsplatd rD_odd, UIMM_4(rA)
	zlwhsplatd rD, UIMM_4_ILL(rA)
	zlwhgwsfd rD_odd, UIMM_4(rA)
	zlwhgwsfd rD, UIMM_4_ILL(rA)
	zlwhed rD_odd, UIMM_4(rA)
	zlwhed rD, UIMM_4_ILL(rA)
	zlwhosd rD_odd, UIMM_4(rA)
	zlwhosd rD, UIMM_4_ILL(rA)
	zlwhoud rD_odd, UIMM_4(rA)
	zlwh rD, UIMM_4_ILL(rA)
	zlww rD, UIMM_4_ILL(rA)
	zlhgwsf rD, UIMM_2_ILL(rA)
	zlhhsplat rD, UIMM_2_ILL(rA)
	zstdd rS_odd, UIMM_8(rA)
	zstdd rS, UIMM_8_ILL(rA)
	zstdw rS_odd, UIMM_8(rA)
	zstdw rS, UIMM_8_ILL(rA)
	zstdh rS_odd, UIMM_8(rA)
	zstdh rS, UIMM_8_ILL(rA)
	zstwhed rS_odd, UIMM_4(rA)
	zstwhed rS, UIMM_4_ILL(rA)
	zstwhod rS_odd, UIMM_4(rA)
	zstwhod rS, UIMM_4_ILL(rA)
	zlhhe rD, UIMM_2_ILL(rA)
	zlhhos rD, UIMM_2_ILL(rA)
	zlhhou rD, UIMM_2_ILL(rA)
	zsthe rS, UIMM_2_ILL(rA)
	zstho rS, UIMM_2_ILL(rA)
	zstwh rS, UIMM_4_ILL(rA)
	zstww rS, UIMM_4_ILL(rA)
	zlddu rD_odd, UIMM_8(rA)
	zlddu rD, UIMM_8_ZERO(rA)
	zldwu rD_odd, UIMM_8(rA)
	zldwu rD, UIMM_8_ZERO(rA)
	zldhu rD_odd, UIMM_8(rA)
	zldhu rD, UIMM_8_ZERO(rA)
	zlwgsfdu rD_odd, UIMM_4(rA)
	zlwgsfdu rD, UIMM_4_ZERO(rA)
	zlwwosdu rD_odd, UIMM_4(rA)
	zlwwosdu rD, UIMM_4_ZERO(rA)
	zlwhsplatwdu rD_odd, UIMM_4(rA)
	zlwhsplatwdu rD, UIMM_4_ZERO(rA)
	zlwhsplatdu rD_odd, UIMM_4(rA)
	zlwhsplatdu rD, UIMM_4_ZERO(rA)
	zlwhgwsfdu rD_odd, UIMM_4(rA)
	zlwhgwsfdu rD, UIMM_4_ZERO(rA)
	zlwhedu rD_odd, UIMM_4(rA)
	zlwhedu rD, UIMM_4_ZERO(rA)
	zlwhosdu rD_odd, UIMM_4(rA)
	zlwhosdu rD, UIMM_4_ZERO(rA)
	zlwhoudu rD_odd, UIMM_4(rA)
	zlwhoudu rD, UIMM_4_ZERO(rA)
	zlwhu rD, UIMM_4_ZERO(rA)
	zlwwu rD, UIMM_4_ZERO(rA)
	zlhgwsfu rD, UIMM_2_ZERO(rA)
	zlhhsplatu rD, UIMM_2_ZERO(rA)
	zstddu rS, UIMM_8_ZERO(rA)
	zstdwu rS_odd, UIMM_8(rA)
	zstdwu rS, UIMM_8_ZERO(rA)
	zstdhu rS_odd, UIMM_8(rA)
	zstdhu rS, UIMM_8_ZERO(rA)
	zstwhedu rS_odd, UIMM_4(rA)
	zstwhedu rS, UIMM_4_ZERO(rA)
	zstwhodu rS_odd, UIMM_4(rA)
	zstwhodu rS, UIMM_4_ZERO(rA)
	zlhheu rD, UIMM_2_ZERO(rA)
	zlhhosu rD, UIMM_2_ZERO(rA)
	zlhhouu rD, UIMM_2_ZERO(rA)
	zstheu rS, UIMM_2_ZERO(rA)
	zsthou rS, UIMM_2_ZERO(rA)
	zstwhu rS, UIMM_4_ZERO(rA)
	zstwwu rS, UIMM_4_ZERO(rA)
# stsp/binutils-ia16 | 2,599 bytes
# gas/testsuite/gas/ppc/prefix-pcrel.s
# GAS testsuite input: prefixed (8-byte) instructions -- paddi/pla/pli/psubi
# aliases and the prefixed load/store family with 34-bit displacements.  The
# existing comments state which canonical form each group must disassemble
# to; operand values are fixed test vectors and must not be changed.
	.text
prefix:
	# The following should all disassemble to: paddi rX,rY,disp
	pla 10,0(9)
	paddi 10,9,0
	paddi 10,9,0,0
	pla 11,~(1<<15)(9)
	paddi 11,9,~(1<<15)
	paddi 11,9,~(1<<15),0
	pla 12,8589934591(9)
	psubi 12,9,-8589934591
	psubi 12,9,-8589934591,0
	paddi 12,9,8589934591
	paddi 12,9,8589934591,0
	pla 13,-8589934592(9)
	psubi 13,9,8589934592
	psubi 13,9,8589934592,0
	paddi 13,9,-8589934592
	paddi 13,9,-8589934592,0
	# The following should all disassemble to: pla rX,disp
	pla 14,0
	paddi 14,0,0,1
	pla 15,~(1<<15)
	psubi 15,0,-(~(1<<15)),1
	paddi 15,0,~(1<<15),1
	pla 16,8589934591
	psubi 16,0,-8589934591,1
	paddi 16,0,8589934591,1
	pla 17,-8589934592
	psubi 17,0,8589934592,1
	paddi 17,0,-8589934592,1
	# The following should all disassemble to: pli rX,immed
	pli 20,13
	pla 20,13(0)
	psubi 20,0,-13
	paddi 20,0,13
	pli 21,~(1<<15)
	pla 21,~(1<<15)(0)
	paddi 21,0,~(1<<15)
	pli 22,8589934591
	pla 22,8589934591(0)
	psubi 22,0,-8589934591
	psubi 22,0,-8589934591,0
	paddi 22,0,8589934591
	paddi 22,0,8589934591,0
	pli 23,-8589934592
	pla 23,-8589934592(0)
	psubi 23,0,8589934592
	psubi 23,0,8589934592,0
	paddi 23,0,-8589934592
	paddi 23,0,-8589934592,0
	# Tests of prefix loads and stores
	plbz 3,0(1)
	plbz 3,0(1),0
	plbz 3,~(1<<15)(1)
	plbz 3,~(1<<15)(1),0
	plbz 3,8589934591(1)
	plbz 3,8589934591(1),0
	plbz 3,-8589934592(1)
	plbz 3,-8589934592(1),0
	plbz 3,0(0)
	plbz 4,0(0),1
	plbz 4,0
	plbz 3,~(1<<15)(0)
	plbz 4,~(1<<15)(0),1
	plbz 4,~(1<<15)
	plbz 3,8589934591(0)
	plbz 4,8589934591(0),1
	plbz 4,8589934591
	plbz 3,-8589934592(0)
	plbz 4,-8589934592(0),1
	plbz 4,-8589934592
	plhz 5,4(10),0
	plhz 5,4(0),1
	plha 6,8(10),0
	plha 6,8(0),1
	plwz 7,12(10),0
	plwz 7,12(0),1
	plwa 8,16(10),0
	plwa 8,16(0),1
	pld 9,20(10),0
	pld 9,20(0),1
	plfs 10,24(10),0
	plfs 10,24(0),1
	plfd 11,28(10),0
	plfd 11,28(0),1
	plxsd 13,36(10),0
	plxsd 13,36(0),1
	plxssp 14,40(10),0
	plxssp 14,40(0),1
	plq 16,48(10),0
	plq 16,48(0),1
	plxv 17,64(10),0
	plxv 17,64(0),1
	plxv 34,64(10),0
	plxv 34,64(0),1
	pstb 3,52(11),0
	pstb 3,52(0),1
	psth 4,56(11),0
	psth 4,56(0),1
	pstw 5,60(11),0
	pstw 5,60(0),1
	pstfs 6,64(11),0
	pstfs 6,64(0),1
	pstfd 7,68(11),0
	pstfd 7,68(0),1
	pstxsd 9,76(11),0
	pstxsd 9,76(0),1
	pstxssp 10,80(11),0
	pstxssp 10,80(0),1
	pstd 11,84(11),0
	pstd 11,84(0),1
	pstq 12,88(11),0
	pstq 12,88(0),1
	pstxv 13,96(11),0
	pstxv 13,96(0),1
	pstxv 63,96(11),0
	pstxv 63,96(0),1
	# test d34 values of interest when bfd_vma is 32-bit
	pli 1,0x7fffffff
	pli 2,0x80000000
	pli 3,0xffffffff
	pli 4,0
	pli 5,-1
	pli 6,-0x7fffffff
	pli 7,-0x80000000
	pli 8,~0x7fffffff
	pli 9,~0x80000000
# stsp/binutils-ia16 | 6,110 bytes
# gas/testsuite/gas/ppc/power8.s
# GAS testsuite input: POWER8 (ISA 2.07) instructions -- transactional memory,
# bctar and its many extended branch mnemonics, crypto/vector 2.07 ops, VSX
# single-precision scalars, and eh-hint forms of the larx/stcx family.
# Operand values are fixed encoding test vectors; do not change them.
	.text
power8:
	tabort. 5
	tabortwc. 7,8,16
	tabortdc. 20,11,10
	tabortwci. 17,10,-13
	tabortdci. 29,3,-5
	tbegin. 0
	tcheck 7
	tend. 0
	tend.
	tend. 1
	tendall.
	treclaim. 24
	trechkpt.
	tsr. 0
	tsuspend.
	tsr. 1
	tresume.
	ori 2,2,0
	.p2align 4,,15
	rfebb 0
	rfebb
	rfebb 1
	bctar- 12,21
	bctarl- 4,7
	bctar+ 12,12
	bctarl+ 4,2
	bctar 4,8,1
	bctarl 4,7,2
	waitasec
	msgsndp 8
	mtsle 1
	msgclrp 27
	stqcx. 10,10,12
	stqcx. 28,0,7
	lqarx 24,19,11,0
	lqarx 22,0,11,0
	mfbhrbe 20,6
	pbt. 29,17,16
	pbt. 14,0,7
	clrbhrb
	vpermxor 11,10,0,23
	vaddeuqm 24,2,7,4
	vaddecuq 10,10,8,2
	vsubeuqm 5,15,8,19
	vsubecuq 12,31,16,29
	vmulouw 20,29,13
	vmuluwm 29,0,26
	vaddudm 8,21,28
	vmaxud 1,26,1
	vrld 20,3,1
	vcmpequd 4,19,11
	vadduqm 23,14,30
	vaddcuq 8,8,13
	vmulosw 28,27,4
	vmaxsd 3,4,4
	vmuleuw 0,19,21
	vminud 24,20,19
	vcmpgtud 0,28,15
	vmulesw 21,0,2
	vminsd 9,26,9
	vsrad 25,29,11
	vcmpgtsd 11,28,11
	bcdadd. 5,8,26,1
	vpmsumb 4,3,12
	bcdsub. 26,31,21,1
	vpmsumh 5,17,16
	vpkudum 23,17,20
	vpmsumw 24,21,29
	vpmsumd 9,22,13
	vpkudus 18,19,18
	vsubuqm 30,16,22
	vcipher 14,11,7
	vcipherlast 10,2,22
	vgbbd 23,13
	vsubcuq 16,25,16
	vorc 31,1,5
	vncipher 4,17,31
	vncipherlast 24,2,27
	vbpermq 23,21,23
	vpksdus 27,18,9
	vnand 27,29,27
	vsld 19,19,24
	vsbox 5,13
	vpksdss 25,3,7
	vcmpequd. 28,8,0
	vupkhsw 26,26
	vshasigmaw 5,7,0,6
	veqv 28,21,14
	vmrgow 1,8,19
	vshasigmad 0,10,0,10
	vsrd 5,27,14
	vupklsw 11,13
	vclzb 14,16
	vpopcntb 20,27
	vclzh 28,11
	vpopcnth 24,9
	vclzw 27,31
	vpopcntw 17,19
	vclzd 12,29
	vpopcntd 23,22
	vcmpgtud. 24,20,29
	vcmpgtsd. 9,6,27
	lxsiwzx 62,14,26
	lxsiwzx 40,0,25
	lxsiwax 25,0,26
	lxsiwax 3,0,3
	mfvsrd 12,62
	mfvsrwz 20,12
	stxsiwx 14,9,14
	stxsiwx 21,0,8
	mtvsrd 48,11
	mtvsrwa 63,23
	mtvsrwz 16,26
	lxsspx 13,19,13
	lxsspx 18,0,13
	stxsspx 43,2,4
	stxsspx 55,0,11
	xsaddsp 54,48,25
	xsmaddasp 14,50,1
	xssubsp 26,22,42
	xsmaddmsp 27,53,52
	xsrsqrtesp 8,59
	xssqrtsp 12,41
	xsmulsp 57,11,32
	xsmsubasp 38,20,26
	xsdivsp 26,19,6
	xsmsubmsp 35,37,55
	xsresp 59,8
	xsnmaddasp 44,33,33
	xsnmaddmsp 17,62,30
	xsnmsubasp 54,52,31
	xsnmsubmsp 37,5,58
	xxlorc 30,54,44
	xxlnand 49,14,29
	xxleqv 62,22,30
	xscvdpspn 60,54
	xsrsp 22,45
	xscvuxdsp 26,59
	xscvsxdsp 38,49
	xscvspdpn 59,26
	fmrgow 24,14,2
	fmrgew 22,7,5
	msgsnd 14
	msgclr 23
	lxvx 40,0,5
	lxvd2x 40,0,5
	lxvx 10,20,6
	lxvd2x 10,20,6
	stxvx 41,0,7
	stxvd2x 41,0,7
	stxvx 11,21,8
	stxvd2x 11,21,8
	lbarx 20,0,7
	lbarx 20,0,7,0
	lbarx 20,0,7,1
	lbarx 20,1,7
	lbarx 20,1,7,0
	lbarx 20,1,7,1
	ldarx 21,0,8
	ldarx 21,0,8,0
	ldarx 21,0,8,1
	ldarx 21,1,8
	ldarx 21,1,8,0
	ldarx 21,1,8,1
	lharx 22,0,9
	lharx 22,0,9,0
	lharx 22,0,9,1
	lharx 22,1,9
	lharx 22,1,9,0
	lharx 22,1,9,1
	lwarx 23,0,10
	lwarx 23,0,10,0
	lwarx 23,0,10,1
	lwarx 23,1,10
	lwarx 23,1,10,0
	lwarx 23,1,10,1
	stbcx. 10,0,7
	stbcx. 10,1,7
	sthcx. 11,0,8
	sthcx. 11,1,8
	stwcx. 12,0,9
	stwcx. 12,1,9
	stdcx. 13,0,10
	stdcx. 13,1,10
	bctar 0b10100,lt
	btar
	bdnztar
	bdnztarl
	bdztar
	bdztarl
	btar
	btarl
	bdnztar-
	bdnztarl-
	bdnztar+
	bdnztarl+
	bdztar-
	bdztarl-
	bdztar+
	bdztarl+
	bgetar cr4
	bnltar cr4
	bgetarl cr4
	bnltarl cr4
	bletar cr4
	bngtar cr4
	bletarl cr4
	bngtarl cr4
	bnetar cr4
	bnetarl cr4
	bnstar cr4
	bnutar cr4
	bnstarl cr4
	bnutarl cr4
	bgetar- cr4
	bnltar- cr4
	bgetarl- cr4
	bnltarl- cr4
	bletar- cr4
	bngtar- cr4
	bletarl- cr4
	bngtarl- cr4
	bnetar- cr4
	bnetarl- cr4
	bnstar- cr4
	bnutar- cr4
	bnstarl- cr4
	bnutarl- cr4
	bgetar+ cr4
	bnltar+ cr4
	bgetarl+ cr4
	bnltarl+ cr4
	bletar+ cr4
	bngtar+ cr4
	bletarl+ cr4
	bngtarl+ cr4
	bnetar+ cr4
	bnetarl+ cr4
	bnstar+ cr4
	bnutar+ cr4
	bnstarl+ cr4
	bnutarl+ cr4
	blttar cr4
	blttarl cr4
	bgttar cr4
	bgttarl cr4
	beqtar cr4
	beqtarl cr4
	bsotar cr4
	buntar cr4
	bsotarl cr4
	buntarl cr4
	blttar- cr4
	blttarl- cr4
	bgttar- cr4
	bgttarl- cr4
	beqtar- cr4
	beqtarl- cr4
	bsotar- cr4
	buntar- cr4
	bsotarl- cr4
	buntarl- cr4
	blttar+ cr4
	blttarl+ cr4
	bgttar+ cr4
	bgttarl+ cr4
	beqtar+ cr4
	beqtarl+ cr4
	bsotar+ cr4
	buntar+ cr4
	bsotarl+ cr4
	buntarl+ cr4
	bdnzftar 4*cr2+lt
	bdnzftarl 4*cr2+lt
	bdzftar 4*cr2+lt
	bdzftarl 4*cr2+lt
	bftar 4*cr2+lt
	bftarl 4*cr2+lt
	bftar- 4*cr2+lt
	bftarl- 4*cr2+lt
	bftar+ 4*cr2+lt
	bftarl+ 4*cr2+lt
	bdnzttar 4*cr2+lt
	bdnzttarl 4*cr2+lt
	bdzttar 4*cr2+lt
	bdzttarl 4*cr2+lt
	bttar 4*cr2+lt
	bttarl 4*cr2+lt
	bttar- 4*cr2+lt
	bttarl- 4*cr2+lt
	bttar+ 4*cr2+lt
	bttarl+ 4*cr2+lt
	bctar- 0b01100,4*cr2+eq
	bctarl- 0b01100,4*cr2+eq
	bctar+ 0b01100,4*cr2+eq
	bctarl+ 0b01100,4*cr2+eq
	bctar 0b01100,4*cr2+gt
	bctar 0b01100,4*cr2+gt,0
	bctar 0b01100,4*cr2+gt,3
	bctarl 0b01100,4*cr2+gt
	bctarl 0b01100,4*cr2+gt,0
	bctarl 0b01100,4*cr2+gt,3
	or 26,26,26
	miso
# stsp/binutils-ia16 | 1,327 bytes
# gas/testsuite/gas/ppc/power4.s
# GAS testsuite input: POWER4 instructions -- lq/stq with various relocation
# operators (@l, @toc, @got, @sectoff), attn, mtcrf/mfcr field forms, dcbz(l),
# sync variants, and larx/stcx forms.  Operand values and symbol expressions
# are fixed test vectors; do not change them.
	.data
	.p2align 4
dsym0:	.llong	0xdeadbeef
	.llong	0xc0ffee
dsym1:
	.section ".toc"
	.p2align 4
.L_tsym0:
	.tc ignored0[TC],dsym0
	.tc ignored1[TC],dsym1
.L_tsym1:
	.tc ignored2[TC],usym0
	.tc ignored3[TC],usym1
	.text
	.p2align 4
	lq 4,dsym0@l(3)
	lq 4,dsym1@l(3)
	lq 4,usym0@l(3)
	lq 4,usym1@l(3)
	lq 4,esym0@l(3)
	lq 4,esym1@l(3)
	lq 4,.L_tsym0@toc(2)
	lq 4,.L_tsym1@toc(2)
	lq 4,.text@l(0)
	lq 6,dsym0@got(3)
	lq 6,dsym0@got@l(3)
	lq 6,dsym0@plt@l(3)
	lq 6,dsym1@sectoff(3)
	lq 6,dsym1@sectoff@l(3)
	lq 6,usym1-dsym0@l(4)
	stq 6,0(7)
	stq 6,16(7)
	stq 6,-16(7)
	stq 6,-32768(7)
	stq 6,32752(7)
	attn
	mtcr 3
	mtcrf 0xff,3
	mtcrf 0x81,3
	mtcrf 0x01,3
	mtcrf 0x02,3
	mtcrf 0x04,3
	mtcrf 0x08,3
	mtcrf 0x10,3
	mtcrf 0x20,3
	mtcrf 0x40,3
	mtcrf 0x80,3
	mfcr 3
	# mfcr 3,0xff	#Error, invalid mask
	# mfcr 3,0x81	#Error, invalid mask
	mfcr 3,0x01
	mfcr 3,0x02
	mfcr 3,0x04
	mfcr 3,0x08
	mfcr 3,0x10
	mfcr 3,0x20
	mfcr 3,0x40
	mfcr 3,0x80
	dcbz 1, 2
	dcbzl 3, 4
	dcbz 5, 6
	lq 2,16(0)
	lq 0,16(5)
	lq 2,16(5)
	stq 2,16(0)
	stq 0,16(5)
	stq 2,16(5)
	slbia
	hwsync
	sync
	sync 0
	lwsync
	sync 1
	ptesync
	sync 2
	lwarx 20,0,6
	lwarx 20,1,6
	ldarx 21,0,7
	ldarx 21,1,7
	stwcx. 22,0,8
	stwcx. 22,1,8
	stdcx. 23,0,9
	stdcx. 23,1,9
	.section ".data"
usym0:	.llong	0xcafebabe
	.llong	0xc0ffee
usym1:
# stsp/binutils-ia16 | 2,429 bytes
# gas/testsuite/gas/ppc/common.s
# GAS testsuite input: instructions common to all PowerPC processors --
# logical/branch/CR ops, FP moves and compares, loads/stores with update and
# indexed forms, SPR moves, and extended mnemonics (mr/not/crmove/nop/xnop)
# paired with the base forms they should encode identically to.  Operand
# values are fixed test vectors; do not change them.
	.text
start:
	and. 3,4,5
	and 3,4,5
	andc 13,14,15
	andc. 16,17,18
	ba label_abs
	bc 0,1,foo
	bca 4,5,foo_abs
	bcl 2,3,foo
	bcla 10,7,foo_abs
	bctr
	bctrl
	bdza foo_abs
	bdz foo
	bdzla foo_abs
	bdzl foo
	beq 0,foo
	beqa 2,foo_abs
	beql 1,foo
	beqla 3,foo_abs
	bge 0,foo
	bgea 4,foo_abs
	bgel 2,foo
	bgela 6,foo_abs
	bgt 4,foo
	bgta 6,foo_abs
	bgtl 5,foo
	bgtla 7,foo_abs
	b label
	bla label_abs
	ble 0,foo
	blea 4,foo
	blel 2,foo
	blela 6,foo_abs
	bl label
	blt 0,foo
	blta 2,foo_abs
	bltl 1,foo
	bltla 3,foo_abs
	bne 0,foo
	bnea 2,foo
	bnel 1,foo
	bnela 3,foo_abs
	bng 1,foo
	bnga 5,foo_abs
	bngl 3,foo
	bngla 7,foo_abs
	bnl 1,foo
	bnla 5,foo_abs
	bnll 3,foo
	bnlla 7,foo_abs
	bns 4,foo
	bnsa 6,foo_abs
	bnsl 5,foo
	bnsla 7,foo_abs
	bso 4,foo
	bsoa 6,foo_abs
	bsol 5,foo
	bsola 7,foo_abs
	crand 4,5,6
	crandc 3,4,5
	creqv 7,0,1
	crnand 1,2,3
	crnor 0,1,2
	cror 5,6,7
	crmove 5,6
	cror 5,6,6
	crorc 2,3,4
	crxor 6,7,0
	eqv. 10,11,12
	eqv 10,11,12
	fabs. 21,31
	fabs 21,31
	fcmpo 3,10,11
	fcmpu 3,4,5
	fmr. 3,4
	fmr 3,4
	fnabs. 20,30
	fnabs 20,30
	fneg. 3,4
	fneg 3,4
	frsp 6,7
	frsp. 8,9
	lbz 9,0(1)
	lbzu 10,1(1)
	lbzux 20,21,22
	lbzx 3,4,5
	lfd 21,8(1)
	lfdu 22,16(1)
	lfdux 20,21,22
	lfdx 13,14,15
	lfs 19,0(1)
	lfsu 20,4(1)
	lfsux 10,11,12
	lfsx 10,11,12
	lha 15,6(1)
	lhau 16,8(1)
	lhaux 9,10,11
	lhax 9,10,11
	lhbrx 3,4,5
	lhz 13,0(1)
	lhzu 14,2(1)
	lhzux 20,22,24
	lhzx 23,24,25
	mcrf 0,1
	mcrfs 3,4
	mcrxr 3
	mfcr 3
	mfctr 3
	mfdar 5
	mfdsisr 4
	mffs 30
	mffs. 31
	mflr 2
	mfmsr 19
	mfocrf 3,0x80
	mfrtcl 1
	mfrtcu 0
	mfsdr1 6
	mfspr 3,0x80
	mfsrr0 7
	mfsrr1 8
	mfxer 30
	mr. 30,31
	or. 30,31,31
	mr 30,31
	or 30,31,31
	mtcr 3
	mtcrf 0x80,3
	mtctr 19
	mtdar 21
	mtdec 24
	mtdsisr 20
	mtfsb0. 3
	mtfsb0 3
	mtfsb1. 3
	mtfsb1 3
	mtfsf 6,10
	mtfsf. 6,11
	mtfsfi 6,0
	mtfsfi. 6,15
	mtlr 18
	mtmsr 10
	mtocrf 0x80,3
	mtrtcl 23
	mtrtcu 22
	mtsdr1 25
	mtspr 0x80,3
	mtsrr0 26
	mtsrr1 27
	mtxer 17
	nand. 28,29,30
	nand 28,29,30
	neg. 3,4
	neg 3,4
	nego 16,17
	nego. 18,19
	nor. 20,21,22
	nor 20,21,22
	not. 20,21
	nor. 20,21,21
	not 20,21
	nor 20,21,21
	or 0,2,4
	or. 12,14,16
	orc 15,16,17
	orc. 18,19,20
	rfi
	stb 11,2(1)
	stbu 12,3(1)
	stbux 13,14,15
	stbx 3,4,5
	stfd 25,32(1)
	stfdu 26,40(1)
	stfdux 0,1,2
	stfdx 29,30,31
	stfs 23,20(1)
	stfsu 24,24(1)
	stfsux 26,27,28
	stfsx 23,24,25
	sth 17,10(1)
	sthbrx 6,7,8
	sthu 18,12(1)
	sthux 21,22,23
	sthx 12,13,14
	xor. 29,30,31
	xor 29,30,31
	nop
	ori 0,0,0
	xnop
	xori 0,0,0
# stsp/binutils-ia16 | 7,404 bytes
# gas/testsuite/gas/ppc/spe.s
# PA SPE instructions
# GAS testsuite input: the SPE (signal processing engine) instruction set.
# Register/immediate operands are supplied via .equ names so each mnemonic is
# exercised with one legal operand combination; values are fixed test vectors
# and must not be changed.  The trailing group covers SPE mnemonics that the
# assembler maps via macros onto evfs* forms.
	.section ".text"
	.equ rA,1
	.equ rB,2
	.equ rD,0
	.equ rS,0
	.equ rT,0
	.equ UIMM, 31
	.equ UIMM_2, 2
	.equ UIMM_4, 4
	.equ UIMM_8, 8
	.equ SIMM, -16
	.equ crD, 0
	.equ crS, 0
	evaddw rS, rA, rB
	evaddiw rS, rB, UIMM
	evsubfw rS, rA, rB
	evsubw rS, rB, rA
	evsubifw rS, UIMM, rB
	evsubiw rS, rB, UIMM
	evabs rS, rA
	evneg rS, rA
	evextsb rS, rA
	evextsh rS, rA
	evrndw rS, rA
	evcntlzw rS, rA
	evcntlsw rS, rA
	brinc rS, rA, rB
	evand rS, rA, rB
	evandc rS, rA, rB
	evxor rS, rA, rB
	evmr rS, rA
	evor rS, rA, rA
	evor rS, rA, rB
	evnor rS, rA, rB
	evnot rS, rA
	evnor rS, rA, rA
	eveqv rS, rA, rB
	evorc rS, rA, rB
	evnand rS, rA, rB
	evsrwu rS, rA, rB
	evsrws rS, rA, rB
	evsrwiu rS, rA, UIMM
	evsrwis rS, rA, UIMM
	evslw rS, rA, rB
	evslwi rS, rA, UIMM
	evrlw rS, rA, rB
	evsplati rS, SIMM
	evrlwi rS, rA, UIMM
	evsplatfi rS, SIMM
	evmergehi rS, rA, rB
	evmergelo rS, rA, rB
	evmergehilo rS, rA, rB
	evmergelohi rS, rA, rB
	evcmpgtu crD, rA, rB
	evcmpgts crD, rA, rB
	evcmpltu crD, rA, rB
	evcmplts crD, rA, rB
	evcmpeq crD, rA, rB
	evsel rS, rA, rB, crS
	evfsadd rS, rA, rB
	evfssub rS, rA, rB
	evfsmadd rS, rA, rB
	evfsmsub rS, rA, rB
	evfsabs rS, rA
	evfsnabs rS, rA
	evfsneg rS, rA
	evfsmul rS, rA, rB
	evfsdiv rS, rA, rB
	evfsnmadd rS, rA, rB
	evfsnmsub rS, rA, rB
	evfscmpgt crD, rA, rB
	evfscmplt crD, rA, rB
	evfscmpeq crD, rA, rB
	evfscfui rS, rB
	evfscfsi rS, rB
	evfscfuf rS, rB
	evfscfsf rS, rB
	evfsctui rS, rB
	evfsctsi rS, rB
	evfsctuf rS, rB
	evfsctsf rS, rB
	evfsctuiz rS, rB
	evfsctsiz rS, rB
	evfststgt crD, rA, rB
	evfststlt crD, rA, rB
	evfststeq crD, rA, rB
	evlddx rS, rA, rB
	evldd rS, UIMM_8(rA)
	evldwx rS, rA, rB
	evldw rS, UIMM_8(rA)
	evldhx rS, rA, rB
	evldh rS, UIMM_8(rA)
	evlhhesplatx rS, rA, rB
	evlhhesplat rS, UIMM_2(rA)
	evlhhousplatx rS, rA, rB
	evlhhousplat rS, UIMM_2(rA)
	evlhhossplatx rS, rA, rB
	evlhhossplat rS, UIMM_2(rA)
	evlwhex rS, rA, rB
	evlwhe rS, UIMM_4(rA)
	evlwhoux rS, rA, rB
	evlwhou rS, UIMM_4(rA)
	evlwhosx rS, rA, rB
	evlwhos rS, UIMM_4(rA)
	evlwwsplatx rS, rA, rB
	evlwwsplat rS, UIMM_4(rA)
	evlwhsplatx rS, rA, rB
	evlwhsplat rS, UIMM_4(rA)
	evstddx rS, rA, rB
	evstdd rS, UIMM_8(rA)
	evstdwx rS, rA, rB
	evstdw rS, UIMM_8(rA)
	evstdhx rS, rA, rB
	evstdh rS, UIMM_8(rA)
	evstwhex rS, rA, rB
	evstwhe rS, UIMM_4(rA)
	evstwhox rS, rA, rB
	evstwho rS, UIMM_4(rA)
	evstwwex rS, rA, rB
	evstwwe rS, UIMM_4(rA)
	evstwwox rS, rA, rB
	evstwwo rS, UIMM_4(rA)
	evmhessf rS, rA, rB
	evmhossf rS, rA, rB
	evmheumi rS, rA, rB
	evmhesmi rS, rA, rB
	evmhesmf rS, rA, rB
	evmhoumi rS, rA, rB
	evmhosmi rS, rA, rB
	evmhosmf rS, rA, rB
	evmhessfa rS, rA, rB
	evmhossfa rS, rA, rB
	evmheumia rS, rA, rB
	evmhesmia rS, rA, rB
	evmhesmfa rS, rA, rB
	evmhoumia rS, rA, rB
	evmhosmia rS, rA, rB
	evmhosmfa rS, rA, rB
	evmwlssf rD, rA, rB
	evmwhssf rS, rA, rB
	evmwlumi rS, rA, rB
	evmwlsmf rD, rA, rB
	evmwhumi rS, rA, rB
	evmwhsmi rS, rA, rB
	evmwhsmf rS, rA, rB
	evmwssf rS, rA, rB
	evmwumi rS, rA, rB
	evmwsmi rS, rA, rB
	evmwsmf rS, rA, rB
	evmwlssfa rD, rA, rB
	evmwhssfa rS, rA, rB
	evmwlumia rS, rA, rB
	evmwlsmfa rD, rA, rB
	evmwhumia rS, rA, rB
	evmwhsmia rS, rA, rB
	evmwhsmfa rS, rA, rB
	evmwssfa rS, rA, rB
	evmwumia rS, rA, rB
	evmwsmia rS, rA, rB
	evmwsmfa rS, rA, rB
	evaddusiaaw rS, rA
	evaddssiaaw rS, rA
	evsubfusiaaw rS, rA
	evsubfssiaaw rS, rA
	evmra rS, rA
	evdivws rS, rA, rB
	evdivwu rS, rA, rB
	evaddumiaaw rS, rA
	evaddsmiaaw rS, rA
	evsubfumiaaw rS, rA
	evsubfsmiaaw rS, rA
	evmheusiaaw rS, rA, rB
	evmhessiaaw rS, rA, rB
	evmhessfaaw rS, rA, rB
	evmhousiaaw rS, rA, rB
	evmhossiaaw rS, rA, rB
	evmhossfaaw rS, rA, rB
	evmheumiaaw rS, rA, rB
	evmhesmiaaw rS, rA, rB
	evmhesmfaaw rS, rA, rB
	evmhoumiaaw rS, rA, rB
	evmhosmiaaw rS, rA, rB
	evmhosmfaaw rS, rA, rB
	evmhegumiaa rS, rA, rB
	evmhegsmiaa rS, rA, rB
	evmhegsmfaa rS, rA, rB
	evmhogumiaa rS, rA, rB
	evmhogsmiaa rS, rA, rB
	evmhogsmfaa rS, rA, rB
	evmwlusiaaw rS, rA, rB
	evmwlssiaaw rS, rA, rB
	evmwlssfaaw rD, rA, rB
	evmwhusiaa rD, rA, rB
	evmwhssmaa rD, rA, rB
	evmwhssfaa rD, rA, rB
	evmwlumiaaw rS, rA, rB
	evmwlsmiaaw rS, rA, rB
	evmwlsmfaaw rD, rA, rB
	evmwhumiaa rD, rA, rB
	evmwhsmiaa rD, rA, rB
	evmwhsmfaa rD, rA, rB
	evmwssfaa rS, rA, rB
	evmwumiaa rS, rA, rB
	evmwsmiaa rS, rA, rB
	evmwsmfaa rS, rA, rB
	evmwhgumiaa rD, rA, rB
	evmwhgsmiaa rD, rA, rB
	evmwhgssfaa rD, rA, rB
	evmwhgsmfaa rD, rA, rB
	evmheusianw rS, rA, rB
	evmhessianw rS, rA, rB
	evmhessfanw rS, rA, rB
	evmhousianw rS, rA, rB
	evmhossianw rS, rA, rB
	evmhossfanw rS, rA, rB
	evmheumianw rS, rA, rB
	evmhesmianw rS, rA, rB
	evmhesmfanw rS, rA, rB
	evmhoumianw rS, rA, rB
	evmhosmianw rS, rA, rB
	evmhosmfanw rS, rA, rB
	evmhegumian rS, rA, rB
	evmhegsmian rS, rA, rB
	evmhegsmfan rS, rA, rB
	evmhogumian rS, rA, rB
	evmhogsmian rS, rA, rB
	evmhogsmfan rS, rA, rB
	evmwlusianw rS, rA, rB
	evmwlssianw rS, rA, rB
	evmwlssfanw rD, rA, rB
	evmwhusian rD, rA, rB
	evmwhssian rD, rA, rB
	evmwhssfan rD, rA, rB
	evmwlumianw rS, rA, rB
	evmwlsmianw rS, rA, rB
	evmwlsmfanw rD, rA, rB
	evmwhumian rD, rA, rB
	evmwhsmian rD, rA, rB
	evmwhsmfan rD, rA, rB
	evmwssfan rS, rA, rB
	evmwumian rS, rA, rB
	evmwsmian rS, rA, rB
	evmwsmfan rS, rA, rB
	evmwhgumian rD, rA, rB
	evmwhgsmian rD, rA, rB
	evmwhgssfan rD, rA, rB
	evmwhgsmfan rD, rA, rB
	evlddepx rT, rA, rB
	evstddepx rT, rA, rB
	;#SPE mapped by macro
	evsadd rS, rA, rB
	evssub rS, rA, rB
	evsabs rS, rA
	evsnabs rS, rA
	evsneg rS, rA
	evsmul rS, rA, rB
	evsdiv rS, rA, rB
	evscmpgt crD, rA, rB
	evsgmplt crD, rA, rB
	evsgmpeq crD, rA, rB
	evscfui rS, rB
	evscfsi rS, rB
	evscfuf rS, rB
	evscfsf rS, rB
	evsctui rS, rB
	evsctsi rS, rB
	evsctuf rS, rB
	evsctsf rS, rB
	evsctuiz rS, rB
	evsctsiz rS, rB
	evststgt crD, rA, rB
	evststlt crD, rA, rB
	evststeq crD, rA, rB
# stsp/binutils-ia16 | 6,857 bytes
# gas/testsuite/gas/ppc/476.s
.text
ppc476:
add 3,4,5
add. 3,4,5
addc 3,4,5
addc. 3,4,5
addco 3,4,5
addco. 3,4,5
adde 3,4,5
adde. 3,4,5
addeo 3,4,5
addeo. 3,4,5
addi 3,4,-128
addic 3,4,-128
addic. 3,4,-128
addis 3,4,-128
addme 3,4
addme. 3,4
addmeo 3,4
addmeo. 3,4
addo 3,4,5
addo. 3,4,5
addze 3,4
addze. 3,4
addzeo 3,4
addzeo. 3,4
and 3,4,5
and. 3,4,5
andc 13,14,15
andc. 16,17,18
andi. 3,4,0xdead
andis. 3,4,0xdead
ba label_abs
bc 0,1,foo
bca 4,5,foo_abs
bcctr 12,0,0
bcctr 4,10,0
bcctr 4,6
bcctr 4,6,0
bcctrl 12,0,0
bcctrl 4,10,0
bcctrl 4,6
bcctrl 4,6,0
bcl 2,3,foo
bclr 12,0,0
bclr 4,10,0
bclr 4,6
bclr 4,6,0
bclrl 12,0,0
bclrl 4,10,0
bclrl 4,6
bclrl 4,6,0
b label
bl label
clrrwi 3,4,4
cmp 0,0,3,4
cmp 7,0,3,4
cmpb 3,4,5
cmpb 3,4,5
cmpi 0,0,3,-167
cmpi 7,0,3,-167
cmpl 0,0,3,4
cmpl 7,0,3,4
cmpli 0,0,3,167
cmpli 7,0,3,167
cmplw 3,4
cmplwi 3,167
cmpw 3,4
cmpwi 3,-167
cntlzw 10,11
cntlzw. 10,11
crand 4,5,6
crandc 3,4,5
creqv 7,0,1
crnand 1,2,3
crnor 0,1,2
cror 5,6,7
crorc 2,3,4
crxor 6,7,0
dcba 9, 10
dcbf 6,7
dcbf 6,7,0
dcbi 6,7
dcblc 4, 5, 6
dcbst 6,7
dcbt 0,5,6
dcbt 5,6
dcbt 8,5,6
dcbtls 7, 8, 9
dcbtst 0,6,7
dcbtst 6,7
dcbtst 9,6,7
dcbtstls 10, 11, 12
dcbz 1,2
dcbz 5,6
dccci
dci
dci 0
dci 1
divw 10,11,12
divw. 11,12,13
divwo 10,11,12
divwo. 11,12,13
divwu 10,11,12
divwu. 11,12,13
divwuo 10,11,12
divwuo. 11,12,13
dlmzb 3,4,5
dlmzb. 3,4,5
eqv 10,11,12
eqv. 10,11,12
extlwi 3,4,20,4
extsb 3,4
extsb. 3,4
extsh 3,4
extsh. 3,4
fabs 21,31
fabs. 21,31
fadd 10,11,12
fadd. 10,11,12
fadds 10,11,12
fadds. 10,11,12
fcfid 10,11
fcfid. 10,11
fcmpo 3,10,11
fcmpu 3,4,5
fcpsgn 10,11,12
fcpsgn. 10,11,12
fctid 10,11
fctid. 10,11
fctidz 10,11
fctidz. 10,11
fctiw 10,11
fctiw. 10,11
fctiwz 10,11
fctiwz. 10,11
fdiv 10,11,12
fdiv. 10,11,12
fdivs 10,11,12
fdivs. 10,11,12
fmadd 10,11,12,13
fmadd. 10,11,12,13
fmadds 10,11,12,13
fmadds. 10,11,12,13
fmr 3,4
fmr. 3,4
fmsub 10,11,12,13
fmsub. 10,11,12,13
fmsubs 10,11,12,13
fmsubs. 10,11,12,13
fmul 10,11,12
fmul. 10,11,12
fmuls 10,11,12
fmuls. 10,11,12
fnabs 20,30
fnabs. 20,30
fneg 3,4
fneg. 3,4
fnmadd 10,11,12,13
fnmadd. 10,11,12,13
fnmadds 10,11,12,13
fnmadds. 10,11,12,13
fnmsub 10,11,12,13
fnmsub. 10,11,12,13
fnmsubs 10,11,12,13
fnmsubs. 10,11,12,13
fre 14,15
fre. 14,15
fres 14,15
fres. 14,15
frim 10,11
frim. 10,11
frin 10,11
frin. 10,11
frip 10,11
frip. 10,11
friz 10,11
friz. 10,11
frsp 6,7
frsp. 8,9
frsqrte 14,15
frsqrte. 14,15
frsqrtes 14,15
frsqrtes. 14,15
fsel 10,11,12,13
fsel. 10,11,12,13
fsqrt 10,11
fsqrt. 10,11
fsqrts 10,11
fsqrts. 10,11
fsub 10,11,12
fsub. 10,11,12
fsubs 10,11,12
fsubs. 10,11,12
icbi 3,4
icblc 16, 17, 18
icbt 5, 8, 9
icbtls 13, 14, 15
iccci
ici
ici 0
ici 1
icread 3,4
inslwi 3,4,8,20
isel 2,3,4,28
isync
lbz 9,0(1)
lbzu 10,1(1)
lbzux 20,21,22
lbzx 3,4,5
lfd 21,8(1)
lfdu 22,16(1)
lfdux 20,21,22
lfdx 13,14,15
lfiwax 10,3,4
lfs 19,0(1)
lfsu 20,4(1)
lfsux 10,11,12
lfsx 10,11,12
lha 15,6(1)
lhau 16,8(1)
lhaux 9,10,11
lhax 9,10,11
lhbrx 3,4,5
lhz 13,0(1)
lhzu 14,2(1)
lhzux 20,22,24
lhzx 23,24,25
lwarx 3,4,5
lwarx 3,4,5,0
lwarx 3,4,5,1
lwbrx 3,4,5
lwz 6,0(7)
lwzu 3,16(1)
lwzux 3,4,5
lwzx 3,4,5
macchw 3,4,5
macchw. 3,4,5
macchwo 3,4,5
macchwo. 3,4,5
macchws 3,4,5
macchws. 3,4,5
macchwso 3,4,5
macchwso. 3,4,5
macchwsu 3,4,5
macchwsu. 3,4,5
macchwsuo 3,4,5
macchwsuo. 3,4,5
macchwu 3,4,5
macchwu. 3,4,5
macchwuo 3,4,5
macchwuo. 3,4,5
machhw 3,4,5
machhw. 3,4,5
machhwo 3,4,5
machhwo. 3,4,5
machhws 3,4,5
machhws. 3,4,5
machhwso 3,4,5
machhwso. 3,4,5
machhwsu 3,4,5
machhwsu. 3,4,5
machhwsuo 3,4,5
machhwsuo. 3,4,5
machhwu 3,4,5
machhwu. 3,4,5
machhwuo 3,4,5
machhwuo. 3,4,5
maclhw 3,4,5
maclhw. 3,4,5
maclhwo 3,4,5
maclhwo. 3,4,5
maclhws 3,4,5
maclhws. 3,4,5
maclhwso 3,4,5
maclhwso. 3,4,5
maclhwsu 3,4,5
maclhwsu. 3,4,5
maclhwsuo 3,4,5
maclhwsuo. 3,4,5
maclhwu 3,4,5
maclhwu. 3,4,5
maclhwuo 3,4,5
maclhwuo. 3,4,5
mbar
mbar 0
mbar 1
mcrf 0,1
mcrfs 3,4
mcrxr 3
mfcr 3
mfcr 3
mfdcr 5, 234
mfdcrux 3,4
mfdcrx 4, 5
mffs 30
mffs. 31
mfmsr 19
mfocrf 3,0x80
mfspr 3,0x80
mftb 3
msync
mtcrf 0x80,3
mtcrf 0xff,3
mtdcr 432, 8
mtdcrux 3,4
mtdcrx 6, 7
mtfsb0 3
mtfsb0. 3
mtfsb1 3
mtfsb1. 3
mtfsf 6,10
mtfsf 6,10,0,0
mtfsf 6,10,0,1
mtfsf 6,10,1,0
mtfsf. 6,11
mtfsf. 6,11,0,0
mtfsf. 6,11,0,1
mtfsf. 6,11,1,0
mtfsfi 6,0
mtfsfi 6,0
mtfsfi 6,0,0
mtfsfi 6,0,1
mtfsfi. 6,15
mtfsfi. 6,15
mtfsfi. 6,15,0
mtfsfi. 6,15,1
mtmsr 10
mtocrf 0x80,3
mtspr 0x80,3
mulchw 3,4,5
mulchw. 3,4,5
mulchwu 3,4,5
mulchwu. 3,4,5
mulhhw 3,4,5
mulhhw. 3,4,5
mulhhwu 3,4,5
mulhhwu. 3,4,5
mulhw 3,4,5
mulhw. 3,4,5
mulhwu 3,4,5
mulhwu. 3,4,5
mullhw 3,4,5
mullhw. 3,4,5
mullhwu 3,4,5
mullhwu. 3,4,5
mulli 3,4,5
mullw 3,4,5
mullw. 3,4,5
mullwo 3,4,5
mullwo. 3,4,5
nand 28,29,30
nand. 28,29,30
neg 3,4
neg. 3,4
nego 16,17
nego. 18,19
nmacchw 3,4,5
nmacchw. 3,4,5
nmacchwo 3,4,5
nmacchwo. 3,4,5
nmacchws 3,4,5
nmacchws. 3,4,5
nmacchwso 3,4,5
nmacchwso. 3,4,5
nmachhw 3,4,5
nmachhw. 3,4,5
nmachhwo 3,4,5
nmachhwo. 3,4,5
nmachhws 3,4,5
nmachhws. 3,4,5
nmachhwso 3,4,5
nmachhwso. 3,4,5
nmaclhw 3,4,5
nmaclhw. 3,4,5
nmaclhwo 3,4,5
nmaclhwo. 3,4,5
nmaclhws 3,4,5
nmaclhws. 3,4,5
nmaclhwso 3,4,5
nmaclhwso. 3,4,5
nor 20,21,22
nor. 20,21,22
or 0,2,4
or. 12,14,16
orc 15,16,17
orc. 18,19,20
ori 1,1,0
oris 3,4,0xdead
popcntb 3,4
prtyw 3,4
rfci
rfi
rfmci
rlwimi 3,4,12,20,27
rlwimi. 3,4,12,20,27
rlwinm 3,4,0,0,27
rlwinm 3,4,26,6,31
rlwinm 3,4,4,0,19
rlwinm. 3,4,0,0,27
rlwnm 3,4,5,0,31
rlwnm. 3,4,5,0,31
rotlw 3,4,5
rotlw. 3,4,5
sc
slw 3,4,5
slw. 3,4,5
sraw 3,4,5
sraw. 3,4,5
srawi 3,4,16
srawi. 3,4,16
srw 3,4,5
srw. 3,4,5
srwi 3,4,6
stb 11,2(1)
stbu 12,3(1)
stbux 13,14,15
stbx 3,4,5
stfd 25,32(1)
stfdu 26,40(1)
stfdux 0,1,2
stfdx 29,30,31
stfiwx 10,3,4
stfs 23,20(1)
stfsu 24,24(1)
stfsux 26,27,28
stfsx 23,24,25
sth 17,10(1)
sthbrx 6,7,8
sthu 18,12(1)
sthux 21,22,23
sthx 12,13,14
stw 6,-16(7)
stwbrx 3,4,5
stwcx. 3,4,5
stwu 3,16(1)
stwux 3,4,5
stwx 3,4,5
subf 3,4,5
subf. 3,4,5
subfc 3,4,5
subfc. 3,4,5
subfco 3,4,5
subfco. 3,4,5
subfe 3,4,5
subfe. 3,4,5
subfeo 3,4,5
subfeo. 3,4,5
subfic 3,4,5
subfme 3,4
subfme. 3,4
subfmeo 3,4
subfmeo. 3,4
subfo 3,4,5
subfo. 3,4,5
subfze 3,4
subfze. 3,4
subfzeo 3,4
subfzeo. 3,4
tlbivax 7, 8
tlbre 1, 2, 7
tlbsx 11, 12
tlbsx 12, 13, 14
tlbsx. 12, 13, 14
tlbsync
tlbwe
tlbwe 0,0,0
tlbwe 1,1,1
trap
tw 31,0,0
tw 4,3,4
tw 5,3,4
tweq 3,4
twgti 3,15
twi 6,3,15
twi 8,3,15
twlge 3,4
twllei 3,15
wrtee 3
wrteei 1
xor 29,30,31
xor. 29,30,31
xori 3,4,0xdead
xoris 3,4,0xdead
|
stsp/binutils-ia16
| 1,087
|
gas/testsuite/gas/ppc/vle-mult-ld-st-insns.s
|
# VLE Instructions for Improving Interrupt Handler Efficiency (e200z760RM.pdf)
# The original Engineering Bulletin (EB696.pdf) gives two spellings for the
# load instructions and does not define the MCSRR forms.
# e_lmvgprw, e_stmvgprw - load/store multiple volatile GPRs (r0, r3:r12)
# e_lmvsprw, e_stmvsprw - load/store multiple volatile SPRs (CR, LR, CTR, and XER)
# e_lmvsrrw, e_stmvsrrw - load/store multiple volatile SRRs (SRR0, SRR1)
# e_lmvcsrrw, e_stmvcsrrw - load/store multiple volatile CSRRs (CSRR0, CSRR1)
# e_lmvdsrrw, e_stmvdsrrw - load/store multiple volatile DSRRs (DSRR0, DSRR1)
# e_lmvmcsrrw, e_stmvmcsrrw - load/store multiple volatile MCSRRs (MCSRR0, MCSRR1)
	.text
# Handler entry: store each volatile register group at a distinct offset,
# using a different base register per instruction to exercise the RA field.
prolog:
	e_stmvgprw 0x00 (r1)
	e_stmvsprw 0x04 (r2)
	e_stmvsrrw 0x08 (r3)
	e_stmvcsrrw 0x0c (r4)
	e_stmvdsrrw 0x10 (r5)
	e_stmvmcsrrw 0x14 (r6)
# Handler exit: reload the same groups via the e_l* spellings.
epilog:
	e_lmvgprw 0x18 (r7)
	e_lmvsprw 0x1c (r8)
	e_lmvsrrw 0x20 (r9)
	e_lmvcsrrw 0x24 (r10)
	e_lmvdsrrw 0x28 (r11)
	e_lmvmcsrrw 0x2c (r12)
# Alternative e_ld* spellings of the loads (the EB696 variant); note there is
# no e_ldmvmcsrrw here, matching the header comment above.
epilog_alt:
	e_ldmvgprw 0x30 (r13)
	e_ldmvsprw 0x34 (r14)
	e_ldmvsrrw 0x38 (r15)
	e_ldmvcsrrw 0x3c (r16)
	e_ldmvdsrrw 0x40 (r17)
|
stsp/binutils-ia16
| 1,383
|
gas/testsuite/gas/ppc/broadway.s
|
# PowerPC Broadway instruction tests
	.text
start:
# Instruction/data address breakpoint registers (IABR/DABR), read and write.
	mfiabr 0
	mtiabr 1
	mfdabr 2
	mtdabr 3
# Graphics quantization registers: read GQR0-GQR7 into r4-r11, then write
# them back from r4-r11.
	mfgqr 4, 0
	mfgqr 5, 1
	mfgqr 6, 2
	mfgqr 7, 3
	mfgqr 8, 4
	mfgqr 9, 5
	mfgqr 10, 6
	mfgqr 11, 7
	mtgqr 0, 4
	mtgqr 1, 5
	mtgqr 2, 6
	mtgqr 3, 7
	mtgqr 4, 8
	mtgqr 5, 9
	mtgqr 6, 10
	mtgqr 7, 11
# Write-gather pipe address register and DMA low/high registers.
	mfwpar 12
	mtwpar 13
	mfdmal 14
	mtdmal 15
	mfdmau 16
	mtdmau 17
# Hardware implementation-dependent registers HID0/1/2/4.
	mfhid0 18
	mthid0 19
	mfhid1 20
	mthid1 21
	mfhid2 22
	mthid2 23
	mfhid4 24
	mthid4 25
# Instruction BAT registers: upper halves for all eight BATs (mf/mt
# interleaved), then the lower halves.
	mfibatu 0, 0
	mtibatu 0, 1
	mfibatu 2, 1
	mtibatu 1, 3
	mfibatu 4, 2
	mtibatu 2, 5
	mfibatu 6, 3
	mtibatu 3, 7
	mfibatu 8, 4
	mtibatu 4, 9
	mfibatu 10, 5
	mtibatu 5, 11
	mfibatu 12, 6
	mtibatu 6, 13
	mfibatu 14, 7
	mtibatu 7, 15
	mfibatl 16, 0
	mtibatl 0, 17
	mfibatl 18, 1
	mtibatl 1, 19
	mfibatl 20, 2
	mtibatl 2, 21
	mfibatl 22, 3
	mtibatl 3, 23
	mfibatl 24, 4
	mtibatl 4, 25
	mfibatl 26, 5
	mtibatl 5, 27
	mfibatl 28, 6
	mtibatl 6, 29
	mfibatl 30, 7
	mtibatl 7, 31
# Data BAT registers: same upper/lower pattern as the IBATs above.
	mfdbatu 0, 0
	mtdbatu 0, 1
	mfdbatu 2, 1
	mtdbatu 1, 3
	mfdbatu 4, 2
	mtdbatu 2, 5
	mfdbatu 6, 3
	mtdbatu 3, 7
	mfdbatu 8, 4
	mtdbatu 4, 9
	mfdbatu 10, 5
	mtdbatu 5, 11
	mfdbatu 12, 6
	mtdbatu 6, 13
	mfdbatu 14, 7
	mtdbatu 7, 15
	mfdbatl 16, 0
	mtdbatl 0, 17
	mfdbatl 18, 1
	mtdbatl 1, 19
	mfdbatl 20, 2
	mtdbatl 2, 21
	mfdbatl 22, 3
	mtdbatl 3, 23
	mfdbatl 24, 4
	mtdbatl 4, 25
	mfdbatl 26, 5
	mtdbatl 5, 27
	mfdbatl 28, 6
	mtdbatl 6, 29
	mfdbatl 30, 7
	mtdbatl 7, 31
|
stsp/binutils-ia16
| 1,226
|
gas/testsuite/gas/ppc/test1elf64.s
|
	.section ".data"
dsym0: .llong 0xdeadbeef
dsym1:
# TOC entries for three symbol classes: defined above (dsym*), defined later
# in this file (usym*), and never defined here (esym* - external).
	.section ".toc"
.L_tsym0:
	.tc ignored0[TC],dsym0
.L_tsym1:
	.tc ignored1[TC],dsym1
.L_tsym2:
	.tc ignored2[TC],usym0
.L_tsym3:
	.tc ignored3[TC],usym1
.L_tsym4:
	.tc ignored4[TC],esym0
.L_tsym5:
	.tc ignored5[TC],esym1
	.section ".text"
# Direct @l references against each symbol class.
	ld 3,dsym0@l(3)
	ld 3,dsym1@l(3)
	ld 3,usym0@l(3)
	ld 3,usym1@l(3)
	ld 3,esym0@l(3)
	ld 3,esym1@l(3)
# TOC-relative accesses through the entries created above, including the
# split @toc@ha / @toc@l pair.
	ld 3,.L_tsym0@toc(2)
	ld 3,.L_tsym1@toc(2)
	ld 3,.L_tsym2@toc(2)
	ld 3,.L_tsym3@toc(2)
	ld 3,.L_tsym4@toc(2)
	ld 3,.L_tsym5@toc(2)
	lis 4,.L_tsym5@toc@ha
	ld 3,.L_tsym5@toc@l(2)
# Symbol-difference immediates, then the full relocation operator family
# (@l/@h/@ha/@higher/@highera/@highest/@highesta) on a symbol and on a
# symbol difference.
	li 3,dsym1-dsym0
	li 3,dsym0-dsym1
	li 3,usym1-usym0
	li 3,usym0-usym1
	li 3,dsym0-usym0
	li 3,usym0-dsym0
	li 3,dsym0@l
	li 3,dsym0@h
	li 3,dsym0@ha
	li 3,dsym0@higher
	li 3,dsym0@highera
	li 3,dsym0@highest
	li 3,dsym0@highesta
	li 3,usym0-usym1@l
	li 3,usym0-usym1@h
	li 3,usym0-usym1@ha
	li 3,usym0-usym1@higher
	li 3,usym0-usym1@highera
	li 3,usym0-usym1@highest
	li 3,usym0-usym1@highesta
	ld 3,dsym1-dsym0@l(4)
	ld 3,.text@l(0)
# Data referencing a symbol (jk) that is never defined in this file.
	.section ".data"
usym0: .llong 0xcafebabe
usym1:
datpt: .long jk-.+10000000
dat0: .long jk-dat1
dat1: .long jk-dat1
dat2: .long jk-dat1
dat3: .llong jk-dat1
dat4: .llong jk-dat1
|
stsp/binutils-ia16
| 8,176
|
gas/testsuite/gas/ppc/a2.s
|
	.text
# PowerPC A2 core instruction tests: one or more encodings per mnemonic,
# covering record (.) and overflow (o) variants where they exist.
start:
# Integer add/logical ops, with positive and negative immediates.
	add. 4,5,6
	add 4,5,6
	addc. 4,5,6
	addc 4,5,6
	addco. 4,5,6
	addco 4,5,6
	adde. 4,5,6
	adde 4,5,6
	addeo. 4,5,6
	addeo 4,5,6
	addi 4,5,13
	addi 4,5,-13
	addic. 4,5,13
	addic. 4,5,-13
	addic 4,5,13
	addic 4,5,-13
	addis 4,5,23
	addis 4,5,-23
	addme. 4,5
	addme 4,5
	addmeo. 4,5
	addmeo 4,5
	addo. 4,5,6
	addo 4,5,6
	addze. 4,5
	addze 4,5
	addzeo. 4,5
	addzeo 4,5
	and. 4,5,6
	and 4,5,6
	andc. 4,5,6
	andc 4,5,6
	andi. 4,5,6
	andis. 4,5,6
	attn
# Branch forms: conditional, absolute, to CTR/LR, with +/- prediction hints.
	ba label_abs
	bc 4,10,foo
	bc- 4,10,foo
	bc+ 4,10,foo
	bca 4,5,foo_abs
	bca- 4,5,foo_abs
	bca+ 4,5,foo_abs
	bcctr 4,6,1
	bcctr- 4,6
	bcctr+ 4,6
	bcctrl 4,6,1
	bcctrl- 4,6
	bcctrl+ 4,6
	bcl 4,10,foo
	bcl- 4,10,foo
	bcl+ 4,10,foo
	bcla 4,5,foo_abs
	bcla- 4,5,foo_abs
	bcla+ 4,5,foo_abs
	bclr 4,6,1
	bclr- 4,6
	bclr+ 4,6
	bclrl 4,6,1
	bclrl- 4,6
	bclrl+ 4,6
	b label
	bla label_abs
	bl label
	bpermd 10,11,12
# Compares, count-leading-zeros and CR-field logic.
	cmp 1,1,7,8
	cmpb 10,11,12
	cmpi 1,1,10,13
	cmpi 1,1,10,-13
	cmpl 1,1,7,8
	cmpli 1,1,10,100
	cntlzd. 20,21
	cntlzd 20,21
	cntlzw. 20,21
	cntlzw 20,21
	crand 1,2,3
	crandc 1,2,3
	creqv 1,2,3
	crnand 1,2,3
	crnor 1,2,3
	cror 1,2,3
	crorc 1,2,3
	crxor 1,2,3
# Data cache management, including external-PID (ep) and lock (ls) forms.
	dcba 10,11
	dcbf 10,11,0
	dcbf 10,11,1
	dcbfep 10,11
	dcbi 10,11
	dcblc 0,10,11
	dcblc 1,10,11
	dcbst 10,11
	dcbstep 10,11
	dcbt 0,10,11
	dcbt 1,10,11
	dcbtep 10,11,12
	dcbtls 0,10,11
	dcbtls 1,10,11
	dcbtst 0,10,11
	dcbtst 1,10,11
	dcbtstep 10,11,12
	dcbtstls 0,10,11
	dcbtstls 1,10,11
	dcbz 10,11
	dcbzep 10,11
	dccci
	dci
	dci 0
	dci 10
# Divides (word and doubleword, signed/unsigned, with overflow forms).
	divd. 20,21,22
	divd 20,21,22
	divdo. 20,21,22
	divdo 20,21,22
	divdu. 20,21,22
	divdu 20,21,22
	divduo. 20,21,22
	divduo 20,21,22
	divw. 20,21,22
	divw 20,21,22
	divwo. 20,21,22
	divwo 20,21,22
	divwu. 20,21,22
	divwu 20,21,22
	divwuo. 20,21,22
	divwuo 20,21,22
	eqv. 20,21,22
	eqv 20,21,22
# ERAT management (A2-specific), then sign-extension ops.
	eratilx 0,10,11
	eratilx 1,10,11
	eratilx 7,10,11
	erativax 10,11,12
	eratre 10,11,0
	eratre 10,11,3
	eratsx. 10,11,12
	eratsx 10,11,12
	eratwe 10,11,0
	eratwe 10,11,3
	extsb. 10,11
	extsb 10,11
	extsh. 10,11
	extsh 10,11
	extsw. 10,11
	extsw 10,11
# Floating point: arithmetic, conversions, fused multiply-add family,
# rounding and estimate instructions (with optional L operand forms).
	fabs. 20,21
	fabs 20,21
	fadd. 20,21,22
	fadd 20,21,22
	fadds. 20,21,22
	fadds 20,21,22
	fcfid. 20,21
	fcfid 20,21
	fcmpo 0,20,21
	fcmpo 1,20,21
	fcmpu 0,20,21
	fcmpu 1,20,21
	fcpsgn. 20,21,22
	fcpsgn 20,21,22
	fctid. 20,21
	fctid 20,21
	fctidz. 20,21
	fctidz 20,21
	fctiw. 20,21
	fctiw 20,21
	fctiwz. 20,21
	fctiwz 20,21
	fdiv. 20,21,22
	fdiv 20,21,22
	fdivs. 20,21,22
	fdivs 20,21,22
	fmadd. 20,21,22,23
	fmadd 20,21,22,23
	fmadds. 20,21,22,23
	fmadds 20,21,22,23
	fmr. 20,21
	fmr 20,21
	fmsub. 20,21,22,23
	fmsub 20,21,22,23
	fmsubs. 20,21,22,23
	fmsubs 20,21,22,23
	fmul. 20,21,22
	fmul 20,21,22
	fmuls. 20,21,22
	fmuls 20,21,22
	fnabs. 20,21
	fnabs 20,21
	fneg. 20,21
	fneg 20,21
	fnmadd. 20,21,22,23
	fnmadd 20,21,22,23
	fnmadds. 20,21,22,23
	fnmadds 20,21,22,23
	fnmsub. 20,21,22,23
	fnmsub 20,21,22,23
	fnmsubs. 20,21,22,23
	fnmsubs 20,21,22,23
	fre. 20,21
	fre 20,21
	fre. 20,21,0
	fre 20,21,0
	fre. 20,21,1
	fre 20,21,1
	fres. 20,21
	fres 20,21
	fres. 20,21,0
	fres 20,21,0
	fres. 20,21,1
	fres 20,21,1
	frim. 20,21
	frim 20,21
	frin. 20,21
	frin 20,21
	frip. 20,21
	frip 20,21
	friz. 20,21
	friz 20,21
	frsp. 20,21
	frsp 20,21
	frsqrte. 20,21
	frsqrte 20,21
	frsqrte. 20,21,0
	frsqrte 20,21,0
	frsqrte. 20,21,1
	frsqrte 20,21,1
	frsqrtes 20,21
	frsqrtes. 20,21
	frsqrtes 20,21,0
	frsqrtes. 20,21,0
	frsqrtes 20,21,1
	frsqrtes. 20,21,1
	fsel. 20,21,22,23
	fsel 20,21,22,23
	fsqrt. 20,21
	fsqrt 20,21
	fsqrts. 20,21
	fsqrts 20,21
	fsub. 20,21,22
	fsub 20,21,22
	fsubs. 20,21,22
	fsubs 20,21,22
# Instruction cache ops, coprocessor initiate (icswx) and isel.
	icbi 10,11
	icbiep 10,11
	icbt 0,10,11
	icbt 7,10,11
	icbtls 0,10,11
	icbtls 7,10,11
	iccci
	ici
	ici 0
	ici 10
	icswx. 10,11,12
	icswx 10,11,12
	isel 10,11,12,23
	isync
# Loads: byte/half/word/double, update/indexed/byte-reversed/atomic
# (with EH hint), and external-PID (ep) forms.
	lbepx 10,11,12
	lbz 10,-17(11)
	lbz 10,17(11)
	lbzu 10,-1(11)
	lbzu 10,1(11)
	lbzux 10,11,13
	lbzx 10,11,13
	ld 10,-8(11)
	ld 10,8(11)
	ldarx 10,11,12,0
	ldarx 10,11,12,1
	ldbrx 10,11,12
	ldepx 10,11,12
	ldu 10,-8(11)
	ldu 10,8(11)
	ldux 10,11,12
	ldx 10,11,12
	lfd 20,-8(10)
	lfd 20,8(10)
	lfdepx 20,10,11
	lfdu 20,-8(10)
	lfdu 20,8(10)
	lfdux 20,10,11
	lfdx 20,10,11
	lfiwax 20,10,11
	lfiwzx 20,10,11
	lfs 20,-4(10)
	lfs 20,4(10)
	lfsu 20,-4(10)
	lfsu 20,4(10)
	lfsux 20,10,11
	lfsx 20,10,11
	lha 10,2(11)
	lhau 10,-2(11)
	lhaux 10,11,12
	lhax 10,11,12
	lhbrx 10,11,12
	lhepx 10,11,12
	lhz 10,-2(11)
	lhz 10,2(11)
	lhzu 10,-2(11)
	lhzu 10,2(11)
	lhzux 10,11,12
	lhzx 10,11,12
	lwa 10,-4(11)
	lwa 10,4(11)
	lwarx 10,11,12,0
	lwarx 10,11,12,1
	lwaux 10,11,12
	lwax 10,11,12
	lwbrx 10,11,12
	lwepx 10,11,12
	lwz 10,-4(11)
	lwz 10,4(11)
	lwzu 10,-4(11)
	lwzu 10,4(11)
	lwzux 10,11,12
	lwzx 10,11,12
# Barriers, CR/SPR/DCR/FPSCR/MSR moves, message send/clear.
	mbar
	mbar 0
	eieio
	mbar 1
	mcrf 0,1
	mcrfs 3,4
	mcrxr 0
	mcrxr 3
	mfcr 3
	mfcr 3,0x02
	mfcr 3,0x01
	mfcr 3,0x80
	mfdcr. 10,234
	mfdcr 10,234
	mfdcrx. 10,11
	mfdcrx 10,11
	mffs. 20
	mffs 20
	mfmsr 10
	mfocrf 3,0x01
	mfocrf 3,0x80
	mfspr 10,234
	mftb 10,268
	mftb 10,269
	msgclr 10
	msgsnd 10
	mtcrf 0x00,3
	mtcrf 0x01,3
	mtcrf 0x80,3
	mtcrf 0xff,3
	mtdcr. 234,10
	mtdcr 234,10
	mtdcrx. 10,11
	mtdcrx 10,11
	mtfsb0. 3
	mtfsb0 3
	mtfsb1. 3
	mtfsb1 3
	mtfsf. 6,20
	mtfsf 6,20
	mtfsf. 6,20,0,0
	mtfsf 6,20,0,0
	mtfsf. 6,20,1,1
	mtfsf 6,20,1,1
	mtfsfi. 6,0
	mtfsfi 6,0
	mtfsfi. 6,13,0
	mtfsfi 6,13,0
	mtfsfi. 6,13,1
	mtfsfi 6,13,1
	mtmsr 10
	mtmsr 10,0
	mtmsr 10,1
	mtocrf 0x01,3
	mtocrf 0x80,3
	mtspr 234,10
# Multiplies (high/low, word/doubleword) and logical nand/neg/nor/or/xor.
	mulhd. 20,21,22
	mulhd 20,21,22
	mulhdu. 20,21,22
	mulhdu 20,21,22
	mulhw. 20,21,22
	mulhw 20,21,22
	mulhwu. 20,21,22
	mulhwu 20,21,22
	mulld. 20,21,22
	mulld 20,21,22
	mulldo. 20,21,22
	mulldo 20,21,22
	mulli 20,21,100
	mulli 20,21,-100
	mullw. 20,21,22
	mullw 20,21,22
	mullwo. 20,21,22
	mullwo 20,21,22
	nand. 20,21,22
	nand 20,21,22
	neg. 20,21
	neg 20,21
	nego. 20,21
	nego 20,21
	nor. 20,21,22
	nor 20,21,22
	or. 20,21,22
	or 20,21,22
	orc. 20,21,22
	orc 20,21,22
	ori 20,21,0x1000
	oris 20,21,0x1000
	popcntb 10,11
	popcntd 10,11
	popcntw 10,11
	prtyd 10,11
	prtyw 10,11
	rfci
	rfgi
	rfi
	rfmci
# Rotates and shifts (doubleword and word forms).
	rldcl. 10,11,12,0x3f
	rldcl 10,11,12,0x3f
	rldcr. 10,11,12,0x3f
	rldcr 10,11,12,0x3f
	rldic. 10,11,23,0x3f
	rldic 10,11,23,0x3f
	rldicl. 10,11,23,0x3f
	rldicl 10,11,23,0x3f
	rldicr. 10,11,23,0x3f
	rldicr 10,11,23,0x3f
	rldimi. 10,11,23,0x3f
	rldimi 10,11,23,0x3f
	rlwimi. 10,11,23,0,31
	rlwimi 10,11,23,0,31
	rlwinm. 10,11,23,0,31
	rlwinm 10,11,23,0,31
	rlwnm. 10,11,23,0,31
	rlwnm 10,11,23,0,31
	sc
	sc 100
	sld. 10,11,12
	sld 10,11,12
	slw. 10,11,12
	slw 10,11,12
	srad. 10,11,12
	srad 10,11,12
	sradi. 10,11,0x3f
	sradi 10,11,0x3f
	sraw. 10,11,12
	sraw 10,11,12
	srawi. 10,11,31
	srawi 10,11,31
	srd. 10,11,12
	srd 10,11,12
	srw. 10,11,12
	srw 10,11,12
# Stores: byte/half/word/double, update/indexed/byte-reversed/conditional,
# and external-PID (ep) forms.
	stb 10,-1(11)
	stb 10,1(11)
	stbepx 10,11,12
	stbu 10,-1(11)
	stbu 10,1(11)
	stbux 10,11,12
	stbx 10,11,12
	std 10,-8(11)
	std 10,8(11)
	stdbrx 10,11,12
	stdcx. 10,11,12
	stdepx 10,11,12
	stdu 10,-8(11)
	stdu 10,8(11)
	stdux 10,11,12
	stdx 10,11,12
	stfd 20,-8(10)
	stfd 20,8(10)
	stfdepx 20,10,11
	stfdu 20,-8(10)
	stfdu 20,8(10)
	stfdux 20,10,11
	stfdx 20,10,11
	stfiwx 20,10,11
	stfs 20,-4(10)
	stfs 20,4(10)
	stfsu 20,-4(10)
	stfsu 20,4(10)
	stfsux 20,10,11
	stfsx 20,10,11
	sth 10,-2(11)
	sth 10,2(11)
	sth 10,-4(11)
	sth 10,4(11)
	sthbrx 10,11,12
	sthepx 10,11,12
	sthu 10,-2(11)
	sthu 10,2(11)
	sthux 10,11,12
	sthx 10,11,12
	stwbrx 10,11,12
	stwcx. 10,11,12
	stwepx 10,11,12
	stwu 10,-4(11)
	stwu 10,4(11)
	stwux 10,11,12
	stwx 10,11,12
# Subtract family with carry/extend/overflow variants.
	subf. 20,21,22
	subf 20,21,22
	subfc. 20,21,22
	subfc 20,21,22
	subfco. 20,21,22
	subfco 20,21,22
	subfe. 20,21,22
	subfe 20,21,22
	subfeo. 20,21,22
	subfeo 20,21,22
	subfic 20,21,100
	subfic 20,21,-100
	subfme. 20,21
	subfme 20,21
	subfmeo. 20,21
	subfmeo 20,21
	subfo. 20,21,22
	subfo 20,21,22
	subfze. 20,21
	subfze 20,21
	subfzeo. 20,21
	subfzeo 20,21
# Synchronization, traps, TLB management, wait/watch instructions.
	sync
	msync
	sync 0
	sync 1
	td 5,10,11
	tdi 5,10,100
	tdi 5,10,-100
	tlbilx 3,10,11
	tlbivax 10,11
	tlbre
	tlbre 10,11,7
	tlbsrx. 10,11
	tlbsx. 10,11,12
	tlbsx 10,11,12
	tlbsync
	tlbwe
	tlbwe 10,11,7
	tw 5,10,11
	twi 5,10,100
	twi 5,10,-100
	wait
	wait 0
	wait 1
	wait 2
	waitimpl
	waitrsv
	wchkall
	wchkall 0
	wchkall 3
	wclr 1,10,11
	wclrall 1
	wclrone 10,11
	wrtee 10
	wrteei 1
	xor. 10,11,12
	xor 10,11,12
	xori 10,11,0x1000
	xoris 10,11,0x1000
|
stsp/binutils-ia16
| 1,427
|
gas/testsuite/gas/ppc/outerprod.s
|
	.text
# Outer-product accumulator (MMA-style) instruction tests: accumulator
# moves, then the xv*ger* rank-k update families with their prefixed
# masked (pm*) counterparts.
_start:
# Accumulator housekeeping: move-from, move-to, and zero.
	xxmfacc 5
	xxmtacc 6
	xxsetaccz 7
# 4-bit integer rank-8 updates.
	xvi4ger8 0,63,62
	xvi4ger8pp 1,61,60
	pmxvi4ger8 2,59,58,15,14,255
	pmxvi4ger8pp 3,57,56,7,8,128
# 8-bit integer rank-4 updates.
	xvi8ger4 4,55,54
	xvi8ger4pp 5,53,52
	pmxvi8ger4 6,51,50,13,12,11
	pmxvi8ger4pp 7,49,48,10,9,8
# 16-bit integer (saturating) rank-2 updates.
	xvi16ger2s 0,47,46
	xvi16ger2spp 1,45,44
	pmxvi16ger2s 2,43,42,7,6,3
	pmxvi16ger2spp 3,41,40,5,4,2
# Half-precision float rank-2 updates (all four negate/accumulate combos).
	xvf16ger2 4,39,38
	xvf16ger2pp 5,37,36
	xvf16ger2pn 6,35,34
	xvf16ger2np 7,33,32
	xvf16ger2nn 0,4,5
	pmxvf16ger2 1,2,3,3,2,1
	pmxvf16ger2pp 2,4,5,1,0,0
	pmxvf16ger2pn 3,6,7,15,14,3
	pmxvf16ger2np 4,8,9,13,12,2
	pmxvf16ger2nn 5,10,11,11,10,1
# Single-precision float updates.
	xvf32ger 6,12,13
	xvf32gerpp 7,14,15
	xvf32gerpn 0,16,17
	xvf32gernp 1,18,19
	xvf32gernn 2,20,21
	pmxvf32ger 3,22,23,9,8
	pmxvf32gerpp 4,24,25,7,6
	pmxvf32gerpn 5,26,27,5,4
	pmxvf32gernp 6,28,29,3,2
	pmxvf32gernn 7,0,1,1,0
# Double-precision float updates.
	xvf64ger 0,4,5
	xvf64gerpp 1,8,9
	xvf64gerpn 2,2,2
	xvf64gernp 3,4,3
	xvf64gernn 4,4,4
	pmxvf64ger 5,6,5,15,0
	pmxvf64gerpp 6,6,6,14,1
	pmxvf64gerpn 7,8,7,13,2
	pmxvf64gernp 0,4,5,12,3
	pmxvf64gernn 1,2,1,10,0
# bfloat16 rank-2 updates.
	xvbf16ger2pp 2,3,4
	xvbf16ger2 3,4,5
	xvbf16ger2np 4,5,6
	xvbf16ger2pn 5,6,7
	xvbf16ger2nn 6,7,8
	pmxvbf16ger2pp 7,8,9,15,15,3
	pmxvbf16ger2 0,9,10,12,12,2
	pmxvbf16ger2np 1,10,11,10,10,1
	pmxvbf16ger2pn 2,12,13,13,13,0
	pmxvbf16ger2nn 3,16,17,14,14,3
# Remaining integer forms.
	xvi8ger4spp 4,32,33
	pmxvi8ger4spp 5,34,35,15,15,15
	xvi16ger2 6,36,37
	xvi16ger2pp 7,38,39
	pmxvi16ger2 0,38,39,15,15,1
	pmxvi16ger2pp 1,40,41,12,12,2
|
stsp/binutils-ia16
| 2,276
|
gas/testsuite/gas/ppc/vle-reloc.s
|
	.text
# VLE relocation tests: branch relocations first, then for each D-form
# immediate op the @l/@h/@ha operators and their SDA-relative (@sdarel)
# counterparts against undefined symbols.
	se_b sub1
	se_bl sub1
	se_bc 0,1,sub2
	se_bc 1,2,sub2
	e_b sub3
	e_bl sub4
	e_bc 0,5,sub5
	e_bcl 1,10,sub5
	e_or2i 1, low@l
	e_or2i 2, high@h
	e_or2i 3, high_adjust@ha
	e_or2i 4, low_sdarel@sdarel@l
	e_or2i 5, high_sdarel@sdarel@h
	e_or2i 2, high_adjust_sdarel@sdarel@ha
	e_and2i. 1, low@l
	e_and2i. 2, high@h
	e_and2i. 3, high_adjust@ha
	e_and2i. 4, low_sdarel@sdarel@l
	e_and2i. 5, high_sdarel@sdarel@h
	e_and2i. 2, high_adjust_sdarel@sdarel@ha
	e_and2i. 2, high_adjust_sdarel@sdarel@ha
	e_or2is 1, low@l
	e_or2is 2, high@h
	e_or2is 3, high_adjust@ha
	e_or2is 4, low_sdarel@sdarel@l
	e_or2is 5, high_sdarel@sdarel@h
	e_or2is 2, high_adjust_sdarel@sdarel@ha
	e_lis 1, low@l
	e_lis 2, high@h
	e_lis 3, high_adjust@ha
	e_lis 4, low_sdarel@sdarel@l
	e_lis 5, high_sdarel@sdarel@h
	e_lis 2, high_adjust_sdarel@sdarel@ha
	e_and2is. 1, low@l
	e_and2is. 2, high@h
	e_and2is. 3, high_adjust@ha
	e_and2is. 4, low_sdarel@sdarel@l
	e_and2is. 5, high_sdarel@sdarel@h
	e_and2is. 2, high_adjust_sdarel@sdarel@ha
	e_cmp16i 1, low@l
	e_cmp16i 2, high@h
	e_cmp16i 3, high_adjust@ha
	e_cmp16i 4, low_sdarel@sdarel@l
	e_cmp16i 5, high_sdarel@sdarel@h
	e_cmp16i 2, high_adjust_sdarel@sdarel@ha
	e_cmpl16i 1, low@l
	e_cmpl16i 2, high@h
	e_cmpl16i 3, high_adjust@ha
	e_cmpl16i 4, low_sdarel@sdarel@l
	e_cmpl16i 5, high_sdarel@sdarel@h
	e_cmpl16i 2, high_adjust_sdarel@sdarel@ha
	e_cmph16i 1, low@l
	e_cmph16i 2, high@h
	e_cmph16i 3, high_adjust@ha
	e_cmph16i 4, low_sdarel@sdarel@l
	e_cmph16i 5, high_sdarel@sdarel@h
	e_cmph16i 2, high_adjust_sdarel@sdarel@ha
	e_cmphl16i 1, low@l
	e_cmphl16i 2, high@h
	e_cmphl16i 3, high_adjust@ha
	e_cmphl16i 4, low_sdarel@sdarel@l
	e_cmphl16i 5, high_sdarel@sdarel@h
	e_cmphl16i 2, high_adjust_sdarel@sdarel@ha
	e_add2i. 1, low@l
	e_add2i. 2, high@h
	e_add2i. 3, high_adjust@ha
	e_add2i. 4, low_sdarel@sdarel@l
	e_add2i. 5, high_sdarel@sdarel@h
	e_add2i. 2, high_adjust_sdarel@sdarel@ha
	e_add2is 1, low@l
	e_add2is 2, high@h
	e_add2is 3, high_adjust@ha
	e_add2is 4, low_sdarel@sdarel@l
	e_add2is 5, high_sdarel@sdarel@h
	e_add2is 2, high_adjust_sdarel@sdarel@ha
	e_mull2i 1, low@l
	e_mull2i 2, high@h
	e_mull2i 3, high_adjust@ha
	e_mull2i 4, low_sdarel@sdarel@l
	e_mull2i 5, high_sdarel@sdarel@h
	e_mull2i 2, high_adjust_sdarel@sdarel@ha
|
stsp/binutils-ia16
| 4,070
|
gas/testsuite/gas/ppc/titan.s
|
# AppliedMicro Titan tests
	.text
start:
	blr
	tweqi 1, 0
# Multiply-accumulate APU: all mac*/nmac*/mul* half-word variants with
# record (.) and overflow (o) forms.
	macchw 2, 1, 0
	macchw. 2, 1, 0
	macchwo 2, 1, 0
	macchwo. 2, 1, 0
	macchws 2, 1, 0
	macchws. 2, 1, 0
	macchwso 2, 1, 0
	macchwso. 2, 1, 0
	macchwsu 2, 1, 0
	macchwsu. 2, 1, 0
	macchwsuo 2, 1, 0
	macchwsuo. 2, 1, 0
	macchwu 2, 1, 0
	macchwu. 2, 1, 0
	macchwuo 2, 1, 0
	macchwuo. 2, 1, 0
	machhw 2, 1, 0
	machhw. 2, 1, 0
	machhwo 2, 1, 0
	machhwo. 2, 1, 0
	machhws 2, 1, 0
	machhws. 2, 1, 0
	machhwso 2, 1, 0
	machhwso. 2, 1, 0
	machhwsu 2, 1, 0
	machhwsu. 2, 1, 0
	machhwsuo 2, 1, 0
	machhwsuo. 2, 1, 0
	machhwu 2, 1, 0
	machhwu. 2, 1, 0
	machhwuo 2, 1, 0
	machhwuo. 2, 1, 0
	maclhw 2, 1, 0
	maclhw. 2, 1, 0
	maclhwo 2, 1, 0
	maclhwo. 2, 1, 0
	maclhws 2, 1, 0
	maclhws. 2, 1, 0
	maclhwso 2, 1, 0
	maclhwso. 2, 1, 0
	maclhwsu 2, 1, 0
	maclhwsu. 2, 1, 0
	maclhwsuo 2, 1, 0
	maclhwsuo. 2, 1, 0
	maclhwu 2, 1, 0
	maclhwu. 2, 1, 0
	maclhwuo 2, 1, 0
	maclhwuo. 2, 1, 0
	nmacchw 2, 1, 0
	nmacchw. 2, 1, 0
	nmacchwo 2, 1, 0
	nmacchwo. 2, 1, 0
	nmacchws 2, 1, 0
	nmacchws. 2, 1, 0
	nmacchwso 2, 1, 0
	nmacchwso. 2, 1, 0
	nmachhw 2, 1, 0
	nmachhw. 2, 1, 0
	nmachhwo 2, 1, 0
	nmachhwo. 2, 1, 0
	nmachhws 2, 1, 0
	nmachhws. 2, 1, 0
	nmachhwso 2, 1, 0
	nmachhwso. 2, 1, 0
	nmaclhw 2, 1, 0
	nmaclhw. 2, 1, 0
	nmaclhwo 2, 1, 0
	nmaclhwo. 2, 1, 0
	nmaclhws 2, 1, 0
	nmaclhws. 2, 1, 0
	nmaclhwso 2, 1, 0
	nmaclhwso. 2, 1, 0
	mulchw 2, 1, 0
	mulchw. 2, 1, 0
	mulchwu 2, 1, 0
	mulchwu. 2, 1, 0
	mulhhw 2, 1, 0
	mulhhw. 2, 1, 0
	mulhhwu 2, 1, 0
	mulhhwu. 2, 1, 0
	mullhw 2, 1, 0
	mullhw. 2, 1, 0
	mullhwu 2, 1, 0
	mullhwu. 2, 1, 0
	dlmzb 2, 1, 0
	dlmzb. 2, 1, 0
# Cache control: lock/touch forms with explicit CT field, and the
# two-operand shorthand between them.
	dccci 2, 1
	iccci 2, 1
	dcblc 0, 2, 1
	dcblc 2, 1
	dcblc 1, 2, 1
	dcbtls 0, 2, 1
	dcbtls 2, 1
	dcbtls 1, 2, 1
	dcbtstls 0, 2, 1
	dcbtstls 2, 1
	dcbtstls 1, 2, 1
	icblc 0, 2, 1
	icblc 2, 1
	icblc 1, 2, 1
	icbtls 0, 2, 1
	icbtls 2, 1
	icbtls 1, 2, 1
	dcread 2, 1, 0
	icread 2, 1
	mfpmr 2, 1
	mfpmr 1, 2
# SPR access: each mfspr/mtspr by SPR number is immediately followed by
# the equivalent extended mnemonic; both should assemble identically.
	mfspr 4, 0x001
	mfxer 4
	mfspr 4, 0x008
	mflr 4
	mfspr 4, 0x009
	mfctr 4
	mfspr 4, 0x016
	mfdec 4
	mfspr 4, 0x01a
	mfsrr0 4
	mfspr 4, 0x01b
	mfsrr1 4
	mfspr 4, 0x030
	mfpid 4
	mfspr 4, 0x03a
	mfcsrr0 4
	mfspr 4, 0x03b
	mfcsrr1 4
	mfspr 4, 0x03d
	mfdear 4
	mfspr 4, 0x03e
	mfesr 4
	mfspr 4, 0x03f
	mfivpr 4
	mfspr 4, 0x100
	mfusprg0 4
	mfspr 4, 0x104
	mfsprg4 4
	mfspr 4, 0x105
	mfsprg5 4
	mfspr 4, 0x106
	mfsprg6 4
	mfspr 4, 0x107
	mfsprg7 4
	mfspr 4, 0x10c
	mftbl 4
	mftb 4
	mfspr 4, 0x10d
	mftbu 4
	mfspr 4, 0x110
	mfsprg0 4
	mfspr 4, 0x111
	mfsprg1 4
	mfspr 4, 0x112
	mfsprg2 4
	mfspr 4, 0x113
	mfsprg3 4
	mfspr 4, 0x11e
	mfpir 4
	mfspr 4, 0x11f
	mfpvr 4
	mfspr 4, 0x130
	mfdbsr 4
	mfspr 4, 0x134
	mfdbcr0 4
	mfspr 4, 0x135
	mfdbcr1 4
	mfspr 4, 0x136
	mfdbcr2 4
	mfspr 4, 0x138
	mfiac1 4
	mfspr 4, 0x139
	mfiac2 4
	mfspr 4, 0x13a
	mfiac3 4
	mfspr 4, 0x13b
	mfiac4 4
	mfspr 4, 0x13c
	mfdac1 4
	mfspr 4, 0x13d
	mfdac2 4
	mfspr 4, 0x13e
	mfdvc1 4
	mfspr 4, 0x13f
	mfdvc2 4
	mfspr 4, 0x150
	mftsr 4
	mfspr 4, 0x154
	mftcr 4
	mfspr 4, 0x190
	mfivor0 4
	mfspr 4, 0x191
	mfivor1 4
	mfspr 4, 0x192
	mfivor2 4
	mfspr 4, 0x193
	mfivor3 4
	mfspr 4, 0x194
	mfivor4 4
	mfspr 4, 0x195
	mfivor5 4
	mfspr 4, 0x196
	mfivor6 4
	mfspr 4, 0x197
	mfivor7 4
	mfspr 4, 0x198
	mfivor8 4
	mfspr 4, 0x199
	mfivor9 4
	mfspr 4, 0x19a
	mfivor10 4
	mfspr 4, 0x19b
	mfivor11 4
	mfspr 4, 0x19c
	mfivor12 4
	mfspr 4, 0x19d
	mfivor13 4
	mfspr 4, 0x19e
	mfivor14 4
	mfspr 4, 0x19f
	mfivor15 4
	mfspr 4, 0x213
	mfivor35 4
	mfspr 4, 0x23a
	mfmcsrr0 4
	mfspr 4, 0x23b
	mfmcsrr1 4
	mfspr 4, 0x23c
	mfmcsr 4
	mfspr 4, 0x370
	mfivndx 4
	mfspr 4, 0x371
	mfdvndx 4
	mfspr 4, 0x372
	mfivlim 4
	mfspr 4, 0x373
	mfdvlim 4
	mfspr 4, 0x374
	mfclcsr 4
	mfspr 4, 0x378
	mfccr1 4
	mfspr 4, 0x39b
	mfrstcfg 4
	mfspr 4, 0x39c
	mfdcdbtrl 4
	mfspr 4, 0x39d
	mfdcdbtrh 4
	mfspr 4, 0x39f
	mficdbtr 4
	mfspr 4, 0x3b2
	mfmmucr 4
	mfspr 4, 0x3b3
	mfccr0 4
	mfspr 4, 0x3d3
	mficdbdr 4
	mfspr 4, 0x3f3
	mfdbdr 4
	mtspr 0x036, 4
	mtdecar 4
|
stsp/binutils-ia16
| 21,400
|
gas/testsuite/gas/ppc/lsp.s
|
# PA LSP instructions
# CMPE200GCC-62
.section ".text"
.equ rA,1
.equ rB,2
.equ rD,0
.equ rS,0
.equ UIMM, 15 ;#UIMM values >15 are illegal
.equ UIMM_2, 4
.equ UIMM_4, 8
.equ UIMM_8, 16
.equ SIMM, -16
.equ crD, 0
.equ offset, 1
zvaddih rD, rA, UIMM
zvsubifh rD, rA, UIMM
zvaddh rD, rA, rB
zvsubfh rD, rA, rB
zvaddsubfh rD, rA, rB
zvsubfaddh rD, rA, rB
zvaddhx rD, rA, rB
zvsubfhx rD, rA, rB
zvaddsubfhx rD, rA, rB
zvsubfaddhx rD, rA, rB
zaddwus rD, rA, rB
zsubfwus rD, rA, rB
zaddwss rD, rA, rB
zsubfwss rD, rA, rB
zvaddhus rD, rA, rB
zvsubfhus rD, rA, rB
zvaddhss rD, rA, rB
zvsubfhss rD, rA, rB
zvaddsubfhss rD, rA, rB
zvsubfaddhss rD, rA, rB
zvaddhxss rD, rA, rB
zvsubfhxss rD, rA, rB
zvaddsubfhxss rD, rA, rB
zvsubfaddhxss rD, rA, rB
zaddheuw rD, rA, rB
zsubfheuw rD, rA, rB
zaddhesw rD, rA, rB
zsubfhesw rD, rA, rB
zaddhouw rD, rA, rB
zsubfhouw rD, rA, rB
zaddhosw rD, rA, rB
zsubfhosw rD, rA, rB
zvmergehih rD, rA, rB
zvmergeloh rD, rA, rB
zvmergehiloh rD, rA, rB
zvmergelohih rD, rA, rB
zvcmpgthu crD, rA, rB
zvcmpgths crD, rA, rB
zvcmplthu crD, rA, rB
zvcmplths crD, rA, rB
zvcmpeqh crD, rA, rB
zpkswgshfrs rD, rA, rB
zpkswgswfrs rD, rA, rB
zvpkshgwshfrs rD, rA, rB
zvpkswshfrs rD, rA, rB
zvpkswuhs rD, rA, rB
zvpkswshs rD, rA, rB
zvpkuwuhs rD, rA, rB
zvsplatih rD, SIMM
zvsplatfih rD, SIMM
zcntlsw rD, rA
zvcntlzh rD, rA
zvcntlsh rD, rA
znegws rD, rA
zvnegh rD, rA
zvneghs rD, rA
zvnegho rD, rA
zvneghos rD, rA
zrndwh rD, rA
zrndwhss rD, rA
zvabsh rD, rA
zvabshs rD, rA
zabsw rD, rA
zabsws rD, rA
zsatswuw rD, rA
zsatuwsw rD, rA
zsatswuh rD, rA
zsatswsh rD, rA
zvsatshuh rD, rA
zvsatuhsh rD, rA
zsatuwuh rD, rA
zsatuwsh rD, rA
zsatsduw rD, rA, rB
zsatsdsw rD, rA, rB
zsatuduw rD, rA, rB
zvselh rD, rA, rB
zxtrw rD, rA, rB, offset
zbrminc rD, rA, rB
zcircinc rD, rA, rB
zdivwsf rD, rA, rB
zvsrhu rD, rA, rB
zvsrhs rD, rA, rB
zvsrhiu rD, rA, UIMM
zvsrhis rD, rA, UIMM
zvslh rD, rA, rB
zvrlh rD, rA, rB
zvslhi rD, rA, UIMM
zvrlhi rD, rA, UIMM
zvslhus rD, rA, rB
zvslhss rD, rA, rB
zvslhius rD, rA, UIMM
zvslhiss rD, rA, UIMM
zslwus rD, rA, rB
zslwss rD, rA, rB
zslwius rD, rA, UIMM
zslwiss rD, rA, UIMM
zaddwgui rD, rA, rB
zsubfwgui rD, rA, rB
zaddd rD, rA, rB
zsubfd rD, rA, rB
zvaddsubfw rD, rA, rB
zvsubfaddw rD, rA, rB
zvaddw rD, rA, rB
zvsubfw rD, rA, rB
zaddwgsi rD, rA, rB
zsubfwgsi rD, rA, rB
zadddss rD, rA, rB
zsubfdss rD, rA, rB
zvaddsubfwss rD, rA, rB
zvsubfaddwss rD, rA, rB
zvaddwss rD, rA, rB
zvsubfwss rD, rA, rB
zaddwgsf rD, rA, rB
zsubfwgsf rD, rA, rB
zadddus rD, rA, rB
zsubfdus rD, rA, rB
zvaddwus rD, rA, rB
zvsubfwus rD, rA, rB
zvunpkhgwsf rD, rA
zvunpkhsf rD, rA
zvunpkhui rD, rA
zvunpkhsi rD, rA
zunpkwgsf rD, rA
zvdotphgwasmf rD, rA, rB
zvdotphgwasmfr rD, rA, rB
zvdotphgwasmfaa rD, rA, rB
zvdotphgwasmfraa rD, rA, rB
zvdotphgwasmfan rD, rA, rB
zvdotphgwasmfran rD, rA, rB
zvmhulgwsmf rD, rA, rB
zvmhulgwsmfr rD, rA, rB
zvmhulgwsmfaa rD, rA, rB
zvmhulgwsmfraa rD, rA, rB
zvmhulgwsmfan rD, rA, rB
zvmhulgwsmfran rD, rA, rB
zvmhulgwsmfanp rD, rA, rB
zvmhulgwsmfranp rD, rA, rB
zmhegwsmf rD, rA, rB
zmhegwsmfr rD, rA, rB
zmhegwsmfaa rD, rA, rB
zmhegwsmfraa rD, rA, rB
zmhegwsmfan rD, rA, rB
zmhegwsmfran rD, rA, rB
zvdotphxgwasmf rD, rA, rB
zvdotphxgwasmfr rD, rA, rB
zvdotphxgwasmfaa rD, rA, rB
zvdotphxgwasmfraa rD, rA, rB
zvdotphxgwasmfan rD, rA, rB
zvdotphxgwasmfran rD, rA, rB
zvmhllgwsmf rD, rA, rB
zvmhllgwsmfr rD, rA, rB
zvmhllgwsmfaa rD, rA, rB
zvmhllgwsmfraa rD, rA, rB
zvmhllgwsmfan rD, rA, rB
zvmhllgwsmfran rD, rA, rB
zvmhllgwsmfanp rD, rA, rB
zvmhllgwsmfranp rD, rA, rB
zmheogwsmf rD, rA, rB
zmheogwsmfr rD, rA, rB
zmheogwsmfaa rD, rA, rB
zmheogwsmfraa rD, rA, rB
zmheogwsmfan rD, rA, rB
zmheogwsmfran rD, rA, rB
zvdotphgwssmf rD, rA, rB
zvdotphgwssmfr rD, rA, rB
zvdotphgwssmfaa rD, rA, rB
zvdotphgwssmfraa rD, rA, rB
zvdotphgwssmfan rD, rA, rB
zvdotphgwssmfran rD, rA, rB
zvmhuugwsmf rD, rA, rB
zvmhuugwsmfr rD, rA, rB
zvmhuugwsmfaa rD, rA, rB
zvmhuugwsmfraa rD, rA, rB
zvmhuugwsmfan rD, rA, rB
zvmhuugwsmfran rD, rA, rB
zvmhuugwsmfanp rD, rA, rB
zvmhuugwsmfranp rD, rA, rB
zmhogwsmf rD, rA, rB
zmhogwsmfr rD, rA, rB
zmhogwsmfaa rD, rA, rB
zmhogwsmfraa rD, rA, rB
zmhogwsmfan rD, rA, rB
zmhogwsmfran rD, rA, rB
zvmhxlgwsmf rD, rA, rB
zvmhxlgwsmfr rD, rA, rB
zvmhxlgwsmfaa rD, rA, rB
zvmhxlgwsmfraa rD, rA, rB
zvmhxlgwsmfan rD, rA, rB
zvmhxlgwsmfran rD, rA, rB
zvmhxlgwsmfanp rD, rA, rB
zvmhxlgwsmfranp rD, rA, rB
zmhegui rD, rA, rB
zvdotphgaui rD, rA, rB
zmheguiaa rD, rA, rB
zvdotphgauiaa rD, rA, rB
zmheguian rD, rA, rB
zvdotphgauian rD, rA, rB
zmhegsi rD, rA, rB
zvdotphgasi rD, rA, rB
zmhegsiaa rD, rA, rB
zvdotphgasiaa rD, rA, rB
zmhegsian rD, rA, rB
zvdotphgasian rD, rA, rB
zmhegsui rD, rA, rB
zvdotphgasui rD, rA, rB
zmhegsuiaa rD, rA, rB
zvdotphgasuiaa rD, rA, rB
zmhegsuian rD, rA, rB
zvdotphgasuian rD, rA, rB
zmhegsmf rD, rA, rB
zvdotphgasmf rD, rA, rB
zmhegsmfaa rD, rA, rB
zvdotphgasmfaa rD, rA, rB
zmhegsmfan rD, rA, rB
zvdotphgasmfan rD, rA, rB
zmheogui rD, rA, rB
zvdotphxgaui rD, rA, rB
zmheoguiaa rD, rA, rB
zvdotphxgauiaa rD, rA, rB
zmheoguian rD, rA, rB
zvdotphxgauian rD, rA, rB
zmheogsi rD, rA, rB
zvdotphxgasi rD, rA, rB
zmheogsiaa rD, rA, rB
zvdotphxgasiaa rD, rA, rB
zmheogsian rD, rA, rB
zvdotphxgasian rD, rA, rB
zmheogsui rD, rA, rB
zvdotphxgasui rD, rA, rB
zmheogsuiaa rD, rA, rB
zvdotphxgasuiaa rD, rA, rB
zmheogsuian rD, rA, rB
zvdotphxgasuian rD, rA, rB
zmheogsmf rD, rA, rB
zvdotphxgasmf rD, rA, rB
zmheogsmfaa rD, rA, rB
zvdotphxgasmfaa rD, rA, rB
zmheogsmfan rD, rA, rB
zvdotphxgasmfan rD, rA, rB
zmhogui rD, rA, rB
zvdotphgsui rD, rA, rB
zmhoguiaa rD, rA, rB
zvdotphgsuiaa rD, rA, rB
zmhoguian rD, rA, rB
zvdotphgsuian rD, rA, rB
zmhogsi rD, rA, rB
zvdotphgssi rD, rA, rB
zmhogsiaa rD, rA, rB
zvdotphgssiaa rD, rA, rB
zmhogsian rD, rA, rB
zvdotphgssian rD, rA, rB
zmhogsui rD, rA, rB
zvdotphgssui rD, rA, rB
zmhogsuiaa rD, rA, rB
zvdotphgssuiaa rD, rA, rB
zmhogsuian rD, rA, rB
zvdotphgssuian rD, rA, rB
zmhogsmf rD, rA, rB
zvdotphgssmf rD, rA, rB
zmhogsmfaa rD, rA, rB
zvdotphgssmfaa rD, rA, rB
zmhogsmfan rD, rA, rB
zvdotphgssmfan rD, rA, rB
zmwgui rD, rA, rB
zmwguiaa rD, rA, rB
zmwguiaas rD, rA, rB
zmwguian rD, rA, rB
zmwguians rD, rA, rB
zmwgsi rD, rA, rB
zmwgsiaa rD, rA, rB
zmwgsiaas rD, rA, rB
zmwgsian rD, rA, rB
zmwgsians rD, rA, rB
zmwgsui rD, rA, rB
zmwgsuiaa rD, rA, rB
zmwgsuiaas rD, rA, rB
zmwgsuian rD, rA, rB
zmwgsuians rD, rA, rB
zmwgsmf rD, rA, rB
zmwgsmfr rD, rA, rB
zmwgsmfaa rD, rA, rB
zmwgsmfraa rD, rA, rB
zmwgsmfan rD, rA, rB
zmwgsmfran rD, rA, rB
zvmhului rD, rA, rB
zvmhuluiaa rD, rA, rB
zvmhuluiaas rD, rA, rB
zvmhuluian rD, rA, rB
zvmhuluians rD, rA, rB
zvmhuluianp rD, rA, rB
zvmhuluianps rD, rA, rB
zvmhulsi rD, rA, rB
zvmhulsiaa rD, rA, rB
zvmhulsiaas rD, rA, rB
zvmhulsian rD, rA, rB
zvmhulsians rD, rA, rB
zvmhulsianp rD, rA, rB
zvmhulsianps rD, rA, rB
zvmhulsui rD, rA, rB
zvmhulsuiaa rD, rA, rB
zvmhulsuiaas rD, rA, rB
zvmhulsuian rD, rA, rB
zvmhulsuians rD, rA, rB
zvmhulsuianp rD, rA, rB
zvmhulsuianps rD, rA, rB
zvmhulsf rD, rA, rB
zvmhulsfr rD, rA, rB
zvmhulsfaas rD, rA, rB
zvmhulsfraas rD, rA, rB
zvmhulsfans rD, rA, rB
zvmhulsfrans rD, rA, rB
zvmhulsfanps rD, rA, rB
zvmhulsfranps rD, rA, rB
zvmhllui rD, rA, rB
zvmhlluiaa rD, rA, rB
zvmhlluiaas rD, rA, rB
zvmhlluian rD, rA, rB
zvmhlluians rD, rA, rB
zvmhlluianp rD, rA, rB
zvmhlluianps rD, rA, rB
zvmhllsi rD, rA, rB
zvmhllsiaa rD, rA, rB
zvmhllsiaas rD, rA, rB
zvmhllsian rD, rA, rB
zvmhllsians rD, rA, rB
zvmhllsianp rD, rA, rB
zvmhllsianps rD, rA, rB
zvmhllsui rD, rA, rB
zvmhllsuiaa rD, rA, rB
zvmhllsuiaas rD, rA, rB
zvmhllsuian rD, rA, rB
zvmhllsuians rD, rA, rB
zvmhllsuianp rD, rA, rB
zvmhllsuianps rD, rA, rB
zvmhllsf rD, rA, rB
zvmhllsfr rD, rA, rB
zvmhllsfaas rD, rA, rB
zvmhllsfraas rD, rA, rB
zvmhllsfans rD, rA, rB
zvmhllsfrans rD, rA, rB
zvmhllsfanps rD, rA, rB
zvmhllsfranps rD, rA, rB
zvmhuuui rD, rA, rB
zvmhuuuiaa rD, rA, rB
zvmhuuuiaas rD, rA, rB
zvmhuuuian rD, rA, rB
zvmhuuuians rD, rA, rB
zvmhuuuianp rD, rA, rB
zvmhuuuianps rD, rA, rB
zvmhuusi rD, rA, rB
zvmhuusiaa rD, rA, rB
zvmhuusiaas rD, rA, rB
zvmhuusian rD, rA, rB
zvmhuusians rD, rA, rB
zvmhuusianp rD, rA, rB
zvmhuusianps rD, rA, rB
zvmhuusui rD, rA, rB
zvmhuusuiaa rD, rA, rB
zvmhuusuiaas rD, rA, rB
zvmhuusuian rD, rA, rB
zvmhuusuians rD, rA, rB
zvmhuusuianp rD, rA, rB
zvmhuusuianps rD, rA, rB
zvmhuusf rD, rA, rB
zvmhuusfr rD, rA, rB
zvmhuusfaas rD, rA, rB
zvmhuusfraas rD, rA, rB
zvmhuusfans rD, rA, rB
zvmhuusfrans rD, rA, rB
zvmhuusfanps rD, rA, rB
zvmhuusfranps rD, rA, rB
zvmhxlui rD, rA, rB
zvmhxluiaa rD, rA, rB
zvmhxluiaas rD, rA, rB
zvmhxluian rD, rA, rB
zvmhxluians rD, rA, rB
zvmhxluianp rD, rA, rB
zvmhxluianps rD, rA, rB
zvmhxlsi rD, rA, rB
zvmhxlsiaa rD, rA, rB
zvmhxlsiaas rD, rA, rB
zvmhxlsian rD, rA, rB
zvmhxlsians rD, rA, rB
zvmhxlsianp rD, rA, rB
zvmhxlsianps rD, rA, rB
zvmhxlsui rD, rA, rB
zvmhxlsuiaa rD, rA, rB
zvmhxlsuiaas rD, rA, rB
zvmhxlsuian rD, rA, rB
zvmhxlsuians rD, rA, rB
zvmhxlsuianp rD, rA, rB
zvmhxlsuianps rD, rA, rB
zvmhxlsf rD, rA, rB
zvmhxlsfr rD, rA, rB
zvmhxlsfaas rD, rA, rB
zvmhxlsfraas rD, rA, rB
zvmhxlsfans rD, rA, rB
zvmhxlsfrans rD, rA, rB
zvmhxlsfanps rD, rA, rB
zvmhxlsfranps rD, rA, rB
zmheui rD, rA, rB
zmheuiaa rD, rA, rB
zmheuiaas rD, rA, rB
zmheuian rD, rA, rB
zmheuians rD, rA, rB
zmhesi rD, rA, rB
zmhesiaa rD, rA, rB
zmhesiaas rD, rA, rB
zmhesian rD, rA, rB
zmhesians rD, rA, rB
zmhesui rD, rA, rB
zmhesuiaa rD, rA, rB
zmhesuiaas rD, rA, rB
zmhesuian rD, rA, rB
zmhesuians rD, rA, rB
zmhesf rD, rA, rB
zmhesfr rD, rA, rB
zmhesfaas rD, rA, rB
zmhesfraas rD, rA, rB
zmhesfans rD, rA, rB
zmhesfrans rD, rA, rB
zmheoui rD, rA, rB
zmheouiaa rD, rA, rB
zmheouiaas rD, rA, rB
zmheouian rD, rA, rB
zmheouians rD, rA, rB
zmheosi rD, rA, rB
zmheosiaa rD, rA, rB
zmheosiaas rD, rA, rB
zmheosian rD, rA, rB
zmheosians rD, rA, rB
zmheosui rD, rA, rB
zmheosuiaa rD, rA, rB
zmheosuiaas rD, rA, rB
zmheosuian rD, rA, rB
zmheosuians rD, rA, rB
zmheosf rD, rA, rB
zmheosfr rD, rA, rB
zmheosfaas rD, rA, rB
zmheosfraas rD, rA, rB
zmheosfans rD, rA, rB
zmheosfrans rD, rA, rB
zmhoui rD, rA, rB
zmhouiaa rD, rA, rB
zmhouiaas rD, rA, rB
zmhouian rD, rA, rB
zmhouians rD, rA, rB
zmhosi rD, rA, rB
zmhosiaa rD, rA, rB
zmhosiaas rD, rA, rB
zmhosian rD, rA, rB
zmhosians rD, rA, rB
zmhosui rD, rA, rB
zmhosuiaa rD, rA, rB
zmhosuiaas rD, rA, rB
zmhosuian rD, rA, rB
zmhosuians rD, rA, rB
zmhosf rD, rA, rB
zmhosfr rD, rA, rB
zmhosfaas rD, rA, rB
zmhosfraas rD, rA, rB
zmhosfans rD, rA, rB
zmhosfrans rD, rA, rB
zvmhuih rD, rA, rB
zvmhuihs rD, rA, rB
zvmhuiaah rD, rA, rB
zvmhuiaahs rD, rA, rB
zvmhuianh rD, rA, rB
zvmhuianhs rD, rA, rB
zvmhsihs rD, rA, rB
zvmhsiaahs rD, rA, rB
zvmhsianhs rD, rA, rB
zvmhsuihs rD, rA, rB
zvmhsuiaahs rD, rA, rB
zvmhsuianhs rD, rA, rB
zvmhsfh rD, rA, rB
zvmhsfrh rD, rA, rB
zvmhsfaahs rD, rA, rB
zvmhsfraahs rD, rA, rB
zvmhsfanhs rD, rA, rB
zvmhsfranhs rD, rA, rB
zvdotphaui rD, rA, rB
zvdotphauis rD, rA, rB
zvdotphauiaa rD, rA, rB
zvdotphauiaas rD, rA, rB
zvdotphauian rD, rA, rB
zvdotphauians rD, rA, rB
zvdotphasi rD, rA, rB
zvdotphasis rD, rA, rB
zvdotphasiaa rD, rA, rB
zvdotphasiaas rD, rA, rB
zvdotphasian rD, rA, rB
zvdotphasians rD, rA, rB
zvdotphasui rD, rA, rB
zvdotphasuis rD, rA, rB
zvdotphasuiaa rD, rA, rB
zvdotphasuiaas rD, rA, rB
zvdotphasuian rD, rA, rB
zvdotphasuians rD, rA, rB
zvdotphasfs rD, rA, rB
zvdotphasfrs rD, rA, rB
zvdotphasfaas rD, rA, rB
zvdotphasfraas rD, rA, rB
zvdotphasfans rD, rA, rB
zvdotphasfrans rD, rA, rB
zvdotphxaui rD, rA, rB
zvdotphxauis rD, rA, rB
zvdotphxauiaa rD, rA, rB
zvdotphxauiaas rD, rA, rB
zvdotphxauian rD, rA, rB
zvdotphxauians rD, rA, rB
zvdotphxasi rD, rA, rB
zvdotphxasis rD, rA, rB
zvdotphxasiaa rD, rA, rB
zvdotphxasiaas rD, rA, rB
zvdotphxasian rD, rA, rB
zvdotphxasians rD, rA, rB
zvdotphxasui rD, rA, rB
zvdotphxasuis rD, rA, rB
zvdotphxasuiaa rD, rA, rB
zvdotphxasuiaas rD, rA, rB
zvdotphxasuian rD, rA, rB
zvdotphxasuians rD, rA, rB
zvdotphxasfs rD, rA, rB
zvdotphxasfrs rD, rA, rB
zvdotphxasfaas rD, rA, rB
zvdotphxasfraas rD, rA, rB
zvdotphxasfans rD, rA, rB
zvdotphxasfrans rD, rA, rB
zvdotphsui rD, rA, rB
zvdotphsuis rD, rA, rB
zvdotphsuiaa rD, rA, rB
zvdotphsuiaas rD, rA, rB
zvdotphsuian rD, rA, rB
zvdotphsuians rD, rA, rB
zvdotphssi rD, rA, rB
zvdotphssis rD, rA, rB
zvdotphssiaa rD, rA, rB
zvdotphssiaas rD, rA, rB
zvdotphssian rD, rA, rB
zvdotphssians rD, rA, rB
zvdotphssui rD, rA, rB
zvdotphssuis rD, rA, rB
zvdotphssuiaa rD, rA, rB
zvdotphssuiaas rD, rA, rB
zvdotphssuian rD, rA, rB
zvdotphssuians rD, rA, rB
zvdotphssfs rD, rA, rB
zvdotphssfrs rD, rA, rB
zvdotphssfaas rD, rA, rB
zvdotphssfraas rD, rA, rB
zvdotphssfans rD, rA, rB
zvdotphssfrans rD, rA, rB
zmwluis rD, rA, rB
zmwluiaa rD, rA, rB
zmwluiaas rD, rA, rB
zmwluian rD, rA, rB
zmwluians rD, rA, rB
zmwlsis rD, rA, rB
zmwlsiaas rD, rA, rB
zmwlsians rD, rA, rB
zmwlsuis rD, rA, rB
zmwlsuiaas rD, rA, rB
zmwlsuians rD, rA, rB
zmwsf rD, rA, rB
zmwsfr rD, rA, rB
zmwsfaas rD, rA, rB
zmwsfraas rD, rA, rB
zmwsfans rD, rA, rB
zmwsfrans rD, rA, rB
zlddx rD, rA, rB
zldd rD, UIMM_8(rA)
zldwx rD, rA, rB
zldw rD, UIMM_8(rA)
zldhx rD, rA, rB
zldh rD, UIMM_8(rA)
zlwgsfdx rD, rA, rB
zlwgsfd rD, UIMM_4(rA)
zlwwosdx rD, rA, rB
zlwwosd rD, UIMM_4(rA)
zlwhsplatwdx rD, rA, rB
zlwhsplatwd rD, UIMM_4(rA)
zlwhsplatdx rD, rA, rB
zlwhsplatd rD, UIMM_4(rA)
zlwhgwsfdx rD, rA, rB
zlwhgwsfd rD, UIMM_4(rA)
zlwhedx rD, rA, rB
zlwhed rD, UIMM_4(rA)
zlwhosdx rD, rA, rB
zlwhosd rD, UIMM_4(rA)
zlwhoudx rD, rA, rB
zlwhoud rD, UIMM_4(rA)
zlwhx rD, rA, rB
zlwh rD, UIMM_4(rA)
zlwwx rD, rA, rB
zlww rD, UIMM_4(rA)
zlhgwsfx rD, rA, rB
zlhgwsf rD, UIMM_2(rA)
zlhhsplatx rD, rA, rB
zlhhsplat rD, UIMM_2(rA)
zstddx rS, rA, rB
zstdd rS, UIMM_8(rA)
zstdwx rS, rA, rB
zstdw rS, UIMM_8(rA)
zstdhx rS, rA, rB
zstdh rS, UIMM_8(rA)
zstwhedx rS, rA, rB
zstwhed rS, UIMM_4(rA)
zstwhodx rS, rA, rB
zstwhod rS, UIMM_4(rA)
zlhhex rS, rA, rB
zlhhe rD, UIMM_2(rA)
zlhhosx rS, rA, rB
zlhhos rD, UIMM_2(rA)
zlhhoux rS, rA, rB
zlhhou rD, UIMM_2(rA)
zsthex rS, rA, rB
zsthe rS, UIMM_2(rA)
zsthox rS, rA, rB
zstho rS, UIMM_2(rA)
zstwhx rS, rA, rB
zstwh rS, UIMM_4(rA)
zstwwx rS, rA, rB
zstww rS, UIMM_4(rA)
zlddmx rD, rA, rB
zlddu rD, UIMM_8(rA)
zldwmx rD, rA, rB
zldwu rD, UIMM_8(rA)
zldhmx rD, rA, rB
zldhu rD, UIMM_8(rA)
zlwgsfdmx rD, rA, rB
zlwgsfdu rD, UIMM_4(rA)
zlwwosdmx rD, rA, rB
zlwwosdu rD, UIMM_4(rA)
zlwhsplatwdmx rD, rA, rB
zlwhsplatwdu rD, UIMM_4(rA)
zlwhsplatdmx rD, rA, rB
zlwhsplatdu rD, UIMM_4(rA)
zlwhgwsfdmx rD, rA, rB
zlwhgwsfdu rD, UIMM_4(rA)
zlwhedmx rD, rA, rB
zlwhedu rD, UIMM_4(rA)
zlwhosdmx rD, rA, rB
zlwhosdu rD, UIMM_4(rA)
zlwhoudmx rD, rA, rB
zlwhoudu rD, UIMM_4(rA)
zlwhmx rD, rA, rB
zlwhu rD, UIMM_4(rA)
zlwwmx rD, rA, rB
zlwwu rD, UIMM_4(rA)
zlhgwsfmx rD, rA, rB
zlhgwsfu rD, UIMM_2(rA)
zlhhsplatmx rD, rA, rB
zlhhsplatu rD, UIMM_2(rA)
zstddmx rS, rA, rB
zstddu rS, UIMM_8(rA)
zstdwmx rS, rA, rB
zstdwu rS, UIMM_8(rA)
zstdhmx rS, rA, rB
zstdhu rS, UIMM_8(rA)
zstwhedmx rS, rA, rB
zstwhedu rS, UIMM_4(rA)
zstwhodmx rD, rA, rB
zstwhodu rS, UIMM_4(rA)
zlhhemx rD, rA, rB
zlhheu rD, UIMM_2(rA)
zlhhosmx rD, rA, rB
zlhhosu rD, UIMM_2(rA)
zlhhoumx rD, rA, rB
zlhhouu rD, UIMM_2(rA)
zsthemx rS, rA, rB
zstheu rS, UIMM_2(rA)
zsthomx rS, rA, rB
zsthou rS, UIMM_2(rA)
zstwhmx rS, rA, rB
zstwhu rS, UIMM_4(rA)
zstwwmx rS, rA, rB
zstwwu rS, UIMM_4(rA)
|
stsp/binutils-ia16
| 3,479
|
gas/testsuite/gas/ppc/spe2-checks.s
|
# PA SPE2 instructions
.section ".text"
.equ rA,1
.equ rB,2
.equ rD,0
.equ rS,0
.equ UIMM_ILL, 32
.equ UIMM_1_ZERO, 0
.equ UIMM_1_ILL, 32
.equ UIMM_2_ILL, 1
.equ UIMM_4_ILL, 3
.equ UIMM_8_ILL, 7
.equ UIMM_GT7, 8
.equ UIMM_GT15, 16
.equ nnn_ILL, 8
.equ bbb_ILL, 8
.equ dd, 3
.equ dd_ILL, 4
.equ Ddd, 7
.equ Ddd_ILL, 8
.equ hh, 3
.equ hh_ILL, 4
.equ mask_ILL, 16
.equ offset_ILL0, 0
.equ offset_ILL, 8
evaddib rD, rB, UIMM_ILL
evaddih rD, rB, UIMM_ILL
evsubifh rD, UIMM_ILL, rB
evsubifb rD, UIMM_ILL, rB
evinsb rD, rA, Ddd, bbb_ILL
evxtrb rD, rA, Ddd, bbb_ILL
evsplath rD, rA, hh_ILL
evsplatb rD, rA, bbb_ILL
evinsh rD, rA, dd_ILL, hh
evclrbe rD, rA, mask_ILL
evclrbo rD, rA, mask_ILL
evclrh rD, rA, mask_ILL
evxtrh rD, rA, dd_ILL, hh
evxtrh rD, rA, dd, hh_ILL
evxtrd rD, rA, rB, offset_ILL0
evxtrd rD, rA, rB, offset_ILL
evsrbiu rD, rA, UIMM_GT7
evsrbis rD, rA, UIMM_GT7
evslbi rD, rA, UIMM_GT7
evrlbi rD, rA, UIMM_GT7
evsrhiu rD, rA, UIMM_GT15
evsrhis rD, rA, UIMM_GT15
evslhi rD, rA, UIMM_GT15
evrlhi rD, rA, UIMM_GT15
evsroiu rD, rA, nnn_ILL
evsrois rD, rA, nnn_ILL
evsloi rD, rA, nnn_ILL
evldb rD, UIMM_8_ILL (rA)
evlhhsplath rD, UIMM_2_ILL (rA)
evlwbsplatw rD, UIMM_4_ILL (rA)
evlwhsplatw rD, UIMM_4_ILL (rA)
evlbbsplatb rD, UIMM_1_ILL (rA)
evstdb rS, UIMM_8_ILL (rA)
evlwbe rD, UIMM_4_ILL (rA)
evlwbou rD, UIMM_4_ILL (rA)
evlwbos rD, UIMM_4_ILL (rA)
evstwbe rS, UIMM_4_ILL (rA)
evstwbo rS, UIMM_4_ILL (rA)
evstwb rS, UIMM_4_ILL (rA)
evsthb rS, UIMM_2_ILL (rA)
evlddu rD, UIMM_8_ILL (rA)
evldwu rD, UIMM_8_ILL (rA)
evldhu rD, UIMM_8_ILL (rA)
evldbu rD, UIMM_8_ILL (rA)
evlhhesplatu rD, UIMM_2_ILL (rA)
evlhhsplathu rD, UIMM_2_ILL (rA)
evlhhousplatu rD, UIMM_2_ILL (rA)
evlhhossplatu rD, UIMM_2_ILL (rA)
evlwheu rD, UIMM_4_ILL (rA)
evlwbsplatwu rD, UIMM_4_ILL (rA)
evlwhouu rD, UIMM_4_ILL (rA)
evlwhosu rD, UIMM_4_ILL (rA)
evlwwsplatu rD, UIMM_4_ILL (rA)
evlwhsplatwu rD, UIMM_4_ILL (rA)
evlwhsplatu rD, UIMM_4_ILL (rA)
evlbbsplatbu rD, UIMM_1_ZERO (rA)
evstddu rS, UIMM_8_ILL (rA)
evstdwu rS, UIMM_8_ILL (rA)
evstdhu rS, UIMM_8_ILL (rA)
evstdbu rS, UIMM_8_ILL (rA)
evlwbeu rD, UIMM_4_ILL (rA)
evlwbouu rD, UIMM_4_ILL (rA)
evlwbosu rD, UIMM_4_ILL (rA)
evstwheu rS, UIMM_4_ILL (rA)
evstwbeu rS, UIMM_4_ILL (rA)
evstwhou rS, UIMM_4_ILL (rA)
evstwbou rS, UIMM_4_ILL (rA)
evstwweu rS, UIMM_4_ILL (rA)
evstwbu rS, UIMM_4_ILL (rA)
evstwwou rS, UIMM_4_ILL (rA)
evsthbu rS, UIMM_2_ILL (rA)
|
stsp/binutils-ia16
| 2,342
|
gas/testsuite/gas/ppc/bc.s
|
.macro err op:vararg
.ifndef AT
.ifndef Y
\op
.endif
.endif
.endm
.macro errat op:vararg
.ifndef AT
\op
.endif
.endm
.macro erry op:vararg
.ifndef Y
\op
.endif
.endm
.text
bc 0,0,.
errat bc 1,0,. # z bit
bc 2,0,.
errat bc 3,0,. # z bit
bc 4,0,.
errat bc 5,0,. # at = 01 reserved
erry bc 6,0,. # z bit
erry bc 7,0,. # z bit
bc 8,0,.
errat bc 9,0,. # z bit
bc 10,0,.
errat bc 11,0,. # z bit
bc 12,0,.
errat bc 13,0,. # at = 01 reserved
erry bc 14,0,. # z bit
erry bc 15,0,. # z bit
bc 16,0,.
errat bc 17,0,. # at = 01 reserved
bc 18,0,.
errat bc 19,0,. # at = 01 reserved
bc 20,0,.
err bc 21,0,. # z bit
err bc 22,0,. # z bit
err bc 23,0,. # z bit
erry bc 24,0,. # z bit
erry bc 25,0,. # z bit
erry bc 26,0,. # z bit
erry bc 27,0,. # z bit
err bc 28,0,. # z bit
err bc 29,0,. # z bit
err bc 30,0,. # z bit
err bc 31,0,. # z bit
err bcctr 0,0
err bcctr 1,0
err bcctr 2,0
err bcctr 3,0
bcctr 4,0
errat bcctr 5,0
erry bcctr 6,0
erry bcctr 7,0
err bcctr 8,0
err bcctr 9,0
err bcctr 10,0
err bcctr 11,0
bcctr 12,0
errat bcctr 13,0
erry bcctr 14,0
erry bcctr 15,0
err bcctr 16,0
err bcctr 17,0
err bcctr 18,0
err bcctr 19,0
bcctr 20,0
err bcctr 21,0
err bcctr 22,0
err bcctr 23,0
err bcctr 24,0
err bcctr 25,0
err bcctr 26,0
err bcctr 27,0
err bcctr 28,0
err bcctr 29,0
err bcctr 30,0
err bcctr 31,0
bclr 0,0
errat bclr 1,0
bclr 2,0
errat bclr 3,0
bclr 4,0
errat bclr 5,0
erry bclr 6,0
erry bclr 7,0
bclr 8,0
errat bclr 9,0
bclr 10,0
errat bclr 11,0
bclr 12,0
errat bclr 13,0
erry bclr 14,0
erry bclr 15,0
bclr 16,0
errat bclr 17,0
bclr 18,0
errat bclr 19,0
bclr 20,0
err bclr 21,0
err bclr 22,0
err bclr 23,0
erry bclr 24,0
erry bclr 25,0
erry bclr 26,0
erry bclr 27,0
err bclr 28,0
err bclr 29,0
err bclr 30,0
err bclr 31,0
.ifdef POWER8
bctar 0,0
errat bctar 1,0
bctar 2,0
errat bctar 3,0
bctar 4,0
errat bctar 5,0
bctar 6,0
bctar 7,0
bctar 8,0
errat bctar 9,0
bctar 10,0
errat bctar 11,0
bctar 12,0
errat bctar 13,0
bctar 14,0
bctar 15,0
bctar 16,0
errat bctar 17,0
bctar 18,0
errat bctar 19,0
bctar 20,0
errat bctar 21,0
errat bctar 22,0
errat bctar 23,0
bctar 24,0
bctar 25,0
bctar 26,0
bctar 27,0
errat bctar 28,0
errat bctar 29,0
errat bctar 30,0
errat bctar 31,0
.endif
|
stsp/binutils-ia16
| 1,766
|
gas/testsuite/gas/ppc/xcoff-tls.s
|
# An external tdata symbol
.globl tdata_ext[TL]
.csect tdata_ext[TL]
.long 1
.csect tdata_int_csect[TL]
# A first internal tdata symbol
tdata_int1:
.long 2
# A second internal tdata symbol
tdata_int2:
.long 3
# Two external tbss symbols.
# XCOFF doesn't seem to allow internal tbss
# (or bss) symbols.
.comm tbss_ext[UL],8
.toc
# TC entries targeting the external tdata symbol
# Their value should be "tdata_ext" address,
# except TLSM value which must be 0.
# Their relocations should target it.
.tc tdata_ext_gd[TC],tdata_ext[TL]
.tc .tdata_ext_gd[TC],tdata_ext[TL]@m
.tc tdata_ext_ld[TC],tdata_ext[TL]@ld
.tc tdata_ext_ie[TC],tdata_ext[TL]@ie
.tc tdata_ext_le[TC],tdata_ext[TL]@le
# TC entries targeting internal tdata symbols.
# Their value should be "tdata_int1" or "tdata_int2"
# addresses, except TLSM value which must be 0.
# Their relocations should target "tdata_int_csect".
.tc tdata_int1_gd[TC],tdata_int1
.tc .tdata_int1_gd[TC],tdata_int1@m
.tc tdata_int1_ld[TC],tdata_int1@ld
.tc tdata_int1_ie[TC],tdata_int1@ie
.tc tdata_int1_le[TC],tdata_int1@le
.tc tdata_int2_gd[TC],tdata_int2
.tc .tdata_int2_gd[TC],tdata_int2@m
.tc tdata_int2_ld[TC],tdata_int2@ld
.tc tdata_int2_ie[TC],tdata_int2@ie
.tc tdata_int2_le[TC],tdata_int2@le
# TC entries targeting the external tdata symbol
# Their value should be "tbss_ext" address,
# except TLSM value which must be 0.
# Their relocations should target "tbss_ext".
.tc tbss_ext_gd[TC],tbss_ext[UL]
.tc .tbss_ext_gd[TC],tbss_ext[UL]@m
.tc tbss_ext_ld[TC],tbss_ext[UL]@ld
.tc tbss_ext_ie[TC],tbss_ext[UL]@ie
.tc tbss_ext_le[TC],tbss_ext[UL]@le
# Module entry
.tc mh[TC],mh[TC]@ml
.rename mh[TC], "_$TLSML" # Symbol for the module handle
|
stsp/binutils-ia16
| 1,578
|
gas/testsuite/gas/ppc/power7.s
|
.text
power7:
lxvd2x 3,4,5
lxvd2x 43,4,5
stxvd2x 3,4,5
stxvd2x 43,4,5
xxmrghd 3,4,5
xxmrghd 43,44,45
xxmrgld 3,4,5
xxmrgld 43,44,45
xxpermdi 3,4,5,0
xxpermdi 43,44,45,0
xxpermdi 3,4,5,3
xxpermdi 43,44,45,3
xxpermdi 3,4,5,1
xxpermdi 43,44,45,1
xxpermdi 3,4,5,2
xxpermdi 43,44,45,2
xvmovdp 3,4
xvmovdp 43,44
xvcpsgndp 3,4,4
xvcpsgndp 43,44,44
xvcpsgndp 3,4,5
xvcpsgndp 43,44,45
doze
nap
sleep
rvwinkle
prtyw 3,4
prtyd 13,14
mfcfar 10
mtcfar 11
cmpb 3,4,5
lwzcix 10,11,12
dadd 16,17,18
daddq 20,22,24
dss 3
dssall
dst 5,4,1
dstt 8,7,0
dstst 5,6,3
dststt 4,5,2
divwe 10,11,12
divwe. 11,12,13
divweo 12,13,14
divweo. 13,14,15
divweu 10,11,12
divweu. 11,12,13
divweuo 12,13,14
divweuo. 13,14,15
bpermd 7,17,27
popcntw 10,20
popcntd 10,20
ldbrx 20,21,22
stdbrx 20,21,22
lfiwzx 10,0,10
lfiwzx 10,9,10
fcfids 4,5
fcfids. 4,5
fcfidus 4,5
fcfidus. 4,5
fctiwu 4,5
fctiwu. 4,5
fctiwuz 4,5
fctiwuz. 4,5
fctidu 4,5
fctidu. 4,5
fctiduz 4,5
fctiduz. 4,5
fcfidu 4,5
fcfidu. 4,5
ftdiv 0,10,11
ftdiv 7,10,11
ftsqrt 0,10
ftsqrt 7,10
dcbtt 8,9
dcbtstt 8,9
dcffix 10,12
dcffix. 20,22
fre 14,15
fre. 14,15
fres 14,15
fres. 14,15
frsqrte 14,15
frsqrte. 14,15
frsqrtes 14,15
frsqrtes. 14,15
isel 2,3,4,28
yield
or 27,27,27
ori 2,2,0
.p2align 4,,15
mdoio
or 29,29,29
mdoom
or 30,30,30
tlbie 10,11
|
stsp/binutils-ia16
| 3,095
|
gas/testsuite/gas/ppc/vle.s
|
# Freescale PowerPC VLE instruction tests
#as: -mvle
.text
.extern extern_subr
.equ UI8,0x37
.equ SCI0,UI8<<0
.equ SCI1,UI8<<8
.equ SCI2,UI8<<16
.equ SCI3,UI8<<24
.equ r0,0
.equ r1,1
.equ r2,2
.equ r3,3
.equ r4,4
.equ r5,5
.equ r6,6
.equ r7,7
.equ r8,8
.equ r9,9
.equ r10,10
.equ r11,11
.equ r12,12
.equ r13,13
.equ r14,14
.equ r15,15
.equ r16,16
.equ r17,17
.equ r18,18
.equ r19,19
.equ r20,20
.equ r21,21
.equ r22,22
.equ r23,23
.equ r24,24
.equ r25,25
.equ r26,26
.equ r27,27
.equ r28,28
.equ r29,29
.equ r30,30
.equ r31,31
.equ r32,32
.equ rsp,r1
start_label:
e_add16i r4,r3,27
e_add2i. r0,0x3456
e_add2is r1,0x4321
e_addi. r2,r6,SCI0
e_addi r3,r5,SCI1
e_addic. r4,r4,SCI2
e_addic r7,r8,SCI3
e_and2i. r9,0xfeed
e_and2is. r10,5
e_andi. r11,r13,0x39
e_andi r12,r15,SCI2
e_b middle_label
e_bl extern_subr
e_bc 0,3,start_label
e_bcl 1,15,extern_subr
e_cmp16i r2,0x3333
e_cmpi 2,r6,SCI1
e_cmph 1,r7,r11
e_cmph16i r12,0xfdef
e_cmphl 0,r6,r8
e_cmphl16i r13,0x1234
e_cmpl16i r1, 0xfee0
e_cmpli 1,r3,SCI3
e_crand 0x1d,3,0
e_crandc 0,2,0x1d
e_creqv 15,16,17
e_crnand 0xf,0,3
e_crnor 0xf,0,3
e_cror 12,13,14
e_crorc 19,18,17
e_crxor 0,0,0
e_lbz r7,0xffffcc0d(r3)
e_lbzu r7,-52(r5)
e_lha r8,0x1ff(r10)
e_lhau r8,-1(r1)
e_lhz r7,6200(r0)
e_lhzu r7,62(r0)
e_li r0,0x33333
e_lis r1,0x3333
e_lmw r5,24(r3)
e_lwz r5,10024(r3)
e_lwzu r6,0x72(r2)
e_mcrf 1,6
e_mulli r9,r10,SCI0
e_mull2i r1,0x668
e_or2i r5,0x2345
e_or2is r5,0xa345
e_ori. r7,r9,SCI0
e_ori r7,r8,SCI1
e_rlw r18, r22,r0
e_rlw. r8, r2,r0
e_rlwi r20,r3,21
e_rlwi. r2,r3,21
e_rlwimi r4,r19,13,8,15
e_rlwinm r4,r1,13,1,17
e_slwi r12,r19,6
e_slwi. r12,r10,20
e_srwi r0,r1,16
e_srwi. r0,r1,11
e_stb r3,22000(r1)
e_stbu r19,-4(r22)
e_sth r0,666(r21)
e_sthu r1,-1(r23)
e_stmw r0,4(r3)
e_stw r3,16161(r0)
e_stwu r22,0xffffffee(r4)
e_subfic r0,r21,SCI2
e_subfic. r22,r0,SCI3
e_xori r21,r3,SCI1
e_xori. r0,r20,SCI0
middle_label:
se_add r31,r7
se_addi r28,0x1f
se_and r0,r1
se_and. r1,r0
se_andc r2, r3
se_andi r4,0x11
se_b middle_label
se_bl extern_subr
se_bc 1,3,not_end_label
se_bclri r27,0x12
se_bctr
se_bctrl
se_bgeni r7,17
se_blr
se_blrl
se_bmaski r6,0
se_bseti r0,1
se_btsti r4,7
se_cmp r0,r1
se_cmph r31,r28
se_cmphl r1,r25
se_cmpi r3,22
se_cmpl r6,r7
se_cmpli r28,0xc
se_extsb r1
se_extsh r2
se_extzb r30
se_extzh r24
not_end_label:
se_illegal
se_isync
se_lbz r1,8(r24)
se_lhz r24,18(r4)
se_li r4,0x4f
se_lwz r6,60(r0)
se_mfar r7,r8
se_mfctr r3
se_mflr r4
se_mr r31,r0
se_mtar r23,r2
se_mtctr r6
se_mtlr r31
se_mullw r3,r4
se_neg r24
se_not r25
se_or r0,r1
se_rfci
se_rfdi
se_rfi
se_sc
se_slw r5,r6
se_slwi r7,7
se_sraw r6,r30
se_srawi r25,8
se_srw r30,r0
se_srwi r29,25
se_stb r0,10(r2)
se_sth r1,12(r30)
se_stw r7,0(r29)
se_sub r1,r2
se_subf r29,r26
se_subi r7,24
end_label:
se_subi. r25,19
se_bl middle_label
e_b middle_label
e_bl start_label
se_rfgi
e_sc
e_sc 0
e_sc 1
|
stsp/binutils-ia16
| 1,134
|
gas/testsuite/gas/ppc/vle-simple-2.s
|
.text
target0:
e_bdnz target1
e_bdnzl target1
e_bdz target2
target1:
e_bdzl target0
e_beq target0
e_beq cr1, target8
target2:
e_beql cr0, target1
e_beql target6
e_bf 4*cr0+gt, target3
target3:
e_bfl cr0*4+un, target0
e_bge cr1, target1
e_bge target5
target4:
e_bgel cr2, target3
e_bgel target4
e_bgt cr0, target0
e_bgt target0
e_bgtl cr2, target2
e_bgtl target2
e_ble cr3, target5
e_ble target5
target5:
e_blel cr0, target4
e_blel target4
e_blt cr1, target3
e_blt target3
e_bltl target0
e_bltl cr1, target0
target6:
e_bne target7
e_bne cr1, target0
e_bnel cr0, target5
e_bnel target5
e_bng target9
e_bng cr1, target4
target7:
e_bngl cr2, target6
e_bngl target8
e_bnl cr1, target5
e_bnl target5
e_bnll cr3, target3
e_bnll target3
e_bns target2
e_bns cr0, target2
target8:
e_bnsl cr2, target0
e_bnsl target6
e_bnu cr1, target1
e_bnu target1
e_bnul target7
e_bnul cr0, target3
e_bso cr1, target4
e_bso target4
target9:
e_bsol cr0, target8
e_bsol target8
e_bt gt+cr0*4, target7
e_btl lt+4*cr0, target5
e_bun cr1, target4
e_bun target4
e_bunl cr2, target0
e_bunl target9
|
stsp/binutils-ia16
| 1,026
|
gas/testsuite/gas/ppc/power6.s
|
# PowerPC POWER6 AltiVec tests
#as: -mpower6
.text
start:
doze
nap
sleep
rvwinkle
prtyw 3,4
prtyd 13,14
mfcfar 10
mtcfar 11
cmpb 3,4,5
mffgpr 6,7
mftgpr 8,9
lwzcix 10,11,12
lfdpx 12,14,15
dadd 16,17,18
daddq 20,22,24
dss 3
dssall
dst 5,4,1
dstt 8,7,0
dstst 5,6,3
dststt 4,5,2
attn
mtcr 3
mtcrf 0xff,3
mtcrf 0x81,3
mtcrf 0x01,3
mtcrf 0x02,3
mtcrf 0x04,3
mtcrf 0x08,3
mtcrf 0x10,3
mtcrf 0x20,3
mtcrf 0x40,3
mtcrf 0x80,3
mfcr 3
mfcr 3,0x01
mfcr 3,0x02
mfcr 3,0x04
mfcr 3,0x08
mfcr 3,0x10
mfcr 3,0x20
mfcr 3,0x40
mfcr 3,0x80
dcbz 1, 2
dcbzl 3, 4
dcbz 5, 6
mtfsf 6,10
mtfsf. 6,11
mtfsf 6,10,0,0
mtfsf. 6,11,0,0
mtfsf 6,10,0,1
mtfsf. 6,11,0,1
mtfsf 6,10,1,0
mtfsf. 6,11,1,0
mtfsfi 6,0
mtfsfi. 6,15
mtfsfi 6,0,0
mtfsfi. 6,15,0
mtfsfi 6,0,1
mtfsfi. 6,15,1
cbcdtd 10,11
cdtbcd 10,11
addg6s 10,11,12
ori 1,1,0
.p2align 4,,15
slbia
slbia 0
slbia 7
tlbie 10
tlbie 10,0
tlbie 10,1
|
stsp/binutils-ia16
| 1,921
|
gas/testsuite/gas/ppc/simpshft.s
|
# These are all the examples from section F.4 of
# "PowerPC Microprocessor Family: The Programming Environments".
# 64-bit examples
extrdi %r4,%r3,1,0
insrdi %r3,%r4,1,0
sldi %r5,%r5,8
clrldi %r4,%r3,32
# 32-bit examples
extrwi %r4,%r3,1,0
insrwi %r3,%r4,1,0
slwi %r5,%r5,8
clrlwi %r4,%r3,16
# These test the remaining corner cases for 64-bit operations.
extldi %r4,%r3,1,0
extldi %r4,%r3,64,0
extldi %r4,%r3,1,63
extldi %r4,%r3,64,63 # bit weird, that one.
extrdi %r4,%r3,63,0
extrdi %r4,%r3,1,62
insrdi %r4,%r3,64,0
insrdi %r4,%r3,63,0
insrdi %r4,%r3,1,62
insrdi %r4,%r3,1,63
rotldi %r4,%r3,0
rotldi %r4,%r3,1
rotldi %r4,%r3,63
rotrdi %r4,%r3,0
rotrdi %r4,%r3,1
rotrdi %r4,%r3,63
rotld %r5,%r3,%r4
sldi %r4,%r3,0
sldi %r4,%r3,63
srdi %r4,%r3,0
srdi %r4,%r3,1
srdi %r4,%r3,63
clrldi %r4,%r3,0
clrldi %r4,%r3,1
clrldi %r4,%r3,63
clrrdi %r4,%r3,0
clrrdi %r4,%r3,1
clrrdi %r4,%r3,63
clrlsldi %r4,%r3,0,0
clrlsldi %r4,%r3,1,0
clrlsldi %r4,%r3,63,0
clrlsldi %r4,%r3,63,1
clrlsldi %r4,%r3,63,63
# These test the remaining corner cases for 32-bit operations.
extlwi %r4,%r3,1,0
extlwi %r4,%r3,32,0
extlwi %r4,%r3,1,31
extlwi %r4,%r3,32,31 # bit weird, that one.
extrwi %r4,%r3,31,0
extrwi %r4,%r3,1,30
inslwi %r4,%r3,1,0
inslwi %r4,%r3,32,0
inslwi %r4,%r3,1,31
insrwi %r4,%r3,1,0
insrwi %r4,%r3,32,0
insrwi %r4,%r3,1,31
rotlwi %r4,%r3,0
rotlwi %r4,%r3,1
rotlwi %r4,%r3,31
rotrwi %r4,%r3,0
rotrwi %r4,%r3,1
rotrwi %r4,%r3,31
rotlw %r5,%r3,%r4
slwi %r4,%r3,0
slwi %r4,%r3,1
slwi %r4,%r3,31
srwi %r4,%r3,0
srwi %r4,%r3,1
srwi %r4,%r3,31
clrlwi %r4,%r3,0
clrlwi %r4,%r3,1
clrlwi %r4,%r3,31
clrrwi %r4,%r3,0
clrrwi %r4,%r3,1
clrrwi %r4,%r3,31
clrlslwi %r4,%r3,0,0
clrlslwi %r4,%r3,1,0
clrlslwi %r4,%r3,31,0
clrlslwi %r4,%r3,31,1
clrlslwi %r4,%r3,31,31
# Force alignment so that we pass the test on AIX
.p2align 3,0
|
stsp/binutils-ia16
| 1,230
|
gas/testsuite/gas/ppc/altivec2.s
|
.text
start:
vabsdub 6,17,16
vabsduh 21,18,4
vabsduw 25,20,9
vpermxor 6,17,20,26
vaddeuqm 29,26,15,28
vaddecuq 15,8,7,24
vsubeuqm 2,6,21,1
vsubecuq 29,6,0,4
vmulouw 14,9,3
vmuluwm 24,16,18
vaddudm 10,17,17
vmaxud 30,25,4
vrld 10,6,28
vcmpequd 27,7,7
vadduqm 22,16,25
vaddcuq 1,21,29
vmulosw 20,11,19
vmaxsd 24,19,1
vmuleuw 13,27,30
vminud 9,24,17
vcmpgtud 10,18,28
vmulesw 0,29,22
vminsd 13,28,1
vsrad 10,20,5
vcmpgtsd 27,21,5
bcdadd. 0,23,30,1
vpmsumb 25,24,26
bcdsub. 8,4,4,1
vpmsumh 16,14,26
vpkudum 27,2,26
vpmsumw 6,23,17
vpmsumd 20,6,25
vpkudus 27,22,16
vsubudm 21,20,18
vsubuqm 21,20,18
vcipher 29,29,6
vcipherlast 2,13,20
vgbbd 20,18
vsubcuq 19,8,25
vorc 9,26,29
vncipher 20,20,13
vncipherlast 15,5,27
vbpermq 3,19,6
vpksdus 30,4,28
vnand 0,4,14
vsld 17,8,29
vsbox 29,20
vpksdss 11,7,11
vcmpequd. 3,19,16
vupkhsw 18,17
vshasigmaw 29,8,0,13
veqv 23,28,26
vmrgew 29,0,2
vmrgow 29,0,2
vshasigmad 27,19,0,0
vsrd 20,28,28
vupklsw 18,21
vclzb 30,7
vpopcntb 29,21
vclzh 25,21
vpopcnth 16,30
vclzw 28,3
vpopcntw 10,9
vclzd 22,9
vpopcntd 15,30
vcmpgtud. 2,31,6
vcmpgtsd. 20,15,2
|
stsp/binutils-ia16
| 1,131
|
gas/testsuite/gas/ppc/ppc750ps.s
|
# PowerPC 750 paired single precision tests
.text
start:
psq_l 0, 4(3), 1, 5
psq_lu 1, 8(2), 0, 3
psq_lux 2, 5, 4, 1, 2
psq_lx 3, 2, 4, 0, 5
psq_st 3, 8(2), 0, 3
psq_stu 3, 8(2), 0, 7
psq_stux 2, 3, 4, 0, 5
psq_stx 6, 7, 8, 1, 4
ps_abs 5,7
ps_abs. 5,7
ps_add 1,2,3
ps_add. 1,2,3
ps_cmpo0 3,2,4
ps_cmpo1 3,2,4
ps_cmpu0 3,2,4
ps_cmpu1 3,2,4
ps_div 2,4,6
ps_div. 2,4,6
ps_madd 0,1,2,3
ps_madd. 0,1,2,3
ps_madds0 1,2,3,4
ps_madds0. 1,2,3,4
ps_madds1 1,2,3,4
ps_madds1. 1,2,3,4
ps_merge00 2,4,6
ps_merge00. 2,4,6
ps_merge01 2,4,6
ps_merge01. 2,4,6
ps_merge10 2,4,6
ps_merge10. 2,4,6
ps_merge11 2,4,6
ps_merge11. 2,4,6
ps_mr 3,5
ps_mr. 3,5
ps_msub 2,4,6,8
ps_msub. 2,4,6,8
ps_mul 2,3,5
ps_mul. 2,3,5
ps_muls0 3,4,7
ps_muls0. 3,4,7
ps_muls1 3,4,7
ps_muls1. 3,4,7
ps_nabs 1,5
ps_nabs. 1,5
ps_neg 1,5
ps_neg. 1,5
ps_nmadd 1,3,5,7
ps_nmadd. 1,3,5,7
ps_nmsub 1,3,5,7
ps_nmsub. 1,3,5,7
ps_res 9,3
ps_res. 9,3
ps_rsqrte 9,3
ps_rsqrte. 9,3
ps_sel 1,2,3,4
ps_sel. 1,2,3,4
ps_sub 5,11,2
ps_sub. 5,11,2
ps_sum0 2,5,9,10
ps_sum0. 2,5,9,10
ps_sum1 2,5,9,10
ps_sum1. 2,5,9,10
dcbz_l 3,5
|
stsp/binutils-ia16
| 1,707
|
gas/testsuite/gas/elf/dwarf-5-file0-3.s
|
.file "test.c"
.text
.Ltext0:
.file 0 "/current/directory" "/full/path/test.c"
.globl x
.section .bss
.balign 4
.type x, %object
.size x, 4
x:
.zero 4
.text
.Letext0:
.file 1 "/full/path/test.c"
.section .debug_info,"",%progbits
.Ldebug_info0:
.4byte 0x32
.2byte 0x5
.byte 0x1
.byte 0x4
.4byte .Ldebug_abbrev0
.uleb128 0x1
.4byte .LASF2
.byte 0x1d
.4byte .LASF0
.4byte .LASF1
.4byte .Ldebug_line0
.uleb128 0x2
.asciz "x"
.byte 0x1
.byte 0x1
.byte 0x5
.4byte 0x2e
.uleb128 0x5
.byte 0x3
.4byte x
.uleb128 0x3
.byte 0x4
.byte 0x5
.asciz "int"
.byte 0
.section .debug_abbrev,"",%progbits
.Ldebug_abbrev0:
.uleb128 0x1
.uleb128 0x11
.byte 0x1
.uleb128 0x25
.uleb128 0xe
.uleb128 0x13
.uleb128 0xb
.uleb128 0x3
.uleb128 0x1f
.uleb128 0x1b
.uleb128 0x1f
.uleb128 0x10
.uleb128 0x17
.byte 0
.byte 0
.uleb128 0x2
.uleb128 0x34
.byte 0
.uleb128 0x3
.uleb128 0x8
.uleb128 0x3a
.uleb128 0xb
.uleb128 0x3b
.uleb128 0xb
.uleb128 0x39
.uleb128 0xb
.uleb128 0x49
.uleb128 0x13
.uleb128 0x3f
.uleb128 0x19
.uleb128 0x2
.uleb128 0x18
.byte 0
.byte 0
.uleb128 0x3
.uleb128 0x24
.byte 0
.uleb128 0xb
.uleb128 0xb
.uleb128 0x3e
.uleb128 0xb
.uleb128 0x3
.uleb128 0x8
.byte 0
.byte 0
.byte 0
.section .debug_aranges,"",%progbits
.4byte 0x14
.2byte 0x2
.4byte .Ldebug_info0
.byte 0x4
.byte 0
.2byte 0
.2byte 0
.4byte 0
.4byte 0
.section .debug_line,"",%progbits
.Ldebug_line0:
.section .debug_str,"MS",%progbits,1
.LASF2:
.asciz "GNU C17 11.2.1 -g"
.section .debug_line_str,"MS",%progbits,1
.LASF1:
.asciz "/working/directory"
.LASF0:
.asciz "/full/path/test.c"
.ident "GCC: (GNU) 11.2.1"
.section .note.GNU-stack,"",%progbits
|
stsp/binutils-ia16
| 2,486
|
gas/testsuite/gas/elf/dwarf2-6.s
|
/* Test view number decoding.
Copyright (C) 2017-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
.file "dwarf2-6.c"
.text
.balign 4
.globl _start
_start:
.L_start:
.org .+256
.Lend_start:
.section .debug_line,"",%progbits
.4byte .Lline_end - .Lline_start /* Initial length. */
.Lline_start:
.2byte 2 /* Dwarf Version. */
.4byte .Lline_lines - .Lline_hdr
.Lline_hdr:
.byte 1 /* Minimum insn length. */
.byte 1 /* Default is_stmt. */
.byte 1 /* Line base. */
.byte 1 /* Line range. */
.byte 0x10 /* Opcode base. */
/* Standard lengths. */
.byte 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0
.byte 0 /* Include directories. */
/* File names. */
.ascii "dwarf2-6.c\0"
.uleb128 0, 0, 0
.byte 0
.Lline_lines:
.byte 0 /* DW_LNS_extended_op. */
.uleb128 5
.byte 2 /* DW_LNE_set_address. */
.4byte .L_start
.byte 1 /* DW_LNS_copy view 0. */
.byte 1 /* DW_LNS_copy view 1. */
.byte 0 /* DW_LNS_extended_op. */
.uleb128 5
.byte 2 /* DW_LNE_set_address. */
.4byte .L_start+1
.byte 1 /* DW_LNS_copy view 0. */
.byte 2 /* DW_LNS_advance_pc by 0. */
.uleb128 0
.byte 1 /* DW_LNS_copy view 1. */
.byte 2 /* DW_LNS_advance_pc by 1 (reset view). */
.uleb128 1
.byte 1 /* DW_LNS_copy view 0. */
.byte 9 /* DW_LNS_fixed_advance_pc by 1. */
.2byte 1 /* This opcode does NOT reset view. */
.byte 1 /* DW_LNS_copy view 1. */
.byte 16 /* Special opcode 0, PC+=0, Line+=1, view 2. */
.byte 17 /* Special opcode 1, PC+=1 (reset view), Line+=1. */
.byte 1 /* DW_LNS_copy view 1. */
.byte 8 /* DW_LNS_const_add_pc by 239 (reset view). */
.byte 1 /* DW_LNS_copy view 0. */
.byte 0 /* DW_LNS_extended_op. */
.uleb128 5
.byte 2 /* DW_LNE_set_address. */
.4byte .Lend_start
.byte 0 /* DW_LNS_extended_op. */
.uleb128 1
.byte 1 /* DW_LEN_end_of_sequence. */
.Lline_end:
|
stsp/binutils-ia16
| 1,138
|
gas/testsuite/gas/elf/dwarf2-10.s
|
/* Test view numbering zero-assert checking with zero-sized align.
Copyright (C) 2017-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
.file "dwarf2-10.c"
.text
.balign 8
.globl _start
_start:
.file 1 "dwarf2-10.c"
.loc 1 1 view 0
.balign 8 /* No skip needed here... */
.loc 1 2 view 0 /* so this zero-view check fails. */
.quad 0
.loc 1 3 view 0
.balign 16 /* Skip 8 more bytes after .quad... */
.loc 1 4 view 0 /* so this is a zero view indeed. */
.quad 0
.size _start, .-_start
|
stsp/binutils-ia16
| 5,257
|
gas/testsuite/gas/elf/dwarf2-1.s
|
/* This testcase is derived from a similar test in GDB.
Copyright (C) 2009-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* Dummy function to provide debug information for. */
.file "dwarf2-1.c"
.text
.globl _start
_start:
.quad 0
.Lbegin_text1:
.globl func_cu1
.type func_cu1, %function
func_cu1:
.Lbegin_func_cu1:
.quad 0
.Lend_func_cu1:
.size func_cu1, .-func_cu1
.Lend_text1:
/* Debug information */
.section .debug_info
.Lcu1_begin:
/* CU header */
.4byte .Lcu1_end - .Lcu1_start /* Length of Compilation Unit */
.Lcu1_start:
.2byte 2 /* DWARF Version */
.4byte .Labbrev1_begin /* Offset into abbrev section */
.byte 4 /* Pointer size */
/* CU die */
.uleb128 1 /* Abbrev: DW_TAG_compile_unit */
.4byte .Lline1_begin /* DW_AT_stmt_list */
.4byte .Lend_text1 /* DW_AT_high_pc */
.4byte .Lbegin_text1 /* DW_AT_low_pc */
.ascii "file1.txt\0" /* DW_AT_name */
.ascii "GNU C 3.3.3\0" /* DW_AT_producer */
.byte 1 /* DW_AT_language (C) */
/* func_cu1 */
.uleb128 2 /* Abbrev: DW_TAG_subprogram */
.byte 1 /* DW_AT_external */
.byte 1 /* DW_AT_decl_file */
.byte 2 /* DW_AT_decl_line */
.ascii "func_cu1\0" /* DW_AT_name */
.4byte .Ltype_int-.Lcu1_begin /* DW_AT_type */
.4byte .Lbegin_func_cu1 /* DW_AT_low_pc */
.4byte .Lend_func_cu1 /* DW_AT_high_pc */
.byte 1 /* DW_AT_frame_base: length */
.byte 0x55 /* DW_AT_frame_base: DW_OP_reg5 */
.Ltype_int:
.uleb128 3 /* Abbrev: DW_TAG_base_type */
.ascii "int\0" /* DW_AT_name */
.byte 4 /* DW_AT_byte_size */
.byte 5 /* DW_AT_encoding */
.byte 0 /* End of children of CU */
.Lcu1_end:
/* Line table */
.section .debug_line
.Lline1_begin:
.4byte .Lline1_end - .Lline1_start /* Initial length */
.Lline1_start:
.2byte 2 /* Version */
.4byte .Lline1_lines - .Lline1_hdr /* header_length */
.Lline1_hdr:
.byte 1 /* Minimum insn length */
.byte 1 /* default_is_stmt */
.byte 1 /* line_base */
.byte 1 /* line_range */
.byte 0x10 /* opcode_base */
/* Standard lengths */
.byte 0
.byte 1
.byte 1
.byte 1
.byte 1
.byte 0
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 0
/* Include directories */
.byte 0
/* File names */
.ascii "file1.txt\0"
.uleb128 0
.uleb128 0
.uleb128 0
.byte 0
.Lline1_lines:
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lbegin_func_cu1
.byte 3 /* DW_LNS_advance_line */
.sleb128 3 /* ... to 4 */
.byte 1 /* DW_LNS_copy */
.byte 1 /* DW_LNS_copy (second time as an end-of-prologue marker) */
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lend_func_cu1
.byte 0 /* DW_LNE_end_of_sequence */
.uleb128 1
.byte 1
.Lline1_end:
/* Abbrev table */
.section .debug_abbrev
.Labbrev1_begin:
.uleb128 1 /* Abbrev code */
.uleb128 0x11 /* DW_TAG_compile_unit */
.byte 1 /* has_children */
.uleb128 0x10 /* DW_AT_stmt_list */
.uleb128 0x6 /* DW_FORM_data4 */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x25 /* DW_AT_producer */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x13 /* DW_AT_language */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 2 /* Abbrev code */
.uleb128 0x2e /* DW_TAG_subprogram */
.byte 0 /* has_children */
.uleb128 0x3f /* DW_AT_external */
.uleb128 0xc /* DW_FORM_flag */
.uleb128 0x3a /* DW_AT_decl_file */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3b /* DW_AT_decl_line */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x49 /* DW_AT_type */
.uleb128 0x13 /* DW_FORM_ref4 */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x40 /* DW_AT_frame_base */
.uleb128 0xa /* DW_FORM_block1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 3 /* Abbrev code */
.uleb128 0x24 /* DW_TAG_base_type */
.byte 0 /* has_children */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0xb /* DW_AT_byte_size */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3e /* DW_AT_encoding */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
|
stsp/binutils-ia16
| 1,654
|
gas/testsuite/gas/elf/dwarf-5-file0-2.s
|
/* DWARF 5 test: an explicit ".file 0" directory/name pair must be accepted
   and used as the compilation unit's primary file.  The hand-built
   .debug_info / .debug_abbrev below describe one CU containing a single
   global "int x" variable.  */
.file "test.c"
.text
.Ltext0:
.file 0 "/example" "test.c" /* DWARF 5 file number 0: comp dir + primary source */
.globl x
.section .bss
.balign 4
.type x, %object
.size x, 4
x:
.zero 4
.text
.Letext0:
.file 1 "test.c"
.section .debug_info,"",%progbits
.Ldebug_info0:
/* DWARF 5 compilation-unit header.  */
.4byte 0x32 /* unit_length */
.2byte 0x5 /* version: DWARF 5 */
.byte 0x1 /* unit_type: DW_UT_compile */
.byte 0x4 /* address_size */
.4byte .Ldebug_abbrev0 /* debug_abbrev_offset */
/* CU DIE (abbrev 1: DW_TAG_compile_unit).  */
.uleb128 0x1
.4byte .LASF2 /* DW_AT_producer (DW_FORM_strp) */
.byte 0x1d /* DW_AT_language: DW_LANG_C11 */
.4byte .LASF0 /* DW_AT_name (DW_FORM_line_strp) */
.4byte .LASF1 /* DW_AT_comp_dir (DW_FORM_line_strp) */
.4byte .Ldebug_line0 /* DW_AT_stmt_list */
/* Variable DIE (abbrev 2: DW_TAG_variable) for "x".  */
.uleb128 0x2
.asciz "x" /* DW_AT_name */
.byte 0x1 /* DW_AT_decl_file */
.byte 0x1 /* DW_AT_decl_line */
.byte 0x5 /* DW_AT_decl_column */
.4byte 0x2e /* DW_AT_type -> base-type DIE below */
.uleb128 0x5 /* DW_AT_location: exprloc length */
.byte 0x3 /* DW_OP_addr */
.4byte x
/* Base-type DIE (abbrev 3: DW_TAG_base_type) at CU offset 0x2e.  */
.uleb128 0x3
.byte 0x4 /* DW_AT_byte_size */
.byte 0x5 /* DW_AT_encoding: DW_ATE_signed */
.asciz "int" /* DW_AT_name */
.byte 0 /* end of CU's children */
.section .debug_abbrev,"",%progbits
.Ldebug_abbrev0:
.uleb128 0x1 /* abbrev code 1 */
.uleb128 0x11 /* DW_TAG_compile_unit */
.byte 0x1 /* DW_CHILDREN_yes */
.uleb128 0x25 /* DW_AT_producer */
.uleb128 0xe /* DW_FORM_strp */
.uleb128 0x13 /* DW_AT_language */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x1f /* DW_FORM_line_strp */
.uleb128 0x1b /* DW_AT_comp_dir */
.uleb128 0x1f /* DW_FORM_line_strp */
.uleb128 0x10 /* DW_AT_stmt_list */
.uleb128 0x17 /* DW_FORM_sec_offset */
.byte 0 /* terminator */
.byte 0 /* terminator */
.uleb128 0x2 /* abbrev code 2 */
.uleb128 0x34 /* DW_TAG_variable */
.byte 0 /* DW_CHILDREN_no */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x3a /* DW_AT_decl_file */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3b /* DW_AT_decl_line */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x39 /* DW_AT_decl_column */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x49 /* DW_AT_type */
.uleb128 0x13 /* DW_FORM_ref4 */
.uleb128 0x3f /* DW_AT_external */
.uleb128 0x19 /* DW_FORM_flag_present */
.uleb128 0x2 /* DW_AT_location */
.uleb128 0x18 /* DW_FORM_exprloc */
.byte 0 /* terminator */
.byte 0 /* terminator */
.uleb128 0x3 /* abbrev code 3 */
.uleb128 0x24 /* DW_TAG_base_type */
.byte 0 /* DW_CHILDREN_no */
.uleb128 0xb /* DW_AT_byte_size */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3e /* DW_AT_encoding */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.byte 0 /* terminator */
.byte 0 /* terminator */
.byte 0 /* end of abbrev table */
.section .debug_aranges,"",%progbits
.4byte 0x14 /* unit_length */
.2byte 0x2 /* aranges version 2 */
.4byte .Ldebug_info0 /* debug_info offset */
.byte 0x4 /* address_size */
.byte 0 /* segment_selector_size */
.2byte 0 /* padding to tuple alignment */
.2byte 0
.4byte 0 /* terminating (address, length) tuple */
.4byte 0
.section .debug_line,"",%progbits
.Ldebug_line0:
.section .debug_str,"MS",%progbits,1
.LASF2:
.asciz "GNU C17 11.2.1 -g"
.section .debug_line_str,"MS",%progbits,1
.LASF1:
.asciz "/example"
.LASF0:
.asciz "test.c"
.ident "GCC: (GNU) 11.2.1"
.section .note.GNU-stack,"",%progbits
|
stsp/binutils-ia16
| 1,328
|
gas/testsuite/gas/elf/dwarf2-5.s
|
/* Test view numbering.
Copyright (C) 2017-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
.file "dwarf2-5.c"
.text
.balign 8
.globl _start
_start:
.file 1 "dwarf2-5.c"
.loc 1 1 view 0
.loc 1 2 view .L2
.quad 0
.loc 1 3 view 0
.balign 8
.loc 1 4 view .L4
.loc 1 5 view .L5
.org .+1
.balign 8
.loc 1 6 view 0
.quad 0
.text
.globl func
.type func, %function
func:
.loc 1 7 view 0
.loc 1 8 view .L8
.quad 0
.loc 1 9 view 0
.loc 1 10 view .L10
.pushsection .text
.loc 1 11 view .L11
.popsection
.loc 1 12 view .L12
.quad 0
.size func, .-func
.section .rodata
.uleb128 .L2
.uleb128 .L4
.uleb128 .L5
.uleb128 .L8
.uleb128 .L10
.uleb128 .L11
.uleb128 .L12
|
stsp/binutils-ia16
| 1,213
|
gas/testsuite/gas/elf/dwarf2-19.s
|
/* Test view numbering continuity at subsection borders.
Copyright (C) 2017-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
.file "dwarf2-19.c"
.text 0 /* numbered subsections of .text; views must stay consistent across them */
.balign 8
.globl _start
_start:
.file 1 "dwarf2-19.c"
.loc 1 1 view 0
.section .rodata
/* Emit the view symbols as data so the assembler must resolve them.  */
.uleb128 .L1
.uleb128 .L3
.uleb128 .L4
.uleb128 .L2
.text 1 /* switch to subsection 1 */
.loc 1 2 view .L1 /* same address as view 0 above -> view 1 */
.text 2 /* switch to subsection 2 */
.loc 1 3 view .L2 /* same address as .L4 below -> view 2 */
.text 1 /* back to subsection 1 */
.quad 0
.loc 1 4 view .L3 /* bumped address from .L1's, view 0 */
.loc 1 5 view .L4 /* same address, view 1 */
|
stsp/binutils-ia16
| 2,789
|
gas/testsuite/gas/elf/dwarf2-3.s
|
.file "beginwarn.c"
.section .debug_abbrev,"",%progbits
.Ldebug_abbrev0:
.section .debug_info,"",%progbits
.Ldebug_info0:
.section .debug_line,"",%progbits
.Ldebug_line0:
.text
.Ltext0:
.section .init_array
.align 4
.type init_array, %object
.size init_array, 4
init_array:
.long foo
.section .gnu.warning.foo,"a",%progbits
.type _evoke_link_warning_foo, %object
.size _evoke_link_warning_foo, 27
_evoke_link_warning_foo:
.string "function foo is deprecated"
.file 1 "/beginwarn.c"
.text
.Letext0:
.section .debug_info
.4byte 0x8a
.2byte 0x2
.4byte .Ldebug_abbrev0
.byte 0x4
.uleb128 0x1
.4byte .Ldebug_line0
.4byte .Letext0
.4byte .Ltext0
.4byte .LASF4
.byte 0x1
.4byte .LASF5
.uleb128 0x2
.4byte 0x31
.4byte 0x38
.uleb128 0x3
.4byte 0x31
.byte 0x1a
.byte 0x0
.uleb128 0x4
.4byte .LASF0
.byte 0x4
.byte 0x7
.uleb128 0x5
.4byte 0x3d
.uleb128 0x4
.4byte .LASF1
.byte 0x1
.byte 0x6
.uleb128 0x6
.4byte .LASF2
.byte 0x1
.byte 0x3
.4byte 0x55
.byte 0x5
.byte 0x3
.4byte _evoke_link_warning_foo
.uleb128 0x5
.4byte 0x21
.uleb128 0x2
.4byte 0x6a
.4byte 0x6c
.uleb128 0x3
.4byte 0x31
.byte 0x0
.byte 0x0
.uleb128 0x7
.byte 0x1
.uleb128 0x5
.4byte 0x71
.uleb128 0x8
.byte 0x4
.4byte 0x6a
.uleb128 0x6
.4byte .LASF3
.byte 0x1
.byte 0x9
.4byte 0x88
.byte 0x5
.byte 0x3
.4byte init_array
.uleb128 0x5
.4byte 0x5a
.byte 0x0
.section .debug_abbrev
.uleb128 0x1
.uleb128 0x11
.byte 0x1
.uleb128 0x10
.uleb128 0x6
.uleb128 0x12
.uleb128 0x1
.uleb128 0x11
.uleb128 0x1
.uleb128 0x25
.uleb128 0xe
.uleb128 0x13
.uleb128 0xb
.uleb128 0x3
.uleb128 0xe
.byte 0x0
.byte 0x0
.uleb128 0x2
.uleb128 0x1
.byte 0x1
.uleb128 0x1
.uleb128 0x13
.uleb128 0x49
.uleb128 0x13
.byte 0x0
.byte 0x0
.uleb128 0x3
.uleb128 0x21
.byte 0x0
.uleb128 0x49
.uleb128 0x13
.uleb128 0x2f
.uleb128 0xb
.byte 0x0
.byte 0x0
.uleb128 0x4
.uleb128 0x24
.byte 0x0
.uleb128 0x3
.uleb128 0xe
.uleb128 0xb
.uleb128 0xb
.uleb128 0x3e
.uleb128 0xb
.byte 0x0
.byte 0x0
.uleb128 0x5
.uleb128 0x26
.byte 0x0
.uleb128 0x49
.uleb128 0x13
.byte 0x0
.byte 0x0
.uleb128 0x6
.uleb128 0x34
.byte 0x0
.uleb128 0x3
.uleb128 0xe
.uleb128 0x3a
.uleb128 0xb
.uleb128 0x3b
.uleb128 0xb
.uleb128 0x49
.uleb128 0x13
.uleb128 0x2
.uleb128 0xa
.byte 0x0
.byte 0x0
.uleb128 0x7
.uleb128 0x15
.byte 0x0
.uleb128 0x27
.uleb128 0xc
.byte 0x0
.byte 0x0
.uleb128 0x8
.uleb128 0xf
.byte 0x0
.uleb128 0xb
.uleb128 0xb
.uleb128 0x49
.uleb128 0x13
.byte 0x0
.byte 0x0
.byte 0x0
.section .debug_str,"MS",%progbits,1
.LASF5:
.string "/beginwarn.c"
.LASF0:
.string "unsigned int"
.LASF3:
.string "init_array"
.LASF2:
.string "_evoke_link_warning_foo"
.LASF4:
.string "GNU C 3.4.6"
.LASF1:
.string "char"
|
stsp/binutils-ia16
| 5,382
|
gas/testsuite/gas/elf/dwarf2-2.s
|
/* This testcase is derived from a similar test in GDB.
Copyright (C) 2009-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* Dummy function to provide debug information for. */
.file "dwarf2-2.c"
.section .gnu.linkonce.t.foo,"axG",%progbits,foo,comdat
.globl _start
_start:
.quad 0
.Lbegin_text1:
.globl func_cu1
.type func_cu1, %function
func_cu1:
.Lbegin_func_cu1:
.quad 0
.Lend_func_cu1:
.size func_cu1, .-func_cu1
.Lend_text1:
/* Debug information */
.section .debug_info,"G",%progbits,foo,comdat
.Lcu1_begin:
/* CU header */
.4byte .Lcu1_end - .Lcu1_start /* Length of Compilation Unit */
.Lcu1_start:
.2byte 2 /* DWARF Version */
.4byte .Labbrev1_begin /* Offset into abbrev section */
.byte 4 /* Pointer size */
/* CU die */
.uleb128 1 /* Abbrev: DW_TAG_compile_unit */
.4byte .Lline1_begin /* DW_AT_stmt_list */
.4byte .Lend_text1 /* DW_AT_high_pc */
.4byte .Lbegin_text1 /* DW_AT_low_pc */
.ascii "file1.txt\0" /* DW_AT_name */
.ascii "GNU C 3.3.3\0" /* DW_AT_producer */
.byte 1 /* DW_AT_language (C) */
/* func_cu1 */
.uleb128 2 /* Abbrev: DW_TAG_subprogram */
.byte 1 /* DW_AT_external */
.byte 1 /* DW_AT_decl_file */
.byte 2 /* DW_AT_decl_line */
.ascii "func_cu1\0" /* DW_AT_name */
.4byte .Ltype_int-.Lcu1_begin /* DW_AT_type */
.4byte .Lbegin_func_cu1 /* DW_AT_low_pc */
.4byte .Lend_func_cu1 /* DW_AT_high_pc */
.byte 1 /* DW_AT_frame_base: length */
.byte 0x55 /* DW_AT_frame_base: DW_OP_reg5 */
.Ltype_int:
.uleb128 3 /* Abbrev: DW_TAG_base_type */
.ascii "int\0" /* DW_AT_name */
.byte 4 /* DW_AT_byte_size */
.byte 5 /* DW_AT_encoding */
.byte 0 /* End of children of CU */
.Lcu1_end:
/* Line table */
.section .debug_line,"G",%progbits,foo,comdat
.Lline1_begin:
.4byte .Lline1_end - .Lline1_start /* Initial length */
.Lline1_start:
.2byte 2 /* Version */
.4byte .Lline1_lines - .Lline1_hdr /* header_length */
.Lline1_hdr:
.byte 1 /* Minimum insn length */
.byte 1 /* default_is_stmt */
.byte 1 /* line_base */
.byte 1 /* line_range */
.byte 0x10 /* opcode_base */
/* Standard lengths */
.byte 0
.byte 1
.byte 1
.byte 1
.byte 1
.byte 0
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 0
/* Include directories */
.byte 0
/* File names */
.ascii "file1.txt\0"
.uleb128 0
.uleb128 0
.uleb128 0
.byte 0
.Lline1_lines:
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lbegin_func_cu1
.byte 3 /* DW_LNS_advance_line */
.sleb128 3 /* ... to 4 */
.byte 1 /* DW_LNS_copy */
.byte 1 /* DW_LNS_copy (second time as an end-of-prologue marker) */
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lend_func_cu1
.byte 0 /* DW_LNE_end_of_sequence */
.uleb128 1
.byte 1
.Lline1_end:
/* Abbrev table */
.section .debug_abbrev,"G",%progbits,foo,comdat
.Labbrev1_begin:
.uleb128 1 /* Abbrev code */
.uleb128 0x11 /* DW_TAG_compile_unit */
.byte 1 /* has_children */
.uleb128 0x10 /* DW_AT_stmt_list */
.uleb128 0x6 /* DW_FORM_data4 */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x25 /* DW_AT_producer */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x13 /* DW_AT_language */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 2 /* Abbrev code */
.uleb128 0x2e /* DW_TAG_subprogram */
.byte 0 /* has_children */
.uleb128 0x3f /* DW_AT_external */
.uleb128 0xc /* DW_FORM_flag */
.uleb128 0x3a /* DW_AT_decl_file */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3b /* DW_AT_decl_line */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x49 /* DW_AT_type */
.uleb128 0x13 /* DW_FORM_ref4 */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x40 /* DW_AT_frame_base */
.uleb128 0xa /* DW_FORM_block1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 3 /* Abbrev code */
.uleb128 0x24 /* DW_TAG_base_type */
.byte 0 /* has_children */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0xb /* DW_AT_byte_size */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3e /* DW_AT_encoding */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
|
stsp/binutils-ia16
| 3,644
|
gas/testsuite/gas/msp430/msp430x.s
|
.text
.global foo
foo:
adc r4 ; MSP430 instruction for comparison purposes.
adcx r4
adcx.a bar
adcx.b r6
adcx.w r7
addcx r8, r9
addcx.a #0x12345, r10
addcx.b r11, r12
addcx.w r13, r14
ADDX @R9, PC
ADDX R9, PC
ADDX.A #FE000h, PC
ADDX.A &EDE, PC
ADDX.A @R9+, PC
ADDX.A EDE, PC
addx.b r1, r2
addx.w r3, r4
ADDX K(R4), R5
ANDX #1234, 4(R6)
ANDX 4(R7), 4(R6)
ANDX @R5+, 4(R6)
ANDX EDE, 4(R6)
ANDX EDE, TONI
ANDX.A @R5, 4(R6)
ANDX.A R5, 4(R6)
ANDX.B &EDE, 4(R6)
andx.w r1, r2
bicx #0xa0, r14
bicx.a #0xa0, r14
bicx.b #0xa0, r14
bicx.w #0xa0, r14
bisx #8, r11
bisx.a #8, r11
bisx.b #8, r11
bisx.w #8, r11
BITX #20, R8
BITX &EDE, &TONI
BITX &EDE, R8
BITX 2(R5), R8
BITX 8(SP), &EDE
BITX @R5+, &EDE
BITX @R5+, R8
BITX @R5, R8
BITX EDE, &TONI
BITX.B #12, &EDE
BITX.B @R5, &EDE
BITX.B EDE, R8
BITX.B R5, R8
BITX.W R5, &EDE
clrx TONI
clrx.a fooz
clrx.b bar
clrx.w baz
cmpx #0, r15
cmpx.a #01800h, ede
cmpx.b @r1, r15
cmpx.w @r2+, &pin
dadcx fooz
dadcx.a 0(r12)
dadcx.b bar
dadcx.w r12
daddx @r5, r7
daddx.a #10h, &decdr
daddx.b 2(r6), r4
daddx.w bcd, r4
decx toni
decx.a fooz
decx.b bar
decx.w fred
decdx toni
decdx.a fooz
decdx.b bar
decdx.w fred
incx r4
incx.a r5
incx.b r6
incx.w r7
incdx r8
incdx.a r9
incdx.b r10
incdx.w r11
invx r12
invx.a LEO
invx.b r14
invx.w r15
movx #foo, r4
movx.a #foo, r5
movx.b #foo, r6
movx.w #foo, r7
MOVX &X, R5
MOVX #X, R5
MOVX R5, &Y
MOVX #0xabcde, &Y
MOVX &X, &Y
MOVX #X, &Y
MOVX X, R5
MOVX R5, Y
MOVX #0xabcde, Y
MOVX X, Y
sbcx r15
sbcx.a 012345h
sbcx.b r15
sbcx.w 0(r7)
subcx r15, r15
subcx.a #012345h, r15
subcx.b r15, r15
subcx.w @r5+, 0(r7)
SUBX 2(R6), PC
SUBX.A #4455, ede
SUBX.B 2(R6), PC
SUBX.W 2(R6), PC
tstx LEO
tstx.a foo
tstx.b bar
tstx.w baz
XORX #5A5Ah, EDE
XORX &EDE, TONI
XORX @R8, EDE
XORX R8, EDE
XORX.B 2(R6), EDE
XORX.B @R8+, EDE
xorx.a toni, &cntr
xorx.w @r5, r6
xorx.a #12345, 0x45678h(r15)
adda #0x12345, r7
adda r6, r14
bra #bar
bra #011044H
bra r5
bra &ede
bra @r5
bra @r5+
bra 0x9876(r5)
calla r5
calla 0x1234(r6)
calla @r7
calla @r8+
calla &foo
calla bar
calla #011004h
clra r6
cmpa r1, r2
cmpa #0xfedcb, r3
decda r5
incda r5
mova R9,R8
MOVA #12345h,R12
MOVA 100h(R9),R8
MOVA &EDE,R12
MOVA @R9,R8
MOVA @R9+,R8
MOVA R8,100h(R9)
MOVA R13,&EDE
reta
reti
suba r5, r6
suba #0xfffff, r6
tsta fooz
popm #1, r5
popm.a #3, r15
popm.w #8, r12
popx r10
popx.a r10
popx.b r10
popx.w r10
pushm #1, r9
pushm.a #2, r9
pushm.w #3, r9
pushx r8
pushx.a r8
pushx.b &ede
pushx.w r8
rlam #1, r15
rlam.a #2, r15
rlam.w #3, r15
rlax r6
rlax.a r6
rlax.w r6
rlcx r6
rlcx.a r6
rlcx.w r6
rram #1, r6
rram.a #4, r6
rram.w #2, r6
rrax r11
rrax.a r11
rrax.w r11
rrcm #4, r5
rrcm.a #1, r5
rrcm.w #3, r5
rrcx r13
rrcx.a r13
rrcx.w r13
rrum #3, r4
rrum.a #2, r4
rrum.w #1, r4
rrux r4
rrux.a r7
rrux.b r5
rrux.w r6
swpbx r1
swpbx.a ede
swpbx.w r12
sxtx r2
sxtx.a &ede
sxtx.w r2
rpt #5
rrax.a r5
rpt r5
rrax.a r5
;; The following are all aliases for similarly named instructions
;; without the period. Eg: add.a -> adda
add.a r1, r2
br.a r1
call.a r1
clr.a r1
cmp.a r1, r2
decd.a r1
incd.a r1
mov.a r1, r2
ret.a
sub.a r1, r2
tst.a fooz
;; Check that repeat counts can be used with shift instructions.
rpt r1 { rrux.w r1
rpt #2 { rrcx.w r2
rpt #3 { rrax.b r7
rpt r4 { rrax.a r4
rpt #5 { rlax.b r5
rpt #6 { rlcx.a r6
|
stsp/binutils-ia16
| 1,279
|
gas/testsuite/gas/msp430/nop-int.s
|
.text
;;; Test some common instruction patterns for disabling/enabling interrupts.
;;; "MOV &FOO,r10" is used as an artbitrary statement which isn't a NOP, to
;;; break up the instructions being tested.
fn1:
;;; 1: Test EINT
;; 430 ISA: NOP *not* required before *or* after EINT
;; 430x ISA: NOP *is* required before *and* after EINT
MOV &FOO,r10
EINT
MOV &FOO,r10
BIS.W #8,SR ; Alias for EINT
MOV &FOO,r10
;;; 2: Test DINT
;; 430 ISA: NOP *is* required after DINT
;; 430x ISA: NOP *is* required after DINT
MOV &FOO,r10
DINT
NOP
MOV &FOO,r10
BIC.W #8,SR ; Alias for DINT
NOP
MOV &FOO,r10
;;; 3: Test EINT immediately before DINT
;; 430 ISA: NOP *not* required.
;; 430x ISA: NOP *is* required between EINT and DINT
MOV &FOO,r10
NOP
EINT
DINT
NOP
MOV &FOO,r10
NOP
BIS.W #8,SR ; Alias for EINT
BIC.W #8,SR ; Alias for DINT
NOP
MOV &FOO,r10
;;; 4: Test DINT immediately before EINT
;; 430 ISA: NOP *is* required after DINT.
;; 430x ISA: NOP *is* required after DINT and before EINT. Ensure only one
;; warning is emitted.
MOV &FOO,r10
NOP
DINT
EINT
NOP
MOV &FOO,r10
BIC.W #8,SR ; Alias for DINT
BIS.W #8,SR ; Alias for EINT
NOP
MOV &FOO,r10
;;; 5: Test EINT last insn in file
NOP
EINT
|
stsp/binutils-ia16
| 2,145
|
gas/testsuite/gas/wasm32/allinsn.s
|
block[]
br 0
br_if 0
br_table 1 1 1
call 0
call_indirect 0 0
drop
else
end
f32.abs
f32.add
f32.ceil
f32.const 3.14159
f32.convert_s/i32
f32.convert_s/i64
f32.convert_u/i32
f32.convert_u/i64
f32.copysign
f32.demote/f64
f32.div
f32.eq
f32.floor
f32.ge
f32.gt
f32.le
f32.load a=0 0
f32.lt
f32.max
f32.min
f32.mul
f32.ne
f32.nearest
f32.neg
f32.reinterpret/i32
f32.sqrt
f32.store a=0 0
f32.sub
f32.trunc
f64.abs
f64.add
f64.ceil
f64.const 3.14159e200
f64.convert_s/i32
f64.convert_s/i64
f64.convert_u/i32
f64.convert_u/i64
f64.copysign
f64.div
f64.eq
f64.floor
f64.ge
f64.gt
f64.le
f64.load a=0 0
f64.lt
f64.max
f64.min
f64.mul
f64.ne
f64.nearest
f64.neg
f64.promote/f32
f64.reinterpret/i64
f64.sqrt
f64.store a=0 0
f64.sub
f64.trunc
get_global 0
get_local 0
i32.add
i32.and
i32.clz
i32.const 0xdeadbeef
i32.ctz
i32.div_s
i32.div_u
i32.eq
i32.eqz
i32.ge_s
i32.ge_u
i32.gt_s
i32.gt_u
i32.le_s
i32.le_u
i32.load a=0 0
i32.load16_s a=0 0
i32.load16_u a=0 0
i32.load8_s a=0 0
i32.load8_u a=0 0
i32.lt_s
i32.lt_u
i32.mul
i32.ne
i32.or
i32.popcnt
i32.reinterpret/f32
i32.rem_s
i32.rem_u
i32.rotl
i32.rotr
i32.shl
i32.shr_s
i32.shr_u
i32.store a=0 0
i32.store16 a=0 0
i32.store8 a=0 0
i32.sub
i32.trunc_s/f32
i32.trunc_s/f64
i32.trunc_u/f32
i32.trunc_u/f64
i32.wrap/i64
i32.xor
i64.add
i64.and
i64.clz
i64.const 0xdeadbeefdeadbeef
i64.ctz
i64.div_s
i64.div_u
i64.eq
i64.eqz
i64.extend_s/i32
i64.extend_u/i32
i64.ge_s
i64.ge_u
i64.gt_s
i64.gt_u
i64.le_s
i64.le_u
i64.load a=0 0
i64.load16_s a=0 0
i64.load16_u a=0 0
i64.load32_s a=0 0
i64.load32_u a=0 0
i64.load8_s a=0 0
i64.load8_u a=0 0
i64.lt_s
i64.lt_u
i64.mul
i64.ne
i64.or
i64.popcnt
i64.reinterpret/f64
i64.rem_s
i64.rem_u
i64.rotl
i64.rotr
i64.shl
i64.shr_s
i64.shr_u
i64.store a=0 0
i64.store16 a=0 0
i64.store32 a=0 0
i64.store8 a=0 0
i64.sub
i64.trunc_s/f32
i64.trunc_s/f64
i64.trunc_u/f32
i64.trunc_u/f64
i64.xor
if[i]
loop[l]
nop
return
select
set_global 0
set_local 0
signature FvildffdliE
tee_local 0
unreachable
|
stsp/binutils-ia16
| 3,323
|
gas/testsuite/gas/mcore/allinsn.s
|
.data
foodata: .word 42
.text
footext:
.macro test insn text=""
.export \insn
\insn:
\insn \text
.endm
test abs r0
test addc "r1,r2" // A double forward slash starts a line comment
test addi "r3, 1" # So does a hash
test addu "r4, r5" // White space between operands should be ignored
test and "r6,r7" ; test andi "r8,2" // A semicolon separates statements
test andn "r9, r10"
test asr "r11, R12" // Uppercase R is allowed as a register prefix
test asrc "r13"
test asri "r14,0x1f"
test bclri "r15,0"
test bf footext
test bgeni "sp, 7" // r0 can also be referred to as 'sp'
test BGENI "r0, 8" // Officially upper case or mixed case
test BGENi "r0, 31" // mnemonics should not be allowed, but we relax this...
test bgenr "r1, r2"
test bkpt
test bmaski "r3,8"
test BMASKI "r3,0x1f"
test br . // Dot means the current address
test brev r4
test bseti "r5,30"
test bsr footext
test bt footext
test btsti "r6, 27"
test clrc
test clrf r7
test clrt r8
test cmphs "r9,r10"
test cmplt "r11,r12"
test cmplei "r11, 14"
test cmplti "r13,32"
test cmpne "r14, r15"
test cmpnei "r0,0"
test decf r1
test decgt r2
test declt r3
test decne r4
test dect r5
test divs "r6,r1"
test divu "r8, r1"
test doze
test ff1 r10
test incf r11
test inct r12
test ixh "r13,r14"
test ixw "r15,r0"
test jbf footext
test jbr fooloop
test jbsr footext
test jbt fooloop
test jmp r1
test jmpi footext
test jsr r2
test jsri footext
test ld.b "r3,(r4,0)"
test ld.h "r5 , ( r6, 2)"
test ld.w "r7, (r8, 0x4)"
test ldb "r9,(r10,0xf)"
test ldh "r11, (r12, 30)"
test ld "r13, (r14, 20)"
test ldw "r13, (r14, 60)"
test ldm "r2-r15,(r0)"
.export fooloop
fooloop:
test ldq "r4-r7,(r1)"
test loopt "r8, fooloop"
test LRW "r9, [foolit]"
test lrw "r9, 0x4321" // PC rel indirect
.global foolit
foolit:
.word 0x1234
test lsl "r10,r11"
test lslc r12
.literals // Dump literals table
test lsli "r13,31"
test lsr "r14,r15"
test lsrc r0
test lsri "r1,1"
test mclri "r4, 64"
test mfcr "r2, cr0"
test mov "r3,r4"
test movf "r5, r6"
test movi "r7, 127"
test movt "r8, r9"
test mtcr "r10, psr"
test mult "r11, r12"
test mvc r13
test mvcv r14
test neg r2
test not r15
test or "r0,r1"
test rfi
test rolc "r6, 1"
test rori "r9, 6"
test rotlc "r6, 1"
test rotli "r2, 10"
test rotri "r9, 6"
test rsub "r3, r4"
test rsubi "r5, 0x0"
test rte
test rts
test setc
test sextb r6
test sexth r7
test st.b "r8, (r9, 0)"
test st.h "r10, (r11, 2)"
test st.w "r12, (r13, 4)"
test stb "r14, (r15, 15)"
test sth "r0, (r1, 30)"
test stw "r2, (r3, 0x3c)"
test st "r4, (r5, 0)"
test stm "r14 - r15 , (r0)"
test stop
test stq "r4 - r7 , (r1)"
test subc "r7, r13"
test subi "r14, 32"
test subu "r9, r3"
test sync
test tstlt r5
test tstne r7
test trap 2
test tst "r14, r14"
test tstnbz r2
test wait
test xor "r15,r0"
test xsr r11
test xtrb0 "r1, r1"
test xtrb1 "r1, r2"
test xtrb2 "r1, r0"
test xtrb3 "r1, r13"
test zextb r8
test zexth r4
clrc // These two instructions pad the object file
clrc // out to a 16 byte boundary.
|
stsp/binutils-ia16
| 5,219
|
gas/testsuite/gas/fr30/allinsn.s
|
.data
foodata: .word 42
.text
footext:
.global add
add:
add r0, r1
add #0, r2
.global add2
add2:
add2 #-1, r3
.global addc
addc:
addc r4, r5
.global addn
addn:
addn r6, r7
addn #15, r8
.global addn2
addn2:
addn2 #-16, r9
.global sub
sub:
sub r10, r11
.global subc
subc:
subc r12, r13
.global subn
subn:
subn r14, r15
.global cmp
cmp:
cmp ac, fp
cmp #1, sp
.global cmp2
cmp2:
cmp2 #-15, r0
.global and
and:
and r1, r2
and r3, @r4
.global andh
andh:
andh r5, @r6
.global andb
andb:
andb r7, @r8
.global or
or:
or r9, r10
or r11, @r12
.global orh
orh:
orh r13, @r14
.global orb
orb:
orb r15, @ac
.global eor
eor:
eor fp, sp
eor r0, @r1
.global eorh
eorh:
eorh r2, @r3
.global eorb
eorb:
eorb r4, @r5
.global bandl
bandl:
bandl #15, @r6
.global bandh
/* NOTE(review): label "nadh" does not match the ".global bandh" above --
   looks like a typo for "bandh:".  Left unchanged because the testsuite's
   expected symbol table/dump may depend on it -- TODO confirm before fixing.  */
nadh:
bandh #7, @r7
.global borl
borl:
borl #3, @r8
.global borh
borh:
borh #13, @r9
.global beorl
beorl:
beorl #15, @r10
.global beorh
beorh:
beorh #1, @r11
.global btstl
btstl:
btstl #0, @r12
.global btsth
btsth:
btsth #8, @r13
.global mul
mul:
mul r14, r15
.global mulu
mulu:
mulu ac, fp
.global muluh
muluh:
muluh sp, r0
.global mulh
mulh:
mulh r1, r2
.global div0s
div0s:
div0s r3
.global div0u
div0u:
div0u r4
.global div1
div1:
div1 r5
.global div2
div2:
div2 r6
.global div3
div3:
div3
.global div4s
div4s:
div4s
.global lsl
lsl:
lsl r7, r8
lsl #3, r9
.global lsl2
lsl2:
lsl2 #0, r10
.global lsr
lsr:
lsr r11, r12
lsr #15, r13
.global lsr2
lsr2:
lsr2 #15, r14
.global asr
asr:
asr r15, ac
asr #6, fp
.global asr2
asr2:
asr2 #7, sp
.global ldi_8
ldi_8:
ldi:8 #0xff, r2
.global ld
ld:
ld @r3, r4
ld @(R13, r5), r6
ld @(R14, 0x1fc), r7
ld @(R15, 0x3c), r8
ld @r15+, r9
ld @r15+, ps
ld @R15+, tbr
ld @r15+, rp
ld @R15+, ssp
.global lduh
lduh:
lduh @r10, r11
lduh @(r13, r12), r13
lduh @(r14, #-256), r15
.global ldub
ldub:
ldub @ac, fp
ldub @(r13, sp), r0
ldub @(r14, -128), r1
.global st
st:
st r2, @r3
st r4, @(r13, r5)
st r6, @(r14, -512)
st r7, @(r15, 0x3c)
st r8, @ - r15
st MDH, @-r15
st PS, @ - r15
/* NOTE(review): ".global lsth" exports a symbol that is never defined in this
   file; the label below is "sth".  Presumably one of the two names is a typo
   ("lsth" vs "sth") -- verify against the expected test output before fixing.  */
.global lsth
sth:
sth r9, @r10
sth r11, @(r13, r12)
sth r13, @(r14, 128)
.global stb
stb:
STB r14, @r15
stb r0, @(r13, r1)
STB r2, @(r14, -128)
.global mov
mov:
mov r3, r4
MOV mdl, r5
mov ps, r6
mov r7, usp
mov r8, ps
.global jmp
jmp:
jmp @r9
.global ret
ret:
ret
.global bra
bra:
bra footext
.global bno
bno:
bno footext
.global beq
beq:
beq footext
.global bne
bne:
bne footext
.global bc
bc:
bc footext
.global bnc
bnc:
bnc footext
.global bn
bn:
bn footext
.global bp
bp:
bp footext
.global bv
bv:
bv footext
.global bnv
bnv:
bnv footext
.global blt
blt:
blt footext
.global bge
bge:
bge footext
.global ble
ble:
ble footext
.global bgt
bgt:
bgt footext
.global bls
bls:
bls footext
.global bhi
bhi:
bhi footext
delay_footext:
.global jmp_d
jmp_d:
jmp:d @r11
nop
.global ret_d
ret_d:
ret:d
nop
.global bra_d
bra_d:
bra:D delay_footext
nop
.global bno_d
bno_d:
bno:d delay_footext
nop
.global beq_d
beq_d:
beq:D delay_footext
nop
.global bne_d
bne_d:
bne:d delay_footext
nop
.global bc_d
bc_d:
bc:d delay_footext
nop
.global bnc_d
bnc_d:
bnc:d delay_footext
nop
.global bn_d
bn_d:
bn:d delay_footext
nop
.global bp_d
bp_d:
bp:d delay_footext
nop
.global bv_d
bv_d:
bv:d delay_footext
nop
.global bnv_d
bnv_d:
bnv:d delay_footext
nop
.global blt_d
blt_d:
blt:d delay_footext
nop
.global bge_d
bge_d:
bge:d delay_footext
nop
.global ble_d
ble_d:
ble:d delay_footext
nop
.global bgt_d
bgt_d:
bgt:d delay_footext
nop
.global bls_d
bls_d:
bls:d delay_footext
nop
.global bhi_d
bhi_d:
bhi:d delay_footext
nop
.global ldres
ldres:
ldres @r2+, #8
.global stres
stres:
stres #15, @r3+
.global nop
nop:
nop
.global andccr
andccr:
andccr #255
.global orccr
orccr:
orccr #125
.global stilm
stilm:
stilm #97
.global addsp
addsp:
addsp #-512
.global extsb
extsb:
extsb r9
.global extub
extub:
extub r10
.global extsh
extsh:
extsh r11
.global extuh
extuh:
extuh r12
.global enter
enter:
enter #1020
.global leave
leave:
leave
.global xchb
xchb:
xchb @r14, r15
.global ldi_32
ldi_32:
ldi:32 #0x12345678, r0
.global copop
copop:
copop #15, #1, cr3, cr4
copop #15, #4, cr5, cr6
copop #15, #255, cr7, cr0
.global copld
copld:
copld #0, #0, r4, cr0
.global copst
copst:
copst #7, #2, cr1, r5
.global copsv
copsv:
copsv #8, #3, cr2, r6
.global ldm0
ldm0:
ldm0 (r0, r2, r3, r7)
.global ldm1
ldm1:
ldm1 (r8, r11, r15)
.global stm0
stm0:
stm0 (r2, r3)
.global stm1
stm1:
stm1 (r13, r14)
.global call
call:
call footext
call @r10
.global call_d
call_d:
call:D footext
nop
call:d @r12
nop
.global dmov
dmov:
dmov @0x88, r13
dmov r13, @0x54
dmov @0x44, @r13+
dmov @R13+, @0x2
dmov @0x2c, @-r15
dmov @r15+, @38
.global dmovh
dmovh:
dmovh @0x88, r13
dmovh r13, @0x52
dmovh @0x34, @r13 +
dmovh @r13+, @0x52
.global dmovb
dmovb:
dmovb @0x91, r13
dmovb r13, @0x53
dmovb @71, @r13+
dmovb @r13+, @0x0
.global ldi_20
ldi_20:
ldi:20 #0x000fffff, r1
finish:
ldi:32 #0x8000,r0
mov r0,ssp
ldi:32 #1,r0
int #10
.global inte
inte:
inte
.global reti
reti:
reti
|
stsp/binutils-ia16
| 6,114
|
gas/testsuite/gas/iq2000/allinsn.s
|
.data
foodata: .word 42
.text
footext:
.text
.global add
add:
add %0,%0,%0
.text
.global addi
addi:
addi %0,%0,-4
.text
.global addiu
addiu:
addiu %0,%0,4
.text
.global addu
addu:
addu %0,%0,%0
.text
.global ado16
ado16:
ado16 %0,%0,%0
.text
.global and
and:
and %0,%0,%0
.text
.global andi
andi:
andi %0,%0,0xdead
.text
.global andoi
andoi:
andoi %0,%0,0
.text
.global andoui
andoui:
andoui %0,%0,0
.text
.global mrgb
mrgb:
mrgb %0,%0,%0,0
.text
.global nor
nor:
nor %0,%0,%0
.text
.global or
or:
or %0,%0,%0
.text
.global ori
ori:
ori %0,%0,-1
.text
.global orui
orui:
orui %0,%0,0
.text
.global ram
ram:
ram %0,%0,0,0,0
.text
.global sll
sll:
sll %0,%0,0
.text
.global sllv
sllv:
sllv %0,%0,%0
.text
.global slmv
slmv:
slmv %0,%0,%0,0
.text
.global slt
slt:
slt %0,%0,%0
.text
.global slti
slti:
slti %0,%0,0
.text
.global sltiu
sltiu:
sltiu %0,%0,0
.text
.global sltu
sltu:
sltu %0,%0,%0
.text
.global sra
sra:
sra %0,%0,0
.text
.global srav
srav:
srav %0,%0,%0
.text
.global srl
srl:
srl %0,%0,0
.text
.global srlv
srlv:
srlv %0,%0,%0
.text
.global srmv
srmv:
srmv %0,%0,%0,0
.text
.global sub
sub:
sub %0,%0,%0
.text
.global subu
subu:
subu %0,%0,%0
.text
.global xor
xor:
xor %0,%0,%0
.text
.global xori
xori:
xori %0,%0,0
.text
.global bbi
bbi:
bbi %0(0),footext
.text
.global bbin
bbin:
bbin %0(0),footext
.text
.global bbv
bbv:
bbv %0,%0,footext
.text
.global bbvn
bbvn:
bbvn %0,%0,footext
.text
.global beq
beq:
beq %0,%0,footext
.text
.global beql
beql:
beql %0,%0,footext
.text
.global bgez
bgez:
bgez %0,footext
.text
.global bgezal
bgezal:
bgezal %0,footext
.text
.global bgezall
bgezall:
bgezall %0,footext
.text
.global bgezl
bgezl:
bgezl %0,footext
.text
.global bgtz
bgtz:
bgtz %0,footext
.text
.global bgtzl
bgtzl:
bgtzl %0,footext
.text
.global blez
blez:
blez %0,footext
.text
.global blezl
blezl:
blezl %0,footext
.text
.global bltz
bltz:
bltz %0,footext
.text
.global bltzl
bltzl:
bltzl %0,footext
.text
.global bltzal
bltzal:
bltzal %0,footext
.text
.global bltzall
bltzall:
bltzall %0,footext
.text
.global bmb
bmb:
bmb %0,%0,footext
.text
.global bmb0
bmb0:
bmb0 %0,%0,footext
.text
.global bmb1
bmb1:
bmb1 %0,%0,footext
.text
.global bmb2
bmb2:
bmb2 %0,%0,footext
.text
.global bmb3
bmb3:
bmb3 %0,%0,footext
.text
.global bne
bne:
bne %0,%0,footext
.text
.global bnel
bnel:
bnel %0,%0,footext
.text
.global bctxt
bctxt:
bctxt %0,footext
.text
.global bc0f
bc0f:
bc0f footext
.text
.global bc0fl
bc0fl:
bc0fl footext
.text
.global bc3f
bc3f:
bc3f footext
.text
.global bc3fl
bc3fl:
bc3fl footext
.text
.global bc0t
bc0t:
bc0t footext
.text
.global bc0tl
bc0tl:
bc0tl footext
.text
.global bc3t
bc3t:
bc3t footext
.text
.global bc3tl
bc3tl:
bc3tl footext
.text
.global break
break:
break
.text
.global cfc0
cfc0:
cfc0 %0,%0
.text
.global cfc1
cfc1:
cfc1 %0,%0
.text
.global cfc2
cfc2:
cfc2 %0,%0
.text
.global cfc3
cfc3:
cfc3 %0,%0
.text
.global chkhdr
chkhdr:
chkhdr %0,%0
.text
.global ctc0
ctc0:
ctc0 %0,%0
.text
.global ctc1
ctc1:
ctc1 %0,%0
.text
.global ctc2
ctc2:
ctc2 %0,%0
.text
.global ctc3
ctc3:
ctc3 %0,%0
.text
.global jcr
jcr:
jcr %0
.text
.global luc32
nop
luc32:
# insert a nop here to pacify the assembler (luc32 may not follow jcr).
luc32 %0,%0
.text
.global luc32l
luc32l:
luc32l %0,%0
.text
.global luc64
luc64:
luc64 %0,%0
.text
.global luc64l
luc64l:
luc64l %0,%0
.text
.global luk
luk:
luk %0,%0
.text
.global lulck
lulck:
lulck %0
.text
.global lum32
lum32:
lum32 %0,%0
.text
.global lum32l
lum32l:
lum32l %0,%0
.text
.global lum64
lum64:
lum64 %0,%0
.text
.global lum64l
lum64l:
lum64l %0,%0
.text
.global lur
lur:
lur %0,%0
.text
.global lurl
lurl:
lurl %0,%0
.text
.global luulck
luulck:
luulck %0
.text
.global mfc0
mfc0:
mfc0 %0,%0
.text
.global mfc1
mfc1:
mfc1 %0,%0
.text
.global mfc2
mfc2:
mfc2 %0,%0
.text
.global mfc3
mfc3:
mfc3 %0,%0
.text
.global mtc0
mtc0:
mtc0 %0,%0
.text
.global mtc1
mtc1:
mtc1 %0,%0
.text
.global mtc2
mtc2:
mtc2 %0,%0
.text
.global mtc3
mtc3:
mtc3 %0,%0
.text
.global rb
rb:
rb %0,%0
.text
.global rbr1
rbr1:
rbr1 %0,0,0
.text
.global rbr30
rbr30:
rbr30 %0,0,0
.text
.global rfe
rfe:
rfe
.text
.global rx
rx:
rx %0,%0
.text
.global rxr1
rxr1:
rxr1 %0,0,0
.text
.global rxr30
rxr30:
rxr30 %0,0,0
.text
.global sleep
sleep:
sleep
.text
.global srrd
srrd:
srrd %0
.text
.global srrdl
srrdl:
srrdl %0
.text
.global srulck
srulck:
srulck %0
.text
.global srwr
srwr:
srwr %0,%0
.text
.global srwru
srwru:
srwru %0,%0
.text
.global syscall
syscall:
syscall
.text
.global trapqfl
trapqfl:
trapqfl
.text
.global trapqne
trapqne:
trapqne
.text
.global wb
wb:
wb %0,%0
.text
.global wbu
wbu:
wbu %0,%0
.text
.global wbr1
wbr1:
# NOTE(review): uses %3 where every sibling test in this file uses %0 --
# possibly deliberate (exercising a different register field) or a typo;
# confirm against the expected disassembly before changing.
wbr1 %3,0,0
.text
.global wbr1u
wbr1u:
wbr1u %0,0,0
.text
.global wbr30
wbr30:
wbr30 %0,0,0
.text
.global wbr30u
wbr30u:
wbr30u %0,0,0
.text
.global wx
wx:
wx %0,%0
.text
.global wxu
wxu:
wxu %0,%0
.text
.global wxr1
wxr1:
wxr1 %0,0,0
.text
.global wxr1u
wxr1u:
wxr1u %0,0,0
.text
.global wxr30
wxr30:
wxr30 %0,0,0
.text
.global wxr30u
wxr30u:
wxr30u %0,0,0
.text
.global j
j:
j footext
.text
.global jal
jal:
jal footext
.text
.global jalr
jalr:
jalr %0,%0
.text
.global jr
jr:
jr %0
.text
.global lb
lb:
lb %0,0x1024(%0)
.text
.global lbu
lbu:
lbu %0,0x1024(%0)
.text
.global ldw
ldw:
ldw %0,0x1024(%0)
.text
.global lh
lh:
lh %0,0x1024(%0)
.text
.global lhu
lhu:
lhu %0,0x1024(%0)
.text
.global lui
lui:
lui %0,-1
.text
.global lw
lw:
lw %0,0x1024(%0)
.text
.global sb
sb:
sb %0,0x1024(%0)
.text
.global sdw
sdw:
sdw %0,0x1024(%0)
.text
.global sh
sh:
sh %0,0x1024(%0)
.text
.global sw
sw:
sw %0,0x1024(%0)
.text
.global traprel
traprel:
traprel %0
.text
.global pkrl
pkrl:
pkrl %0,%1
.text
.global pkrlr1
pkrlr1:
pkrlr1 %0,0,0
.text
.global pkrlr30
pkrlr30:
pkrlr30 %0,0,0
|
stsp/binutils-ia16
| 1,540
|
gas/testsuite/gas/iq2000/yield0.s
|
# This test case includes a single case of a yield instruction
# (e.g. SLEEP) appearing in the branch delay slot. We expect
# the assembler to issue a warning about this!
.text
# yield insn in the branch delay slot.
beq %0,%0,foo
cfc2 %1, %1
# likewise for the rest.
beq %0,%0,foo
cfc3 %1, %1
beq %0,%0,foo
chkhdr %1, %1
beq %0,%0,foo
luc32 %1, %1
beq %0,%0,foo
luc32l %1, %1
beq %0,%0,foo
luc64 %1, %1
beq %0,%0,foo
luc64l %1, %1
beq %0,%0,foo
lulck %1
beq %0,%0,foo
lum32 %1, %1
beq %0,%0,foo
lum32l %1, %1
beq %0,%0,foo
lum64 %1, %1
beq %0,%0,foo
lum64l %1, %1
beq %0,%0,foo
lur %1, %1
beq %0,%0,foo
lurl %1, %1
beq %0,%0,foo
luulck %1
beq %0,%0,foo
mfc2 %1, %1
beq %0,%0,foo
mfc3 %1, %1
beq %0,%0,foo
rb %1, %1
beq %0,%0,foo
rbr1 %1, 1, 1
beq %0,%0,foo
rbr30 %1, 1, 1
beq %0,%0,foo
rx %1, %1
beq %0,%0,foo
rxr1 %1, 1, 1
beq %0,%0,foo
rxr30 %1, 1, 1
beq %0,%0,foo
sleep
beq %0,%0,foo
srrd %1
beq %0,%0,foo
srrdl %1
beq %0,%0,foo
srulck %1
beq %0,%0,foo
srwr %1, %1
beq %0,%0,foo
srwru %1, %1
beq %0,%0,foo
syscall
beq %0,%0,foo
trapqfl
beq %0,%0,foo
trapqne
beq %0,%0,foo
wb %1, %1
beq %0,%0,foo
wbu %1, %1
beq %0,%0,foo
wbr1 %1, 1, 1
beq %0,%0,foo
wbr1u %1, 1, 1
beq %0,%0,foo
wbr30 %1, 1, 1
beq %0,%0,foo
wbr30u %1, 1, 1
beq %0,%0,foo
wx %1, %1
beq %0,%0,foo
wxu %1, %1
beq %0,%0,foo
wxr1 %1, 1, 1
beq %0,%0,foo
wxr1u %1, 1, 1
beq %0,%0,foo
wxr30 %1, 1, 1
beq %0,%0,foo
wxr30u %1, 1, 1
foo: nop
|
stsp/binutils-ia16
| 2,027
|
gas/testsuite/gas/score/rD_rA_rB.s
|
/*
* test relax
* add.c <-> add! : register number must be in 0-15
* addc.c <-> addc! : register number must be in 0-15
* sub.c <-> sub! : register number must be in 0-15
* and.c <-> and! : register number must be in 0-15
* or.c <-> or! : register number must be in 0-15
* xor.c <-> xor! : register number must be in 0-15
* sra.c <-> sra! : register number must be in 0-15
* srl.c <-> srl! : register number must be in 0-15
* sll.c <-> sll! : register number must be in 0-15
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r0, r0, r2 #32b -> 16b
\insn16 r0, r2
\insn32 r5, r5, r4 #32b -> 16b
\insn16 r5, r4
\insn32 r15, r15, r4 #32b -> 16b
\insn16 r15, r4
\insn16 r15, r3
\insn32 r15, r15, r3 #32b -> 16b
\insn32 r8, r8, r3 #32b -> 16b
\insn32 r8, r8, r3 #32b -> 16b
\insn32 r15, r15, r6 #No transform
\insn32 r26, r23, r4
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, r2 #16b -> 32b
\insn32 r20, r21, r2
\insn16 r15, r4 #16b -> 32b
\insn32 r25, r21, r4
\insn16 r15, r3 #16b -> 32b
\insn32 r25, r22, r3
\insn16 r8, r7 #No transform
\insn16 r8, r7 #No transform
\insn16 r6, r4 #No transform
\insn32 r6, r6, r4
\insn32 r7, r7, r4 #32b -> 16b
\insn16 r7, r4 #No transform
.endm
.text
tran3216 "add.c", "add!"
tran3216 "addc.c", "addc!"
tran3216 "sub.c", "sub!"
tran3216 "and.c", "and!"
tran3216 "or.c", "or!"
tran3216 "xor.c", "xor!"
tran3216 "sra.c", "sra!"
tran3216 "srl.c", "srl!"
tran3216 "sll.c", "sll!"
tran1632 "add.c", "add!"
tran1632 "addc.c", "addc!"
tran1632 "sub.c", "sub!"
tran1632 "and.c", "and!"
tran1632 "or.c", "or!"
tran1632 "xor.c", "xor!"
tran1632 "sra.c", "sra!"
tran1632 "srl.c", "srl!"
tran1632 "sll.c", "sll!"
|
stsp/binutils-ia16
| 1,344
|
gas/testsuite/gas/score/rD_rA.s
|
/*
* test relax
* not.c <-> not! : register number must be in 0-15
* neg.c <-> neg! : register number must be in 0-15
* cmp.c <-> cmp! : register number must be in 0-15
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r0, r7 #32b -> 16b
\insn16 r0, r7
\insn32 r15, r4 #32b -> 16b
\insn16 r15, r4
\insn32 r15, r15 #32b -> 16b
\insn16 r15, r15
\insn16 r15, r3
\insn32 r15, r3 #32b -> 16b
\insn32 r8, r2 #32b -> 16b
\insn32 r8, r2 #32b -> 16b
\insn32 r15, r5 #No transform
\insn32 r26, r23
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, r2 #16b -> 32b
\insn32 r20, r21
\insn16 r15, r4 #16b -> 32b
\insn32 r25, r21
\insn16 r15, r3 #16b -> 32b
\insn32 r25, r22
\insn16 r8, r3 #No transform
\insn16 r8, r3 #No transform
\insn16 r6, r2 #No transform
\insn32 r6, r2 #32b -> 16b
\insn32 r7, r4 #32b -> 16b
\insn16 r7, r4 #No transform
.endm
.text
tran3216 "not.c", "not!"
tran3216 "neg.c", "neg!"
tran3216 "cmp.c", "cmp!"
tran1632 "not.c", "not!"
tran1632 "neg.c", "neg!"
tran1632 "cmp.c", "cmp!"
|
stsp/binutils-ia16
| 1,860
|
gas/testsuite/gas/score/ls32ls16p.s
|
/*
* test relax
* lw <-> lwp! : rs = r2, offset & 0x3 == 0, offset >> 2 : 5b
* lh <-> lhp! : rs = r2, offset & 0x1 == 0, offset >> 1 : 5b
 * lbu <-> lbup! : rs = r2, offset != 0, offset : 5b
* sw <-> swp! : rs = r2, offset & 0x3 == 0, offset >> 2 : 5b
* sh <-> shp! : rs = r2, offset & 0x1 == 0, offset >> 1 : 5b
 * sb <-> sbp! : rs = r2, offset != 0, offset : 5b
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16, shift
.align 4
\insn32 r3, [r2, 0x4 << \shift] #32b -> 16b
\insn16 r3, 0x4 << \shift
\insn32 r4, [r2, 0xC << \shift] #32b -> 16b
\insn16 r4, 0xC << \shift
\insn32 r7, [r2, 0x12 << \shift] #32b -> 16b
\insn32 r7, [r2, 0x12 << \shift] #32b -> 16b
\insn16 r8, 0x8 << \shift
\insn32 r8, [r2, 0x8 << \shift] #32b -> 16b
\insn32 r5, [r2, 0x20 << \shift] #No transform
\insn32 r5, [r2, 0x20 << \shift] #No transform
\insn32 r6, [r6, 0x8 << \shift] #No transform
\insn32 r6, [r6, 0x8 << \shift] #No transform
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16, shift
.align 4
\insn16 r0, 0xC #16b -> 32b
\insn32 r0, [r5, 0xFF]
\insn16 r15, 0x0 #16b -> 32b
\insn32 r15, [r4, 0xFF]
\insn16 r4, 0x8 #No transform
\insn16 r4, 0x8 #No transform
\insn16 r7, 0x8 #No transform
\insn32 r7, [r2, 0x8 << \shift]
.endm
tran3216 "lw", "lwp!", 2
tran3216 "lh", "lhp!", 1
tran3216 "lbu", "lbup!", 0
tran3216 "sw", "swp!", 2
tran3216 "sh", "shp!", 1
tran3216 "sb", "sbp!", 0
tran1632 "lw", "lwp!", 2
tran1632 "lh", "lhp!", 1
tran1632 "lbu", "lbup!", 0
tran1632 "sw", "swp!", 2
tran1632 "sh", "shp!", 1
tran1632 "sb", "sbp!", 0
|
stsp/binutils-ia16
| 1,600
|
gas/testsuite/gas/score/ls32ls16.s
|
/*
* test relax
* lw <-> lw! : register number must be in 0-15, offset == 0
* lh <-> lh! : register number must be in 0-15, offset == 0
* lbu <-> lbu! : register number must be in 0-15, offset == 0
* sw <-> sw! : register number must be in 0-15, offset == 0
* sh <-> sh! : register number must be in 0-15, offset == 0
* sb <-> sb! : register number must be in 0-15, offset == 0
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r0, [r3, 0] #32b -> 16b
\insn16 r0, [r3]
\insn32 r3, [r15, 0] #32b -> 16b
\insn16 r3, [r15]
\insn32 r15, [r8, 0] #32b -> 16b
\insn16 r15, [r8]
\insn32 r4, [r8, 0] #No transform
\insn32 r25, [r19, 0]
\insn32 r5, [r7, 0] #32b -> 16b
\insn32 r5, [r7, 0] #32b -> 16b
\insn16 r2, [r3]
\insn32 r2, [r3, 0] #32b -> 16b
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, [r3] #16b -> 32b
\insn32 r18, [r23, 10]
\insn16 r15, [r0] #16b -> 32b
\insn32 r17, [r26, 10]
\insn16 r6, [r8] #No transform
\insn16 r6, [r8] #No transform
\insn16 r3, [r7] #No transform
\insn32 r3, [r7, 0]
.endm
.space 1
tran3216 "lw", "lw!"
.fill 10, 1
tran3216 "lh", "lh!"
.org 0x101
tran3216 "lbu", "lbu!"
.org 0x203
tran3216 "sw", "sw!"
tran3216 "sh", "sh!"
tran3216 "sb", "sb!"
tran1632 "lw", "lw!"
tran1632 "lh", "lh!"
tran1632 "lbu", "lbu!"
tran1632 "sw", "sw!"
tran1632 "sh", "sh!"
tran1632 "sb", "sb!"
|
stsp/binutils-ia16
| 1,080
|
gas/testsuite/gas/score/addi.s
|
/*
* test relax
* addi <-> addei! : for addei : register number must be in 0-15, offset : 4b, only 16b -> 32b
* (1)addi rD, simm16 : rD = rD + simm16, -32768 <= simm16 <= 32767
* (2)addei! rD, imm4 : rD = rD + 2**imm4
 * addi <-> subei! : for subei! : register number must be in 0-15, offset : 4b, only 16b -> 32b
 * (1)addi rD, simm16 : rD = rD + simm16, -32768 <= simm16 <= 32767
 * (2)subei! rD, imm4 : rD = rD - 2**imm4
* Author: ligang
*/
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16, sign
.align 4
\insn16 r0, 0 #16b -> 32b
\insn32 r0, \sign * 1
\insn16 r15, 4 #16b -> 32b
\insn32 r15, \sign * 16
\insn16 r15, 14 #16b -> 32b
\insn32 r15, \sign * 1024 * 16
\insn16 r8, 3 #No transform
\insn16 r8, 3 #No transform
\insn16 r15, 15 #No transform. Because 2**15 = 32768, extend range of addi
\insn32 r15, 0x7FFF
.endm
.text
tran1632 "addi.c", "addei!", 1
tran1632 "addi.c", "subei!", -1
|
stsp/binutils-ia16
| 1,649
|
gas/testsuite/gas/score/rD_rA_BN.s
|
/*
* test relax
* bitclr.c <-> bitclr! : register number must be in 0-15
* bitset.c <-> bitset! : register number must be in 0-15
* bittgl.c <-> bittgl! : register number must be in 0-15
* slli.c <-> slli! : register number must be in 0-15
* srli.c <-> srli! : register number must be in 0-15
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r0, r0, 2 #32b -> 16b
\insn16 r0, 2
\insn32 r15, r15, 4 #32b -> 16b
\insn16 r15, 4
\insn32 r15, r15, 1 #32b -> 16b
\insn16 r15, 1
\insn16 r15, 3
\insn32 r15, r15, 3 #32b -> 16b
\insn32 r8, r8, 3 #32b -> 16b
\insn32 r8, r8, 3 #32b -> 16b
\insn32 r15, r15, 1 #No transform
\insn32 r26, r23, 4
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, 2 #16b -> 32b
\insn32 r20, r21, 2
\insn16 r15, 4 #16b -> 32b
\insn32 r25, r21, 4
\insn16 r15, 1 #16b -> 32b
\insn32 r25, r22, 1
\insn16 r8, 3 #No transform
\insn16 r8, 3 #No transform
\insn16 r6, 4 #No transform
\insn32 r6, r6, 4 #32b -> 16b
\insn32 r9, r9, 2 #32b -> 16b
\insn16 r9, 2 #No transform
.endm
.text
tran3216 "bitclr.c", "bitclr!"
tran3216 "bitset.c", "bitset!"
tran3216 "bittgl.c", "bittgl!"
tran3216 "slli.c", "slli!"
tran3216 "srli.c", "srli!"
tran1632 "bitclr.c", "bitclr!"
tran1632 "bitset.c", "bitset!"
tran1632 "bittgl.c", "bittgl!"
tran1632 "slli.c", "slli!"
tran1632 "srli.c", "srli!"
|
stsp/binutils-ia16
| 3,775
|
gas/testsuite/gas/score/branch_32.s
|
/*
* tests for branch instruction relaxation
*
* Author: libin
*/
.include "relaxation_macro.h"
.macro _b_op_pattern insn insn1
.balign 2
/*
* for local label 1, assembler should NOT alter instructions before .skip;
* but it SHOULD alter instructions afte it.
*/
1:
insn_16 "\insn! 1b"
tran_16_32 "\insn! 1b", "\insn 1b"
insn_16 "\insn1 1b"
.skip 512
insn_16 "\insn! 1b"
tran_16_32 "\insn! 1b", "\insn 1b"
insn_16 "\insn1 1b"
/*
* for local label 2, assembler SHOULD alter instructions before .skip;
* but it should NOT alter instructions after it.
*/
insn_16 "\insn! 2f"
tran_16_32 "\insn! 2f", "\insn 2f"
insn_16 "\insn1 2f"
.skip 511
insn_16 "\insn! 2f"
tran_16_32 "\insn! 2f", "\insn 2f"
insn_16 "\insn1 2f"
2:
nop!
/* tests for boundary */
3:
.skip 512
insn_16 "\insn! 3b"
insn_16 "\insn! 3b"
insn_16 "\insn! 4f"
insn_16 "\insn! 4f"
.skip 511
4:
nop!
.endm
.macro _br_op_pattern insn
.balign 2
insn_32 "\insn r0"
insn_32 "\insn r15"
tran_16_32 "\insn! r0", "\insn r0"
/* shouldn't alter */
insn_32 "\insn r16"
insn_32 "\insn r31"
.endm
.macro _bcmp_op_pattern1 insn
.balign 2
/* as will give "Using temp register(r1)" warning if you using r1 */
/*
* for local label 1, assembler should NOT alter instructions before .skip;
* but it SHOULD alter instructions afte it.
*/
1:
insn_32 "\insn r0, r15, 1b"
insn_32 "\insn r15, r16, 1b"
insn_32 "\insn r15, r31, 1b"
insn_32 "\insn r16, r31, 1b"
.skip 512
insn_32 "\insn r0, r15, 1b"
insn_32 "\insn r15, r16, 1b"
insn_32 "\insn r15, r31, 1b"
insn_32 "\insn r16, r31, 1b"
/*
* for local label 2, assembler SHOULD alter instructions before .skip;
* but it should NOT alter instructions after it.
*/
insn_32 "\insn r0, r15, 2f"
insn_32 "\insn r15, r16, 2f"
insn_32 "\insn r15, r31, 2f"
insn_32 "\insn r16, r31, 2f"
.skip 511
insn_32 "\insn r0, r15, 2f"
insn_32 "\insn r15, r16, 2f"
insn_32 "\insn r15, r31, 2f"
insn_32 "\insn r16, r31, 2f"
2:
nop!
/* tests for boundary */
3:
.skip 512
insn_32 "\insn r0, r15, 3b"
insn_32 "\insn r16, r15, 3b"
insn_32 "\insn r0, r15, 4f"
insn_32 "\insn r16, r15, 4f"
.skip 511
4:
.endm
.macro _bcmp_op_pattern2 insn
.balign 2
/* as will give "Using temp register(r1)" warning if you using r1 */
/*
* for local label 1, assembler should NOT alter instructions before .skip;
* but it SHOULD alter instructions afte it.
*/
1:
insn_32 "\insn r0, 1b"
insn_32 "\insn r15, 1b"
insn_32 "\insn r16, 1b"
insn_32 "\insn r31, 1b"
.skip 512
insn_32 "\insn r0, 1b"
insn_32 "\insn r15, 1b"
insn_32 "\insn r16, 1b"
insn_32 "\insn r31, 1b"
/*
* for local label 2, assembler SHOULD alter instructions before .skip;
* but it should NOT alter instructions after it.
*/
insn_32 "\insn r0, 2f"
insn_32 "\insn r15, 2f"
insn_32 "\insn r16, 2f"
insn_32 "\insn r31, 2f"
.skip 511
insn_32 "\insn r0, 2f"
insn_32 "\insn r15, 2f"
insn_32 "\insn r16, 2f"
insn_32 "\insn r31, 2f"
2:
nop!
/* tests for boundary */
3:
.skip 512
insn_32 "\insn r0, 3b"
insn_32 "\insn r16, 3b"
insn_32 "\insn r0, 4f"
insn_32 "\insn r16, 4f"
.skip 511
4:
.endm
.text
/* b Disp19 <-> b! Disp9 */
_b_op_pattern "bgtu", "bgtul"
_b_op_pattern "bleu", "bleul"
_b_op_pattern "beq", "beql"
_b_op_pattern "bne", "bnel"
_b_op_pattern "bgt", "bgtl"
_b_op_pattern "ble", "blel"
_b_op_pattern "bcnz", "bcnzl"
_b_op_pattern "b", "bl"
/* br rD <-> br! rD */
_br_op_pattern "br"
_br_op_pattern "brl"
/* bcmpeq/bcmpne rA,rB,Disp9 -> cmp/cmp! rA, rB; beq/bne Disp19 */
_bcmp_op_pattern1 "bcmpeq"
_bcmp_op_pattern1 "bcmpne"
/* bcmpeqz/bcmpnez rA,Disp9 -> cmpi! rA, 0; beq/bne Disp19 */
_bcmp_op_pattern2 "bcmpeqz"
_bcmp_op_pattern2 "bcmpnez"
|
stsp/binutils-ia16
| 1,118
|
gas/testsuite/gas/score/postlw.s
|
/*
* test relax
* post lw <-> pop! : offset == 4
* syntax:
lw rD, [rA]+, simm12 : rD and rA can be 0-31
pop! rD, [rAg0] : rAg0 must be in 0-7, rD can be 0-31
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r23, [r7]+, 4 #32b -> 16b
\insn16 r23, [r7]
\insn32 r0, [r2]+, 4 #32b -> 16b
\insn16 r0, [r2]
\insn32 r15, [r0]+, 4 #32b -> 16b
\insn16 r15, [r0]
\insn16 r15, [r7]
\insn32 r15, [r7]+, 4 #32b -> 16b
\insn32 r25, [r3]+, 4 #32b -> 16b
\insn32 r25, [r3]+, 4 #32b -> 16b
\insn32 r24, [r13]+, 4 #No transform
\insn32 r23, [r7]+, 5 #No transform
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, [r7] #16b -> 32b
\insn32 r25, [r13]+, 4
\insn16 r25, [r0] #16b -> 32b
\insn32 r18, [r23]+, 4
\insn16 r6, [r3] #No transform
\insn16 r6, [r3] #No transform
\insn16 r3, [r7] #No transform
\insn32 r3, [r7]+, 4
.endm
tran3216 "lw", "pop!"
tran1632 "lw", "pop!"
|
stsp/binutils-ia16
| 1,133
|
gas/testsuite/gas/score/presw.s
|
/*
* test relax
* pre sw <-> push! : offset == -4
* syntax:
sw rD, [rA, simm12]+ : rD and rA can be 0-31
push! rD, [rAg0] : rAg0 must be in 0-7, rD can be 0-31
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r0, [r2, -4]+ #32b -> 16b
\insn16 r0, [r2]
\insn32 r23, [r7, -4]+ #32b -> 16b
\insn16 r23, [r7]
\insn32 r15, [r0, -4]+ #32b -> 16b
\insn16 r15, [r0]
\insn16 r15, [r7]
\insn32 r15, [r7, -4]+ #32b -> 16b
\insn32 r25, [r3, -4]+ #32b -> 16b
\insn32 r25, [r3, -4]+ #32b -> 16b
\insn32 r24, [r13, -4]+ #No transform
\insn32 r23, [r7, -5]+ #No transform
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, [r7] #16b -> 32b
\insn32 r25, [r13, -4]+
\insn16 r25, [r0] #16b -> 32b
\insn32 r18, [r23, -4]+
\insn16 r6, [r3] #No transform
\insn16 r6, [r3] #No transform
\insn16 r3, [r7] #No transform
\insn32 r3, [r7, -4]+
.endm
tran3216 "sw", "push!"
tran1632 "sw", "push!"
|
stsp/binutils-ia16
| 1,910
|
gas/testsuite/gas/score/move.s
|
/*
* test relax
* mv <-> mv! : for mv! : register number must be in 0-15
* mv <-> mhfl! : for mhfl! : rD must be in 16-31, rS must be in 0-15
 * mv <-> mlfh! : for mlfh! : rD must be in 0-15, rS must be in 16-31
* Author: ligang
*/
/* This block test mv -> mv! */
.align 4
mv r0, r15 #32b -> 16b
mv! r0, r15
mv r15, r15 #32b -> 16b
mv! r15, r15
mv r3, r5 #32b -> 16b
mv r3, r5 #32b -> 16b
mv! r6, r7
mv r6, r7 #32b -> 16b
mv r8, r10 #No transform
mv r21, r23
/* This block test mv! -> mv */
.align 4
mv! r0, r15 #16b -> 32b
mv r23, r27
mv! r2, r8 #No transform
mv! r2, r8 #No transform
mv! r2, r8 #No transform
mv r2, r8
/* This block test mv -> mhfl! */
.align 4
mv r31, r0 #32b -> 16b
mhfl! r31, r0
mv r16, r15 #32b -> 16b
mv! r16, r15
mv r23, r5 #32b -> 16b
mv r23, r5 #32b -> 16b
mhfl! r26, r7
mv r26, r7 #32b -> 16b
mv r28, r10 #No transform
mv r21, r23
/* This block test mhfl! -> mv */
.align 4
mhfl! r31, r0 #16b -> 32b
mv r23, r27
mhfl! r22, r8 #No transform
mhfl! r22, r8 #No transform
mhfl! r23, r15 #No transform
mv r23, r15
/* This block test mv -> mlfh! */
.align 4
mv r0, r31 #32b -> 16b
mlfh! r0, r31
mv r15, r16 #32b -> 16b
mv! r15, r16
mv r5, r23 #32b -> 16b
mv r5, r23 #32b -> 16b
mlfh! r7, r26
mv r7, r26 #32b -> 16b
mv r10, r28 #No transform
mv r21, r23
/* This block test mlfh! -> mv */
.align 4
mlfh! r0, r31 #16b -> 32b
mv r23, r27
mlfh! r8, r22 #No transform
mlfh! r8, r22 #No transform
mlfh! r15, r23 #No transform
mv r15, r23
|
stsp/binutils-ia16
| 1,107
|
gas/testsuite/gas/score/bittst.s
|
/*
* test relax
* bittst.c <-> bittst! : register number must be in 0-15
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
\insn32 r0, 2 #32b -> 16b
\insn16 r0, 2
\insn32 r15, 4 #32b -> 16b
\insn16 r15, 4
\insn32 r15, 1 #32b -> 16b
\insn16 r15, 1
\insn16 r15, 3
\insn32 r15, 3 #32b -> 16b
\insn32 r8, 2 #32b -> 16b
\insn32 r8, 2 #32b -> 16b
\insn32 r15, 1 #No transform
\insn32 r26, 4
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r0, 2 #16b -> 32b
\insn32 r20, 2
\insn16 r15, 4 #16b -> 32b
\insn32 r25, 4
\insn16 r15, 1 #16b -> 32b
\insn32 r25, 1
\insn16 r8, 1 #No transform
\insn16 r8, 1 #No transform
\insn16 r6, 4 #No transform
\insn32 r6, 4 #32b -> 16b
\insn32 r7, 3 #32b -> 16b
\insn16 r7, 3 #No transform
.endm
.text
tran3216 "bittst.c", "bittst!"
tran1632 "bittst.c", "bittst!"
|
stsp/binutils-ia16
| 1,129
|
gas/testsuite/gas/score/ldi.s
|
/*
* test relax
* ldi <-> ldiu! : for ldiu! : register number must be in 0-15, simm16: [0-255]
* (1)ldi rD, simm16 : rD = simm16
* (2)ldiu! rD, imm8 : rD = ZE(imm8)
* Author: ligang
*/
/* This macro transform 32b instruction to 16b. */
.macro tran3216 insn32, insn16
.align 4
\insn32 r2, 0 #32b -> 16b
\insn16 r2, 0
\insn32 r3, 255 #32b -> 16b
\insn16 r3, 255
\insn32 r4, 9 #32b -> 16b
\insn32 r4, 9 #32b -> 16b
\insn16 r3, 255
\insn32 r3, 255 #32b -> 16b
\insn32 r8, 3 #No transform
\insn32 r25, 3 #No transform
.endm
/* This macro transform 16b instruction to 32b. */
.macro tran1632 insn32, insn16
.align 4
\insn16 r2, 0 #16b -> 32b
\insn32 r25, 0
\insn16 r3, 255 #16b -> 32b
\insn32 r23, 1
\insn16 r15, 255 #No transform
\insn32 r15, 255
\insn16 r8, 3 #No transform
\insn16 r8, 3 #No transform
.endm
.text
tran3216 "ldi", "ldiu!"
tran1632 "ldi", "ldiu!"
|
stsp/binutils-ia16
| 1,491
|
gas/testsuite/gas/score/load_store_32.s
|
/*
* tests for load/store instruction relaxation
*
* Author: libin
*/
.include "relaxation_macro.h"
.macro _ls_op_pattern insn
.balign 2
insn_32 "\insn r0, [r0,0]"
insn_32 "\insn r15, [r0,0]"
insn_32 "\insn r0, [r7,0]"
insn_32 "\insn r15, [r7,0]"
/* NOTE: offset MUST be word aligned */
insn_32 "\insn r0, [r0,124]"
insn_32 "\insn r15, [r0,124]"
insn_32 "\insn r0, [r7,124]"
insn_32 "\insn r15, [r7,124]"
tran_16_32 "\insn! r0,[r0,124]", "\insn r0,[r0,124]"
/* shouldn't alter */
insn_32 "\insn r16, [r0, 0]"
insn_32 "\insn r0, [r8, 124]"
insn_32 "\insn r16, [r8, 124]"
insn_32 "\insn r0, [r7, -1]"
insn_32 "\insn r0, [r7, 128]"
.endm
.text
/* lw/sw rD,[rA,SImm15] -> lw!/sw! rD,[rA,Imm5] */
_ls_op_pattern "lw"
_ls_op_pattern "sw"
/* ldi rD,SImm16 -> ldiu! rD,Imm6 */
.balign 2
insn_32 "ldi r0, 0"
insn_32 "ldi r15, 0"
insn_32 "ldi r0, 31"
insn_32 "ldi r15, 31"
tran_16_32 "ldiu! r0, 0", "ldi r0, 0"
/* shouldn't alter */
insn_32 "ldi r16, 0"
insn_32 "ldi r0, -1"
insn_32 "ldi r0, 32"
insn_32 "ldi r16, 32"
/*
* lw rD,[rA]+,SImm12 -> pop! rD
*
* r0: stack pointer(sp)
*/
insn_32 "lw r2, [r0]+, 4"
insn_32 "lw r15, [r0]+, 4"
/* shouldn't alter */
insn_32 "lw r16, [r0]+, 4"
insn_32 "lw r4, [r2]+, 4"
insn_32 "lw r4, [r0]+, -4"
/* sw rD,[rA,SImm12]+ -> push! rD */
insn_32 "sw r2, [r0, -4]+"
insn_32 "sw r15, [r0, -4]+"
/* shouldn't alter */
insn_32 "sw r16, [r0, -4]+"
insn_32 "sw r4, [r2, -4]+"
insn_32 "sw r4, [r0, 4]+"
|
stsp/binutils-ia16
| 8,392
|
gas/testsuite/gas/s390/esa-g5.s
|
.text
foo:
a %r6,4095(%r5,%r10)
ad %f6,4095(%r5,%r10)
adb %f6,4095(%r5,%r10)
adbr %f6,%f9
adr %f6,%f9
ae %f6,4095(%r5,%r10)
aeb %f6,4095(%r5,%r10)
aebr %f6,%f9
aer %f6,%f9
ah %r6,4095(%r5,%r10)
ahi %r6,-32767
al %r6,4095(%r5,%r10)
alr %r6,%r9
ap 4095(6,%r5),4095(9,%r10)
ar %r6,%r9
au %f6,4095(%r5,%r10)
aur %f6,%f9
aw %f6,4095(%r5,%r10)
awr %f6,%f9
axbr %f4,%f8
axr %f4,%f8
b 4095(%r5,%r10)
bakr %r6,%r9
bal %r6,4095(%r5,%r10)
balr %r6,%r9
bas %r6,4095(%r5,%r10)
basr %r6,%r9
bassm %r6,%r9
bc 6,4095(%r5,%r10)
bcr 6,%r9
bct %r6,4095(%r5,%r10)
bctr %r6,%r9
be 4095(%r5,%r10)
ber %r9
bh 4095(%r5,%r10)
bhe 4095(%r5,%r10)
bher %r9
bhr %r9
bl 4095(%r5,%r10)
ble 4095(%r5,%r10)
bler %r9
blh 4095(%r5,%r10)
blhr %r9
blr %r9
bm 4095(%r5,%r10)
bmr %r9
bne 4095(%r5,%r10)
bner %r9
bnh 4095(%r5,%r10)
bnhe 4095(%r5,%r10)
bnher %r9
bnhr %r9
bnl 4095(%r5,%r10)
bnle 4095(%r5,%r10)
bnler %r9
bnlh 4095(%r5,%r10)
bnlhr %r9
bnlr %r9
bnm 4095(%r5,%r10)
bnmr %r9
bno 4095(%r5,%r10)
bnor %r9
bnp 4095(%r5,%r10)
bnpr %r9
bnz 4095(%r5,%r10)
bnzr %r9
bo 4095(%r5,%r10)
bor %r9
bp 4095(%r5,%r10)
bpr %r9
br %r9
bras %r9,.
jas %r6,.
brc 6,.
brct 6,.
jct %r6,.
brxh %r6,%r9,.
jxh %r6,%r9,.
brxle %r6,%r9,.
jxle %r6,%r9,.
bsa %r6,%r9
bsg %r6,%r9
bsm %r6,%r9
bxh %r6,%r9,4095(%r5)
bxle %r6,%r9,4095(%r5)
bz 4095(%r5,%r10)
bzr %r9
c %r6,4095(%r5,%r10)
cd %f6,4095(%r5,%r10)
cdb %f6,4095(%r5,%r10)
cdbr %f6,%f9
cdfbr %f6,%r9
cdfr %f6,%r9
cdr %f6,%f9
cds %r6,%r8,4095(%r5)
ce %f6,4095(%r5,%r10)
ceb %f6,4095(%r5,%r10)
cebr %f6,%f9
cefbr %f6,%r9
cefr %f6,%r9
cer %f6,%f9
cfc 4095(%r5)
cfdbr %r6,5,%f9
cfebr %r6,5,%f9
cfxbr %r5,5,%f8
cfdr %r6,9,%f5
cfer %r6,9,%f5
cfxr %r5,9,%f4
ch %r6,4095(%r5,%r10)
chi %r6,-32767
cksm %r6,%r9
cl %r6,4095(%r5,%r10)
clc 4095(256,%r5),4095(%r10)
clcl %r6,%r9
clcle %r6,%r9,4095(%r5)
cli 4095(%r5),255
clm %r6,10,4095(%r5)
clr %r6,%r9
clst %r6,%r9
cmpsc %r6,%r9
cp 4095(6,%r5),4095(9,%r10)
cpya %a6,%a9
cr %r6,%r9
cs %r6,%r9,4095(%r5)
csch
csp %r6,%r9
cuse %r6,%r8
cutfu %r6,%r8
cuutf %r6,%r8
cvb %r6,4095(%r5,%r10)
cvd %r6,4095(%r5,%r10)
cxbr %f5,%f8
cxfbr %f5,%r9
cxfr %f5,%r9
cxr %f5,%f9
d %r6,4095(%r5,%r10)
dd %f6,4095(%r5,%r10)
ddb %f6,4095(%r5,%r10)
ddbr %f6,%f9
ddr %f6,%f9
de %f6,4095(%r5,%r10)
deb %f6,4095(%r5,%r10)
debr %f6,%f9
der %f6,%f9
diag %r6,%r9,4095(%r5)
didbr %f6,%r9,%r5,10
diebr %f6,%r9,%r5,10
dp 4095(6,%r5),4095(9,%r10)
dr %r6,%r9
dxbr %f5,%f8
dxr %f5,%f8
ear %r6,%a9
ed 4095(256,%r5),4095(%r10)
edmk 4095(256,%r5),4095(%r10)
efpc %r6,%r9
efpc %r6
epar %r6
ereg %r6,%r9
esar %r6
esta %r6,%r9
ex %r6,4095(%r5,%r10)
fidbr %f6,5,%f9
fidr %f6,%f9
fiebr %f6,5,%f9
fier %f6,%f9
fixbr %f5,5,%f8
fixr %f5,%f8
hdr %f6,%f9
her %f6,%f9
hsch
iac %r6
ic %r6,4095(%r5,%r10)
icm %r6,10,4095(%r5)
ipk
ipm %r6
ipte %r6,%r9
iske %r6,%r9
ivsk %r6,%r9
j .
je .
jh .
jhe .
jl .
jle .
jlh .
jm .
jne .
jnh .
jnhe .
jnl .
jnle .
jnlh .
jnm .
jno .
jnp .
jnz .
jo .
jp .
jz .
jnop .
bro .
brh .
brp .
brnle .
brl .
brm .
brnhe .
brlh .
brne .
brnz .
bre .
brz .
brnlh .
brhe .
brnl .
brnm .
brle .
brnh .
brnp .
brno .
bru .
kdb %f6,4095(%r5,%r10)
kdbr %f6,%f9
keb %f6,4095(%r5,%r10)
kebr %f6,%f9
kxbr %f6,%f9
l %r6,4095(%r5,%r10)
la %r6,4095(%r5,%r10)
lae %r6,4095(%r5,%r10)
lam %a6,%a9,4095(%r5)
lasp 4095(%r5),4095(%r10)
lcdbr %f6,%f9
lcdr %f6,%f9
lcebr %f6,%f9
lcer %f6,%f9
lcr %r6,%r9
lctl %c6,%c9,4095(%r5)
lcxbr %f5,%f8
lcxr %f5,%f8
ld %f6,4095(%r5,%r10)
lde %f6,4095(%r5,%r10)
ldeb %f6,4095(%r5,%r10)
ldebr %f6,%f9
lder %f6,%f9
ldr %f6,%f9
ldxbr %f5,%f8
ldxr %f6,%f8
le %f6,4095(%r5,%r10)
ledbr %f6,%f9
ledr %f6,%f9
ler %f6,%f9
lexbr %f5,%f8
lexr %f6,%f8
lfpc 4095(%r5)
lh %r6,4095(%r5,%r10)
lhi %r6,-32767
lm %r6,%r9,4095(%r5)
lndbr %f6,%f9
lndr %f6,%f9
lnebr %f6,%f9
lner %f6,%f9
lnr %r6,%r9
lnxbr %f5,%f8
lnxr %f5,%f8
lpdbr %f6,%f9
lpdr %f6,%f9
lpebr %f6,%f9
lper %f6,%f9
lpr %r6,%r9
lpsw 4095(%r5)
lpxbr %f5,%f8
lpxr %f5,%f8
lr %r6,%r9
lra %r6,4095(%r5,%r10)
lrdr %f7,%f8
lrer %f6,%f9
ltdbr %f6,%f9
ltdr %f6,%f9
ltebr %f6,%f9
lter %f6,%f9
ltr %r6,%r9
ltxbr %f5,%f8
ltxr %f5,%f8
lura %r6,%r9
lxd %f5,4095(%r5,%r10)
lxdb %f5,4095(%r5,%r10)
lxdbr %f5,%f9
lxdr %f5,%f9
lxe %f5,4095(%r5,%r10)
lxeb %f5,4095(%r5,%r10)
lxebr %f5,%f9
lxer %f5,%f9
lxr %f5,%f8
lzdr %f6
lzer %f6
lzxr %f5
m %r6,4095(%r5,%r10)
madb %f6,%f9,4095(%r5,%r10)
madbr %f6,%f9,%f5
maeb %f6,%f9,4095(%r5,%r10)
maebr %f6,%f9,%f5
mc 4095(%r5),255
md %f6,4095(%r5,%r10)
mdb %f6,4095(%r5,%r10)
mdbr %f6,%f9
mde %f6,4095(%r5,%r10)
mdeb %f6,4095(%r5,%r10)
mdebr %f6,%f9
mder %f6,%f9
mdr %f6,%f9
me %f6,4095(%r5,%r10)
mee %f6,4095(%r5,%r10)
meeb %f6,4095(%r5,%r10)
meebr %f6,%f9
meer %f6,%f9
mer %f6,%f9
mh %r6,4095(%r5,%r10)
mhi %r6,-32767
mp 4095(6,%r5),4095(9,%r10)
mr %r6,%r9
ms %r6,4095(%r5,%r10)
msch 4095(%r5)
msdb %f6,%f9,4095(%r5,%r10)
msdbr %f6,%f9,%f5
mseb %f6,%f9,4095(%r5,%r10)
msebr %f6,%f9,%f5
msr %r6,%r9
msta %r6
mvc 4095(256,%r5),4095(%r10)
mvcdk 4095(%r5),4095(%r10)
mvcin 4095(256,%r5),4095(%r10)
mvck 4095(%r6,%r5),4095(%r10),%r9
mvcl %r6,%r9
mvcle %r6,%r8,4095(%r5)
mvclu %r6,%r8,4095(%r5)
mvcp 4095(%r6,%r5),4095(%r10),%r9
mvcs 4095(%r6,%r5),4095(%r10),%r9
mvcsk 4095(%r5),4095(%r10)
mvi 4095(%r5),255
mvn 4095(256,%r5),4095(%r10)
mvo 4095(6,%r5),4095(9,%r10)
mvpg %r6,%r9
mvst %r6,%r9
mvz 4095(256,%r5),4095(%r10)
mxbr %f5,%f8
mxd %f5,4095(%r5,%r10)
mxdb %f5,4095(%r5,%r10)
mxdbr %f5,%f9
mxdr %f5,%f9
mxr %f5,%f8
n %r6,4095(%r5,%r10)
nc 4095(256,%r5),4095(%r10)
ni 4095(%r5),255
nop 4095(%r5,%r10)
nopr %r9
nr %r6,%r9
o %r6,4095(%r5,%r10)
oc 4095(256,%r5),4095(%r10)
oi 4095(%r5),255
or %r6,%r9
pack 4095(6,%r5),4095(9,%r10)
palb
pc 4095(%r5)
pgin %r6,%r9
pgout %r6,%r9
pka 4095(%r5),4095(32,%r10)
pku 4095(%r5),4095(256,%r10)
plo %r6,4095(%r5),%r9,4095(%r10)
pr
pt %r6,%r9
ptlb
rchp
rp 4095(%r5)
rrbe %r6,%r9
rsch
s %r6,4095(%r5,%r10)
sac 4095(%r5)
sacf 4095(%r5)
sal
sar %a6,%r9
schm
sck 4095(%r5)
sckc 4095(%r5)
sckpf
sd %f6,4095(%r5,%r10)
sdb %f6,4095(%r5,%r10)
sdbr %f6,%f9
sdr %f6,%f9
se %f6,4095(%r5,%r10)
seb %f6,4095(%r5,%r10)
sebr %f6,%f9
ser %f6,%f9
sfpc %r6,%r9
sfpc %r6
sh %r6,4095(%r5,%r10)
sie 4095(%r5)
siga 4095(%r5)
sigp %r6,%r9,4095(%r5)
sl %r6,4095(%r5,%r10)
sla %r6,4095(%r5)
slda %r6,4095(%r5)
sldl %r6,4095(%r5)
sll %r6,4095(%r5)
slr %r6,%r9
sp 4095(6,%r5),4095(9,%r10)
spka 4095(%r5)
spm %r6
spt 4095(%r5)
spx 4095(%r5)
sqdb %f6,4095(%r5,%r10)
sqdbr %f6,%f9
sqdr %f6,%f9
sqe %f6,4095(%r5,%r10)
sqd %f6,4095(%r5,%r10)
sqeb %f6,4095(%r5,%r10)
sqebr %f6,%f9
sqer %f6,%f9
sqxbr %f5,%f8
sqxr %f5,%f8
sr %r6,%r9
sra %r6,4095(%r5)
srda %r6,4095(%r5)
srdl %r6,4095(%r5)
srl %r6,4095(%r5)
srnm 4095(%r5)
srp 4095(16,%r5),4095(%r10),10
srst %r6,%r9
ssar %r6
ssch 4095(%r5)
sske %r6,%r9
ssm 4095(%r5)
st %r6,4095(%r5,%r10)
stam %a6,%a9,4095(%r5)
stap 4095(%r5)
stc %r6,4095(%r5,%r10)
stck 4095(%r5)
stckc 4095(%r5)
stcke 4095(%r5)
stcm %r6,10,4095(%r5)
stcps 4095(%r5)
stcrw 4095(%r5)
stctl %c6,%c9,4095(%r5)
std %f6,4095(%r5,%r10)
ste %f6,4095(%r5,%r10)
stfpc 4095(%r5)
sth %r6,4095(%r5,%r10)
stidp 4095(%r5)
stm %r6,%r9,4095(%r5)
stnsm 4095(%r5),255
stosm 4095(%r5),255
stpt 4095(%r5)
stpx 4095(%r5)
stsch 4095(%r5)
stsi 4095(%r5)
stura %r6,%r9
su %f6,4095(%r5,%r10)
sur %f6,%f9
svc 255
sw %f6,4095(%r5,%r10)
swr %f6,%f9
sxbr %f5,%f8
sxr %f5,%f8
tar %a6,%r9
tb %r6,%r9
tbdr %r6,5,%r9
tbedr %r6,5,%r9
tcdb %f6,4095(%r5,%r10)
tceb %f6,4095(%r5,%r10)
tcxb %f5,4095(%r5,%r10)
thder %f6,%f9
thdr %f6,%f9
tm 4095(%r5),255
tmh %r6,65535
tml %r6,65535
tmlh %r6,65535
tmll %r6,65535
tp 4095(6,%r5)
tpi 4095(%r5)
tprot 4095(%r5),4095(%r10)
tr 4095(256,%r5),4095(%r10)
trace %r6,%r9,4095(%r5)
trap2
trap4 4095(%r5)
tre %r6,%r9
troo %r6,%r9
trot %r6,%r9
trt 4095(256,%r5),4095(%r10)
trto %r6,%r9
trtt %r6,%r9
ts 4095(%r5)
tsch 4095(%r5)
unpk 4095(6,%r5),4095(9,%r10)
unpka 4095(256,%r5),4095(%r10)
unpku 4095(256,%r5),4095(%r10)
upt
x %r6,4095(%r5,%r10)
xc 4095(256,%r5),4095(%r10)
xi 4095(%r5),255
xr %r6,%r9
xsch
zap 4095(6,%r5),4095(9,%r10)
ipte %r6,%r9,%r11
ipte %r6,%r9,%r11,13
|
stsp/binutils-ia16
| 4,787
|
gas/testsuite/gas/s390/zarch-arch12.s
|
.text
foo:
vbperm %v15,%v17,%v20
vllezlf %v15,4000(%r6,%r9)
vmsl %v15,%v17,%v20,%v24,13,12
vmslg %v15,%v17,%v20,%v24,13
vnx %v15,%v17,%v20
vnn %v15,%v17,%v20
voc %v15,%v17,%v20
vpopctb %v15,%v17
vpopcth %v15,%v17
vpopctf %v15,%v17
vpopctg %v15,%v17
vfasb %v15,%v17,%v20
wfasb %v15,%v17,%v20
wfaxb %v15,%v17,%v20
wfcsb %v15,%v17
wfcxb %v15,%v17
wfksb %v15,%v17
wfkxb %v15,%v17
vfcesb %v15,%v17,%v20
vfcesbs %v15,%v17,%v20
wfcesb %v15,%v17,%v20
wfcesbs %v15,%v17,%v20
wfcexb %v15,%v17,%v20
wfcexbs %v15,%v17,%v20
vfkesb %v15,%v17,%v20
vfkesbs %v15,%v17,%v20
wfkesb %v15,%v17,%v20
wfkesbs %v15,%v17,%v20
vfkedb %v15,%v17,%v20
vfkedbs %v15,%v17,%v20
wfkedb %v15,%v17,%v20
wfkedbs %v15,%v17,%v20
wfkexb %v15,%v17,%v20
wfkexbs %v15,%v17,%v20
vfchsb %v15,%v17,%v20
vfchsbs %v15,%v17,%v20
wfchsb %v15,%v17,%v20
wfchsbs %v15,%v17,%v20
wfchxb %v15,%v17,%v20
wfchxbs %v15,%v17,%v20
vfkhsb %v15,%v17,%v20
vfkhsbs %v15,%v17,%v20
wfkhsb %v15,%v17,%v20
wfkhsbs %v15,%v17,%v20
vfkhdb %v15,%v17,%v20
vfkhdbs %v15,%v17,%v20
wfkhdb %v15,%v17,%v20
wfkhdbs %v15,%v17,%v20
wfkhxb %v15,%v17,%v20
wfkhxbs %v15,%v17,%v20
vfchesb %v15,%v17,%v20
# Tail of a z/Architecture (arch12 / z14) GAS testsuite source.
# NOTE(review): this chunk starts mid-file; the .text/label header is outside
# this view.  Every mnemonic and operand below is matched byte-for-byte
# against an expected-disassembly (.d) file, so the instruction text must
# not be altered.
# Vector FP compare (single/extended, signalling "k" forms).
vfchesbs %v15,%v17,%v20
wfchesb %v15,%v17,%v20
wfchesbs %v15,%v17,%v20
wfchexb %v15,%v17,%v20
wfchexbs %v15,%v17,%v20
vfkhesb %v15,%v17,%v20
vfkhesbs %v15,%v17,%v20
wfkhesb %v15,%v17,%v20
wfkhesbs %v15,%v17,%v20
vfkhedb %v15,%v17,%v20
vfkhedbs %v15,%v17,%v20
wfkhedb %v15,%v17,%v20
wfkhedbs %v15,%v17,%v20
wfkhexb %v15,%v17,%v20
wfkhexbs %v15,%v17,%v20
# Vector FP divide / load-rounded / load-lengthened / min-max.
vfdsb %v15,%v17,%v20
wfdsb %v15,%v17,%v20
wfdxb %v15,%v17,%v20
vfisb %v15,%v17,13,12
wfisb %v15,%v17,13,12
wfixb %v15,%v17,13,12
vfll %v15,%v17,13,12
vflls %v15,%v17
wflls %v15,%v17
wflld %v15,%v17
vflr %v15,%v17,13,12,11
vflrd %v15,%v17,13,12
wflrd %v15,%v17,13,12
wflrx %v15,%v17,13,12
vfmax %v15,%v17,%v20,13,12,11
vfmaxsb %v15,%v17,%v20,13
vfmaxdb %v15,%v17,%v20,13
wfmaxsb %v15,%v17,%v20,13
wfmaxdb %v15,%v17,%v20,13
wfmaxxb %v15,%v17,%v20,13
vfmin %v15,%v17,%v20,13,12,11
vfminsb %v15,%v17,%v20,13
vfmindb %v15,%v17,%v20,13
wfminsb %v15,%v17,%v20,13
wfmindb %v15,%v17,%v20,13
wfminxb %v15,%v17,%v20,13
# Vector FP multiply and (negative) multiply-and-add/subtract.
vfmsb %v15,%v17,%v20
wfmsb %v15,%v17,%v20
wfmxb %v15,%v17,%v20
vfmasb %v15,%v17,%v20,%v24
wfmasb %v15,%v17,%v20,%v24
wfmaxb %v15,%v17,%v20,%v24
vfmssb %v15,%v17,%v20,%v24
wfmssb %v15,%v17,%v20,%v24
wfmsxb %v15,%v17,%v20,%v24
vfnma %v15,%v17,%v20,%v24,13,12
vfnmasb %v15,%v17,%v20,%v24
wfnmasb %v15,%v17,%v20,%v24
vfnmadb %v15,%v17,%v20,%v24
wfnmadb %v15,%v17,%v20,%v24
wfnmaxb %v15,%v17,%v20,%v24
vfnms %v15,%v17,%v20,%v24,13,12
vfnmssb %v15,%v17,%v20,%v24
wfnmssb %v15,%v17,%v20,%v24
vfnmsdb %v15,%v17,%v20,%v24
wfnmsdb %v15,%v17,%v20,%v24
wfnmsxb %v15,%v17,%v20,%v24
# Vector FP perform-sign-operation, square root, subtract, test-class.
vfpsosb %v15,%v17,13
wfpsosb %v15,%v17,13
vflcsb %v15,%v17
wflcsb %v15,%v17
vflnsb %v15,%v17
wflnsb %v15,%v17
vflpsb %v15,%v17
wflpsb %v15,%v17
wfpsoxb %v15,%v17,13
wflcxb %v15,%v17
wflnxb %v15,%v17
wflpxb %v15,%v17
vfsqsb %v15,%v17
wfsqsb %v15,%v17
wfsqxb %v15,%v17
vfssb %v15,%v17,%v20
wfssb %v15,%v17,%v20
wfsxb %v15,%v17,%v20
vftcisb %v15,%v17,4093
wftcisb %v15,%v17,4093
wftcixb %v15,%v17,4093
# Miscellaneous arch12 scalar instructions (add/subtract halfword, BIC, multiply).
agh %r6,-10000(%r9,%r11)
bic 13,-10000(%r6,%r9)
bi -10000(%r6,%r9)
bio -10000(%r6,%r9)
bih -10000(%r6,%r9)
bip -10000(%r6,%r9)
binle -10000(%r6,%r9)
bil -10000(%r6,%r9)
bim -10000(%r6,%r9)
binhe -10000(%r6,%r9)
bilh -10000(%r6,%r9)
bine -10000(%r6,%r9)
binz -10000(%r6,%r9)
bie -10000(%r6,%r9)
biz -10000(%r6,%r9)
binlh -10000(%r6,%r9)
bihe -10000(%r6,%r9)
binl -10000(%r6,%r9)
binm -10000(%r6,%r9)
bile -10000(%r6,%r9)
binh -10000(%r6,%r9)
binp -10000(%r6,%r9)
bino -10000(%r6,%r9)
mgrk %r6,%r9,%r11
mg %r6,-10000(%r9,%r11)
mgh %r6,-10000(%r9,%r11)
msrkc %r6,%r9,%r11
msgrkc %r6,%r9,%r11
msc %r6,-10000(%r9,%r11)
msgc %r6,-10000(%r9,%r11)
sgh %r6,-10000(%r9,%r11)
# Vector load/store rightmost with length.
vlrlr %v15,%r6,4000(%r9)
vlrl %v15,4000(%r6),253
vstrlr %v15,%r6,4000(%r9)
vstrl %v15,4000(%r6),253
# Vector packed-decimal facility.
vap %v15,%v17,%v20,253,12
vcp %v15,%v17,13
vcvb %r6,%v15,13
vcvbg %r6,%v15,13
vcvd %v15,%r6,253,12
vcvdg %v15,%r6,253,12
vdp %v15,%v17,%v20,253,12
vlip %v15,65533,12
vmp %v15,%v17,%v20,253,12
vmsp %v15,%v17,%v20,253,12
vpkz %v15,4000(%r6),253
vpsop %v15,%v17,253,252,11
vrp %v15,%v17,%v20,253,12
vsdp %v15,%v17,%v20,253,12
vsrp %v15,%v17,253,252,11
vsp %v15,%v17,%v20,253,12
vtp %v15
vupkz %v15,4000(%r6),253
# Guarded-storage facility loads/stores.
lgg %r6,-10000(%r9,%r11)
llgfsg %r6,-10000(%r9,%r11)
lgsc %r6,-10000(%r9,%r11)
stgsc %r6,-10000(%r9,%r11)
# Message-security-assist 8 and other arch12 extensions.
kma %r6,%r9,%r11
prno %r6,%r9
tpei %r6,%r9
irbm %r6,%r9
# Vector load/store (multiple) with optional alignment hint operand.
vl %v15,4000(%r6,%r9)
vl %v15,4000(%r6,%r9),13
vlm %v15,%v17,4000(%r6)
vlm %v15,%v17,4000(%r6),13
vst %v15,4000(%r6,%r9)
vst %v15,4000(%r6,%r9),13
vstm %v15,%v17,4000(%r6)
vstm %v15,%v17,4000(%r6),13
# --- file boundary: gas/testsuite/gas/s390/zarch-zEC12.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for zEC12 (arch10) instructions.  The operand text
# below is matched byte-for-byte against the expected disassembly, so the
# instructions themselves must stay unchanged.
.text
foo:
# Transactional-execution facility.
etnd %r6
ntstg %r6,-5555(%r7,%r8)
tabort 4000(%r6)
tbegin 4000(%r6),65000
tbeginc 4000(%r6),65000
tend
# Branch-prediction preload and next-instruction-access intent.
bpp 10,.,4000(%r6)
bprp 10,.,.+24
niai 10,13
# Load-and-trap family.
lat %r6,-5555(%r7,%r8)
lgat %r6,-5555(%r7,%r8)
lfhat %r6,-5555(%r7,%r8)
llgfat %r6,-5555(%r7,%r8)
llgtat %r6,-5555(%r7,%r8)
# Compare-logical-and-trap (32-bit) with explicit and mnemonic condition masks.
clt %r6,10,-5555(%r7)
clth %r6,-5555(%r7)
cltnle %r6,-5555(%r7)
cltl %r6,-5555(%r7)
cltnhe %r6,-5555(%r7)
cltlh %r6,-5555(%r7)
cltne %r6,-5555(%r7)
clte %r6,-5555(%r7)
cltnlh %r6,-5555(%r7)
clthe %r6,-5555(%r7)
cltnl %r6,-5555(%r7)
cltle %r6,-5555(%r7)
cltnh %r6,-5555(%r7)
# Compare-logical-and-trap (64-bit).
clgt %r6,10,-5555(%r7)
clgth %r6,-5555(%r7)
clgtnle %r6,-5555(%r7)
clgtl %r6,-5555(%r7)
clgtnhe %r6,-5555(%r7)
clgtlh %r6,-5555(%r7)
clgtne %r6,-5555(%r7)
clgte %r6,-5555(%r7)
clgtnlh %r6,-5555(%r7)
clgthe %r6,-5555(%r7)
clgtnl %r6,-5555(%r7)
clgtle %r6,-5555(%r7)
clgtnh %r6,-5555(%r7)
# RISBGN (rotate-then-insert, no CC) including the "z" zero-remaining form.
risbgn %r6,%r7,12,13,14
risbgn %r6,%r7,12,188,14
risbgnz %r6,%r7,12,20,14
# Zoned-decimal <-> DFP conversions.
cdzt %f6,4000(16,%r8),13
cxzt %f4,4000(34,%r8),13
czdt %f6,4000(16,%r8),13
czxt %f4,4000(34,%r8),13
# Processor-assist and compare-and-replace-DAT-table-entry.
ppa %r5,%r6,12
crdte %r5,%r6,%r9
crdte %r5,%r6,%r9,1
# BPP/BPRP with symbolic (and @PLT) targets to exercise relocations.
bprp 10,bar,bar
bprp 10,bar@PLT,bar@PLT
bpp 10,bar@PLT,0
bpp 10,baz,0
bar:
# --- file boundary: gas/testsuite/gas/s390/zarch-z9-ec.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for z9-EC (arch7) instructions: FPS sign-handling
# ops, GPR<->FPR transfers, and the decimal-floating-point (DFP) facility.
# Instruction text must stay byte-identical to match the expected dump.
.text
foo:
# Floating-point-support sign operations and GPR<->FPR moves.
lpdfr %f6,%f2
lndfr %f6,%f2
cpsdr %f6,%f1,%f2
lcdfr %f6,%f2
ldgr %f6,%r2
lgdr %r2,%f6
# DFP add / compare (ordered, signalling, exponent-only).
adtr %f6,%f2,%f4
axtr %f8,%f9,%f4
cdtr %f6,%f2
cxtr %f1,%f0
kdtr %f6,%f2
kxtr %f6,%f2
cedtr %f6,%f2
cextr %f1,%f0
# DFP conversions to/from fixed, signed/unsigned BCD.
cdgtr %f6,%r2
cxgtr %f1,%r2
cdstr %f6,%r2
cxstr %f6,%r2
cdutr %f6,%r2
cxutr %f1,%r2
cgdtr %r2,1,%f6
cgxtr %r2,1,%f1
csdtr %r6,%f3,13
csxtr %r6,%f1,13
cudtr %r2,%f6
cuxtr %r2,%f1
# DFP divide, extract/insert biased exponent, extract significance.
ddtr %f6,%f2,%f4
dxtr %f1,%f0,%f4
eedtr %r2,%f6
eextr %r2,%f1
esdtr %r2,%f6
esxtr %r2,%f1
iedtr %f6,%f2,%r4
iextr %f1,%f0,%r4
# DFP load-and-test, load-FP-integer, FPC save/restore.
ltdtr %f6,%f2
ltxtr %f5,%f4
fidtr %f6,1,%f2,3
fixtr %f5,1,%f4,3
lfas 3(%r1)
# DFP load-lengthened / load-rounded.
ldetr %f6,%f2,1
lxdtr %f4,%f2,1
ledtr %f6,1,%f2,3
ldxtr %f6,1,%f4,3
# DFP multiply, quantize, reround.
mdtr %f6,%f2,%f4
mxtr %f9,%f8,%f4
qadtr %f6,%f2,%f4,1
qaxtr %f9,%f8,%f4,1
rrdtr %f6,%f2,%r4,1
rrxtr %f9,%f8,%r4,1
srnmt 3(%r1)
sfasr %r2
# DFP shift significand left/right.
sldt %f6,%f2,3(%r1,%r4)
slxt %f5,%f4,3(%r1,%r4)
srdt %f6,%f2,3(%r1,%r4)
srxt %f5,%f4,3(%r1,%r4)
# DFP subtract and test-data-class/group.
sdtr %f6,%f2,%f4
sxtr %f5,%f1,%f4
tdcet %f6,3(%r1,%r2)
tdcdt %f6,3(%r1,%r2)
tdcxt %f5,3(%r1,%r2)
tdget %f6,3(%r1,%r2)
tdgdt %f6,3(%r1,%r2)
tdgxt %f5,3(%r1,%r2)
pfpo
ectg 10(%r1),20(%r2),%r3
csst 10(%r1),20(%r2),%r3
/* The following .data section is 4 byte aligned.
   So we get 2 additional bytes of 07 07 wherefor
   we have to provide an instruction. */
bcr 0,%r7
# --- file boundary: gas/testsuite/gas/s390/zarch-z196.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for z196 (arch9) instructions: high-word facility,
# interlocked-access, load/store-on-condition, distinct-operands, popcount
# and FP conversion extensions.  Instruction text must stay byte-identical.
.text
foo:
# High-word-facility arithmetic and compares.
ahhhr %r6,%r7,%r8
ahhlr %r6,%r7,%r8
aih %r6,-65000
alhhhr %r6,%r7,%r8
alhhlr %r6,%r7,%r8
alsih %r6,65000
alsihn %r6,65000
brcth %r6,.
chhr %r6,%r7
chlr %r6,%r7
chf %r6,5555(%r7,%r8)
cih %r6,65000
clhhr %r6,%r7
clhlr %r6,%r7
clhf %r6,5555(%r7,%r8)
clih %r6,650000
clih %r9,4000000000
# High-word loads and stores.
lbh %r6,-5555(%r7,%r8)
lhh %r6,-5555(%r7,%r8)
lfh %r6,-5555(%r7,%r8)
llch %r6,-5555(%r7,%r8)
llhh %r6,-5555(%r7,%r8)
risbhg %r6,%r7,12,13,14
risblg %r6,%r7,12,13,14
stch %r6,-5555(%r7,%r8)
sthh %r6,-5555(%r7,%r8)
stfh %r6,-5555(%r7,%r8)
shhhr %r6,%r7,%r8
shhlr %r6,%r7,%r8
slhhhr %r6,%r7,%r8
slhhlr %r6,%r7,%r8
# Interlocked-access facility (load-and-op, load-pair-disjoint).
laa %r6,%r7,-5555(%r8)
laag %r6,%r7,-5555(%r8)
laal %r6,%r7,-5555(%r8)
laalg %r6,%r7,-5555(%r8)
lan %r6,%r7,-5555(%r8)
lang %r6,%r7,-5555(%r8)
lax %r6,%r7,-5555(%r8)
laxg %r6,%r7,-5555(%r8)
lao %r6,%r7,-5555(%r8)
laog %r6,%r7,-5555(%r8)
lpd %r6,2222(%r7),1111(%r8)
lpdg %r6,2222(%r7),1111(%r8)
# Load-on-condition, register form (32-bit), all condition mnemonics.
locro %r6,%r7
locrh %r6,%r7
locrp %r6,%r7
locrnle %r6,%r7
locrl %r6,%r7
locrm %r6,%r7
locrnhe %r6,%r7
locrlh %r6,%r7
locrne %r6,%r7
locrnz %r6,%r7
locre %r6,%r7
locrz %r6,%r7
locrnlh %r6,%r7
locrhe %r6,%r7
locrnl %r6,%r7
locrnm %r6,%r7
locrle %r6,%r7
locrnh %r6,%r7
locrnp %r6,%r7
locrno %r6,%r7
locr %r6,%r7,8
# Load-on-condition, register form (64-bit).
locgro %r6,%r7
locgrh %r6,%r7
locgrp %r6,%r7
locgrnle %r6,%r7
locgrl %r6,%r7
locgrm %r6,%r7
locgrnhe %r6,%r7
locgrlh %r6,%r7
locgrne %r6,%r7
locgrnz %r6,%r7
locgre %r6,%r7
locgrz %r6,%r7
locgrnlh %r6,%r7
locgrhe %r6,%r7
locgrnl %r6,%r7
locgrnm %r6,%r7
locgrle %r6,%r7
locgrnh %r6,%r7
locgrnp %r6,%r7
locgrno %r6,%r7
locgr %r6,%r7,8
# Load-on-condition, storage form (32-bit).
loco %r6,-5555(%r7)
loch %r6,-5555(%r7)
locp %r6,-5555(%r7)
locnle %r6,-5555(%r7)
locl %r6,-5555(%r7)
locm %r6,-5555(%r7)
locnhe %r6,-5555(%r7)
loclh %r6,-5555(%r7)
locne %r6,-5555(%r7)
locnz %r6,-5555(%r7)
loce %r6,-5555(%r7)
locz %r6,-5555(%r7)
locnlh %r6,-5555(%r7)
loche %r6,-5555(%r7)
locnl %r6,-5555(%r7)
locnm %r6,-5555(%r7)
locle %r6,-5555(%r7)
locnh %r6,-5555(%r7)
locnp %r6,-5555(%r7)
locno %r6,-5555(%r7)
loc %r6,-5555(%r7),8
# Load-on-condition, storage form (64-bit).
locgo %r6,-5555(%r7)
locgh %r6,-5555(%r7)
locgp %r6,-5555(%r7)
locgnle %r6,-5555(%r7)
locgl %r6,-5555(%r7)
locgm %r6,-5555(%r7)
locgnhe %r6,-5555(%r7)
locglh %r6,-5555(%r7)
locgne %r6,-5555(%r7)
locgnz %r6,-5555(%r7)
locge %r6,-5555(%r7)
locgz %r6,-5555(%r7)
locgnlh %r6,-5555(%r7)
locghe %r6,-5555(%r7)
locgnl %r6,-5555(%r7)
locgnm %r6,-5555(%r7)
locgle %r6,-5555(%r7)
locgnh %r6,-5555(%r7)
locgnp %r6,-5555(%r7)
locgno %r6,-5555(%r7)
locg %r6,-5555(%r7),8
# Store-on-condition (32-bit).
stoco %r6,-5555(%r7)
stoch %r6,-5555(%r7)
stocp %r6,-5555(%r7)
stocnle %r6,-5555(%r7)
stocl %r6,-5555(%r7)
stocm %r6,-5555(%r7)
stocnhe %r6,-5555(%r7)
stoclh %r6,-5555(%r7)
stocne %r6,-5555(%r7)
stocnz %r6,-5555(%r7)
stoce %r6,-5555(%r7)
stocz %r6,-5555(%r7)
stocnlh %r6,-5555(%r7)
stoche %r6,-5555(%r7)
stocnl %r6,-5555(%r7)
stocnm %r6,-5555(%r7)
stocle %r6,-5555(%r7)
stocnh %r6,-5555(%r7)
stocnp %r6,-5555(%r7)
stocno %r6,-5555(%r7)
stoc %r6,-5555(%r7),8
# Store-on-condition (64-bit).
stocgo %r6,-5555(%r7)
stocgh %r6,-5555(%r7)
stocgp %r6,-5555(%r7)
stocgnle %r6,-5555(%r7)
stocgl %r6,-5555(%r7)
stocgm %r6,-5555(%r7)
stocgnhe %r6,-5555(%r7)
stocglh %r6,-5555(%r7)
stocgne %r6,-5555(%r7)
stocgnz %r6,-5555(%r7)
stocge %r6,-5555(%r7)
stocgz %r6,-5555(%r7)
stocgnlh %r6,-5555(%r7)
stocghe %r6,-5555(%r7)
stocgnl %r6,-5555(%r7)
stocgnm %r6,-5555(%r7)
stocgle %r6,-5555(%r7)
stocgnh %r6,-5555(%r7)
stocgnp %r6,-5555(%r7)
stocgno %r6,-5555(%r7)
stocg %r6,-5555(%r7),8
# Distinct-operands facility (three-operand arithmetic/logic/shift).
ark %r6,%r7,%r8
agrk %r6,%r7,%r8
ahik %r6,%r7,-32000
aghik %r6,%r7,-32000
alrk %r6,%r7,%r8
algrk %r6,%r7,%r8
alhsik %r6,%r7,-32000
alghsik %r6,%r7,-32000
nrk %r6,%r7,%r8
ngrk %r6,%r7,%r8
xrk %r6,%r7,%r8
xgrk %r6,%r7,%r8
ork %r6,%r7,%r8
ogrk %r6,%r7,%r8
slak %r6,%r7,-5555(%r8)
sllk %r6,%r7,-5555(%r8)
srak %r6,%r7,-5555(%r8)
srlk %r6,%r7,-5555(%r8)
srk %r6,%r7,%r8
sgrk %r6,%r7,%r8
slrk %r6,%r7,%r8
slgrk %r6,%r7,%r8
popcnt %r6,%r7
rrbm %r6,%r7
# BFP conversions with explicit rounding mode / modifier operands.
cefbra %f5,3,%r9,7
cdfbra %f5,3,%r9,7
cxfbra %f5,3,%r9,7
cegbra %f5,3,%r9,7
cdgbra %f5,3,%r9,7
cxgbra %f5,3,%r9,7
celfbr %f5,3,%r9,7
cdlfbr %f5,3,%r9,7
cxlfbr %f5,3,%r9,7
celgbr %f5,3,%r9,7
cdlgbr %f5,3,%r9,7
cxlgbr %f5,3,%r9,7
cfebra %r5,3,%f9,7
cfdbra %r5,3,%f9,7
cfxbra %r5,3,%f8,7
cgebra %r5,3,%f9,7
cgdbra %r5,3,%f9,7
cgxbra %r5,3,%f8,7
clfebr %r5,3,%f9,7
clfdbr %r5,3,%f9,7
clfxbr %r5,3,%f8,7
clgebr %r5,3,%f9,7
clgdbr %r5,3,%f9,7
clgxbr %r5,3,%f8,7
fiebra %f5,3,%f9,7
fidbra %f5,3,%f9,7
fixbra %f5,3,%f8,7
ledbra %f5,3,%f9,7
ldxbra %f5,3,%f8,7
lexbra %f5,3,%f8,7
# DFP arithmetic/conversions with explicit rounding mode.
adtra %f3,%f5,%f9,7
axtra %f1,%f4,%f5,7
cdgtra %f5,3,%r9,7
cdftr %f5,3,%r9,7
cxftr %f5,3,%r9,7
cxgtra %f5,3,%r9,7
cdlgtr %f5,3,%r9,7
cxlgtr %f5,3,%r9,7
cdlftr %f5,3,%r9,7
cxlftr %f5,3,%r9,7
cgdtra %r5,3,%f9,7
cgxtra %r5,3,%f8,7
cfdtr %r5,3,%f9,7
cfxtr %r5,3,%f9,7
clgdtr %r5,3,%f9,7
clgxtr %r5,3,%f8,7
clfdtr %r5,3,%f9,7
clfxtr %r5,3,%f8,7
ddtra %f3,%f5,%f9,7
dxtra %f1,%f4,%f5,7
mdtra %f3,%f5,%f9,7
mxtra %f1,%f4,%f5,7
sdtra %f3,%f5,%f9,7
sxtra %f1,%f4,%f5,7
srnmb 4000(%r7)
# Message-security-assist 4 extensions.
kmf %r5,%r6
kmo %r5,%r6
pcc
kmctr %r5,%r6,%r9
pckmo
# --- file boundary: gas/testsuite/gas/s390/zarch-z900.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for the original z900 (arch5) 64-bit z/Architecture
# instruction set.  Instruction text must stay byte-identical to match the
# expected disassembly.
.text
foo:
# 64-bit add / add-logical (with carry).
ag %r9,4095(%r5,%r10)
agf %r9,4095(%r5,%r10)
agfr %r9,%r6
aghi %r9,-32767
agr %r9,%r6
alcg %r9,4095(%r5,%r10)
alcgr %r9,%r6
alg %r9,4095(%r5,%r10)
algf %r9,4095(%r5,%r10)
algfr %r9,%r6
algr %r9,%r6
# 64-bit branch-on-count / branch-on-index.
bctg %r9,4095(%r5,%r10)
bctgr %r9,%r6
brctg %r9,.
jctg %r6,.
brxhg %r9,%r6,.
jxhg %r6,%r9,.
brxlg %r9,%r6,.
jxleg %r6,%r9,.
bxhg %r9,%r6,4095(%r5)
bxleg %r9,%r6,4095(%r5)
# 64-bit conversions and compares (fixed <-> BFP/HFP, compare-and-swap).
cdgbr %f9,%r6
cdgr %f9,%r6
cdsg %r8,%r6,4095(%r5)
cegbr %f9,%r6
cegr %f9,%r6
cg %r9,4095(%r5,%r10)
cgdbr %r6,15,%f5
cgdr %r6,15,%f5
cgebr %r6,15,%f5
cger %r6,15,%f5
cgf %r9,4095(%r5,%r10)
cgfr %r9,%r6
cghi %r9,-32767
cgr %r9,%r6
cgxbr %r6,15,%f4
cgxr %r6,15,%f4
clg %r9,4095(%r5,%r10)
clgf %r9,4095(%r5,%r10)
clgfr %r9,%r6
clgr %r9,%r6
clmh %r9,10,4095(%r5)
csg %r9,%r6,4095(%r5)
cvbg %r9,4095(%r5,%r10)
cvdg %r9,4095(%r5,%r10)
cxgbr %f8,%r6
cxgr %f8,%r6
# 64-bit divide (logical, single).
dlg %r8,4095(%r5,%r10)
dlgr %r8,%r6
dsg %r8,4095(%r5,%r10)
dsgf %r8,4095(%r5,%r10)
dsgfr %r8,%r6
dsgr %r8,%r6
eregg %r9,%r6
esea %r9
icmh %r9,10,4095(%r5)
# Insert-immediate into the four halfwords of a 64-bit register.
iihh %r9,65535
iihl %r9,65535
iilh %r9,65535
iill %r9,65535
# 64-bit loads (complement, control, logical, halfword, reversed...).
lcgfr %r9,%r6
lcgr %r9,%r6
lctlg %c9,%c6,4095(%r5)
lg %r9,4095(%r5,%r10)
lgf %r9,4095(%r5,%r10)
lgfr %r9,%r6
lgh %r9,4095(%r5,%r10)
lghi %r9,-32767
lgr %r9,%r6
llgc %r9,4095(%r5,%r10)
llgf %r9,4095(%r5,%r10)
llgfr %r9,%r6
llgh %r9,4095(%r5,%r10)
llgt %r9,4095(%r5,%r10)
llgtr %r9,%r6
llihh %r9,65535
llihl %r9,65535
llilh %r9,65535
llill %r9,65535
lmd %r9,%r6,4095(%r5),4095(%r10)
lmg %r9,%r6,4095(%r5)
lmh %r9,%r6,4095(%r5)
lngfr %r9,%r6
lngr %r9,%r6
lpgfr %r9,%r6
lpgr %r9,%r6
lpq %r8,4095(%r5,%r10)
lpswe 4095(%r5)
lrag %r9,4095(%r5,%r10)
lrvg %r9,4095(%r5,%r10)
lrvgr %r9,%r6
ltgfr %r9,%r6
ltgr %r9,%r6
lurag %r9,%r6
# 64-bit multiply.
mghi %r9,-32767
mlg %r8,4095(%r5,%r10)
mlgr %r8,%r6
msg %r9,4095(%r5,%r10)
msgf %r9,4095(%r5,%r10)
msgfr %r9,%r6
msgr %r9,%r6
# 64-bit AND / OR with immediate halfword variants.
ng %r9,4095(%r5,%r10)
ngr %r9,%r6
nihh %r9,65535
nihl %r9,65535
nilh %r9,65535
nill %r9,65535
og %r9,4095(%r5,%r10)
ogr %r9,%r6
oihh %r9,65535
oihl %r9,65535
oilh %r9,65535
oill %r9,65535
rllg %r9,%r6,4095(%r5)
sam64
# 64-bit subtract / shifts / stores.
sg %r9,4095(%r5,%r10)
sgf %r9,4095(%r5,%r10)
sgfr %r9,%r6
sgr %r9,%r6
slag %r9,%r6,4095(%r5)
slbg %r9,4095(%r5,%r10)
slbgr %r9,%r6
slg %r9,4095(%r5,%r10)
slgf %r9,4095(%r5,%r10)
slgfr %r9,%r6
slgr %r9,%r6
sllg %r9,%r6,4095(%r5)
srag %r9,%r6,4095(%r5)
srlg %r9,%r6,4095(%r5)
stcmh %r9,10,4095(%r5)
stctg %c9,%c6,4095(%r5)
stg %r9,4095(%r5,%r10)
stmg %r9,%r6,4095(%r5)
stmh %r9,%r6,4095(%r5)
stpq %r9,4095(%r5,%r10)
strag 4095(%r5),4095(%r9)
strvg %r9,4095(%r5,%r10)
sturg %r9,%r6
tmhh %r9,65535
tmhl %r9,65535
tracg %r9,%r6,4095(%r5)
xg %r9,4095(%r5,%r10)
xgr %r9,%r6
# --- file boundary: gas/testsuite/gas/s390/zarch-arch13.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for arch13 (z15) instructions: miscellaneous logical
# ops (NAND/NOR/NXOR/OC), select-on-condition, vector byte-reversed and
# element-reversed loads/stores, DEFLATE/SORTL/KDSA assists.
# Instruction text must stay byte-identical to match the expected dump.
.text
foo:
# Miscellaneous-instruction-extensions 3: combined logical operations.
ncrk %r6,%r9,%r11
ncgrk %r6,%r9,%r11
mvcrl 4000(%r6),4000(%r9)
nnrk %r6,%r9,%r11
nngrk %r6,%r9,%r11
nork %r6,%r9,%r11
nogrk %r6,%r9,%r11
nxrk %r6,%r9,%r11
nxgrk %r6,%r9,%r11
ocrk %r6,%r9,%r11
ocgrk %r6,%r9,%r11
popcnt %r6,%r9
popcnt %r6,%r9,13
# Select (32-bit) with explicit mask and all condition-code mnemonics.
selr %r6,%r9,%r11,13
selro %r6,%r9,%r11
selrh %r6,%r9,%r11
selrp %r6,%r9,%r11
selrnle %r6,%r9,%r11
selrl %r6,%r9,%r11
selrm %r6,%r9,%r11
selrnhe %r6,%r9,%r11
selrlh %r6,%r9,%r11
selrne %r6,%r9,%r11
selrnz %r6,%r9,%r11
selre %r6,%r9,%r11
selrz %r6,%r9,%r11
selrnlh %r6,%r9,%r11
selrhe %r6,%r9,%r11
selrnl %r6,%r9,%r11
selrnm %r6,%r9,%r11
selrle %r6,%r9,%r11
selrnh %r6,%r9,%r11
selrnp %r6,%r9,%r11
selrno %r6,%r9,%r11
# Select (64-bit).
selgr %r6,%r9,%r11,13
selgro %r6,%r9,%r11
selgrh %r6,%r9,%r11
selgrp %r6,%r9,%r11
selgrnle %r6,%r9,%r11
selgrl %r6,%r9,%r11
selgrm %r6,%r9,%r11
selgrnhe %r6,%r9,%r11
selgrlh %r6,%r9,%r11
selgrne %r6,%r9,%r11
selgrnz %r6,%r9,%r11
selgre %r6,%r9,%r11
selgrz %r6,%r9,%r11
selgrnlh %r6,%r9,%r11
selgrhe %r6,%r9,%r11
selgrnl %r6,%r9,%r11
selgrnm %r6,%r9,%r11
selgrle %r6,%r9,%r11
selgrnh %r6,%r9,%r11
selgrnp %r6,%r9,%r11
selgrno %r6,%r9,%r11
# Select high (writes the high word of the first operand).
selfhr %r6,%r9,%r11,13
selfhro %r6,%r9,%r11
selfhrh %r6,%r9,%r11
selfhrp %r6,%r9,%r11
selfhrnle %r6,%r9,%r11
selfhrl %r6,%r9,%r11
selfhrm %r6,%r9,%r11
selfhrnhe %r6,%r9,%r11
selfhrlh %r6,%r9,%r11
selfhrne %r6,%r9,%r11
selfhrnz %r6,%r9,%r11
selfhre %r6,%r9,%r11
selfhrz %r6,%r9,%r11
selfhrnlh %r6,%r9,%r11
selfhrhe %r6,%r9,%r11
selfhrnl %r6,%r9,%r11
selfhrnm %r6,%r9,%r11
selfhrle %r6,%r9,%r11
selfhrnh %r6,%r9,%r11
selfhrnp %r6,%r9,%r11
selfhrno %r6,%r9,%r11
# Vector byte-reversed / element-reversed loads.
vlbr %v15,4000(%r6,%r9),13
vlbrh %v15,4000(%r6,%r9)
vlbrf %v15,4000(%r6,%r9)
vlbrg %v15,4000(%r6,%r9)
vlbrq %v15,4000(%r6,%r9)
vler %v15,4000(%r6,%r9),13
vlerh %v15,4000(%r6,%r9)
vlerf %v15,4000(%r6,%r9)
vlerg %v15,4000(%r6,%r9)
vllebrz %v15,4000(%r6,%r9),13
vllebrzh %v15,4000(%r6,%r9)
vllebrzf %v15,4000(%r6,%r9)
ldrv %v15,4000(%r6,%r9)
vllebrzg %v15,4000(%r6,%r9)
lerv %v15,4000(%r6,%r9)
vllebrze %v15,4000(%r6,%r9)
vlebrh %v15,4000(%r6,%r9),13
vlebrf %v15,4000(%r6,%r9),13
vlebrg %v15,4000(%r6,%r9),13
vlbrrep %v15,4000(%r6,%r9),13
vlbrreph %v15,4000(%r6,%r9)
vlbrrepf %v15,4000(%r6,%r9)
vlbrrepg %v15,4000(%r6,%r9)
# Vector byte-reversed / element-reversed stores.
vstbr %v15,4000(%r6,%r9),13
vstbrh %v15,4000(%r6,%r9)
vstbrf %v15,4000(%r6,%r9)
vstbrg %v15,4000(%r6,%r9)
vstbrq %v15,4000(%r6,%r9)
vster %v15,4000(%r6,%r9),13
vsterh %v15,4000(%r6,%r9)
vsterf %v15,4000(%r6,%r9)
vsterg %v15,4000(%r6,%r9)
vstebrh %v15,4000(%r6,%r9),13
vstebrf %v15,4000(%r6,%r9),13
sterv %v15,4000(%r6,%r9)
vstebrg %v15,4000(%r6,%r9),13
stdrv %v15,4000(%r6,%r9)
# Vector shift double by bit, string search.
vsld %v15,%v17,%v20,253
vsrd %v15,%v17,%v20,253
vstrs %v15,%v17,%v20,%v24,13
vstrs %v15,%v17,%v20,%v24,13,12
vstrsb %v15,%v17,%v20,%v24
vstrsb %v15,%v17,%v20,%v24,13
vstrsh %v15,%v17,%v20,%v24
vstrsh %v15,%v17,%v20,%v24,13
vstrsf %v15,%v17,%v20,%v24
vstrsf %v15,%v17,%v20,%v24,13
vstrszb %v15,%v17,%v20,%v24
vstrszh %v15,%v17,%v20,%v24
vstrszf %v15,%v17,%v20,%v24
# Vector FP <-> fixed (32-bit element) conversions.
vcfps %v15,%v17,13,12,11
vcefb %v15,%v17,13,12
wcefb %v15,%v17,13,12
vcfpl %v15,%v17,13,12,11
vcelfb %v15,%v17,13,12
wcelfb %v15,%v17,13,12
vcsfp %v15,%v17,13,12,11
vcfeb %v15,%v17,13,12
wcfeb %v15,%v17,13,12
vclfp %v15,%v17,13,12,11
vclfeb %v15,%v17,13,12
wclfeb %v15,%v17,13,12
# DEFLATE conversion, sort lists, VCVB with optional CC operand, KDSA.
dfltcc %r6,%r9,%r11
sortl %r6,%r9
vcvb %r6,%v15,13
vcvb %r6,%v15,13,12
vcvbg %r6,%v15,13
vcvbg %r6,%v15,13,12
kdsa %r6,%r9
# --- file boundary: gas/testsuite/gas/s390/zarch-z10.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for z10 (arch8) general-instructions-extension:
# add-immediate-to-storage, compare-and-branch/jump/trap families (signed,
# unsigned, immediate), relative-long loads/stores, RISBG, prefetch and
# CPU-measurement instructions.  Instruction text must stay byte-identical.
.text
foo:
# Add immediate to storage.
asi 5555(%r6),-42
agsi 5555(%r6),-42
alsi 5555(%r6),-42
algsi 5555(%r6),-42
# Compare relative-long.
crl %r6,.
cgrl %r6,.
cgfrl %r6,.
# Compare-and-branch, register, 32-bit: explicit mask then all mnemonics.
crb %r6,%r7,10,1111(%r8)
crbh %r6,%r7,1111(%r8)
crbnle %r6,%r7,1111(%r8)
crbl %r6,%r7,1111(%r8)
crbnhe %r6,%r7,1111(%r8)
crblh %r6,%r7,1111(%r8)
crbne %r6,%r7,1111(%r8)
crbe %r6,%r7,1111(%r8)
crbnlh %r6,%r7,1111(%r8)
crbhe %r6,%r7,1111(%r8)
crbnl %r6,%r7,1111(%r8)
crble %r6,%r7,1111(%r8)
crbnh %r6,%r7,1111(%r8)
# Compare-and-branch, register, 64-bit.
cgrb %r6,%r7,10,1111(%r8)
cgrbh %r6,%r7,1111(%r8)
cgrbnle %r6,%r7,1111(%r8)
cgrbl %r6,%r7,1111(%r8)
cgrbnhe %r6,%r7,1111(%r8)
cgrblh %r6,%r7,1111(%r8)
cgrbne %r6,%r7,1111(%r8)
cgrbe %r6,%r7,1111(%r8)
cgrbnlh %r6,%r7,1111(%r8)
cgrbhe %r6,%r7,1111(%r8)
cgrbnl %r6,%r7,1111(%r8)
cgrble %r6,%r7,1111(%r8)
cgrbnh %r6,%r7,1111(%r8)
# Compare-and-branch-relative, register, 32-bit.
crj %r6,%r7,10,.
crjh %r6,%r7,.
crjnle %r6,%r7,.
crjl %r6,%r7,.
crjnhe %r6,%r7,.
crjlh %r6,%r7,.
crjne %r6,%r7,.
crje %r6,%r7,.
crjnlh %r6,%r7,.
crjhe %r6,%r7,.
crjnl %r6,%r7,.
crjle %r6,%r7,.
crjnh %r6,%r7,.
# Compare-and-branch-relative, register, 64-bit.
cgrj %r6,%r7,10,.
cgrjh %r6,%r7,.
cgrjnle %r6,%r7,.
cgrjl %r6,%r7,.
cgrjnhe %r6,%r7,.
cgrjlh %r6,%r7,.
cgrjne %r6,%r7,.
cgrje %r6,%r7,.
cgrjnlh %r6,%r7,.
cgrjhe %r6,%r7,.
cgrjnl %r6,%r7,.
cgrjle %r6,%r7,.
cgrjnh %r6,%r7,.
# Compare-immediate-and-branch, 32-bit.
cib %r6,-42,10,1111(%r7)
cibh %r6,-42,1111(%r7)
cibnle %r6,-42,1111(%r7)
cibl %r6,-42,1111(%r7)
cibnhe %r6,-42,1111(%r7)
ciblh %r6,-42,1111(%r7)
cibne %r6,-42,1111(%r7)
cibe %r6,-42,1111(%r7)
cibnlh %r6,-42,1111(%r7)
cibhe %r6,-42,1111(%r7)
cibnl %r6,-42,1111(%r7)
cible %r6,-42,1111(%r7)
cibnh %r6,-42,1111(%r7)
# Compare-immediate-and-branch, 64-bit.
cgib %r6,-42,10,1111(%r7)
cgibh %r6,-42,1111(%r7)
cgibnle %r6,-42,1111(%r7)
cgibl %r6,-42,1111(%r7)
cgibnhe %r6,-42,1111(%r7)
cgiblh %r6,-42,1111(%r7)
cgibne %r6,-42,1111(%r7)
cgibe %r6,-42,1111(%r7)
cgibnlh %r6,-42,1111(%r7)
cgibhe %r6,-42,1111(%r7)
cgibnl %r6,-42,1111(%r7)
cgible %r6,-42,1111(%r7)
cgibnh %r6,-42,1111(%r7)
# Compare-immediate-and-branch-relative, 32-bit.
cij %r6,-42,10,.
cijh %r6,-42,.
cijnle %r6,-42,.
cijl %r6,-42,.
cijnhe %r6,-42,.
cijlh %r6,-42,.
cijne %r6,-42,.
cije %r6,-42,.
cijnlh %r6,-42,.
cijhe %r6,-42,.
cijnl %r6,-42,.
cijle %r6,-42,.
cijnh %r6,-42,.
# Compare-immediate-and-branch-relative, 64-bit.
cgij %r6,-42,10,.
cgijh %r6,-42,.
cgijnle %r6,-42,.
cgijl %r6,-42,.
cgijnhe %r6,-42,.
cgijlh %r6,-42,.
cgijne %r6,-42,.
cgije %r6,-42,.
cgijnlh %r6,-42,.
cgijhe %r6,-42,.
cgijnl %r6,-42,.
cgijle %r6,-42,.
cgijnh %r6,-42,.
# Compare-and-trap, register, 32-bit.
crt %r6,%r7,10
crth %r6,%r7
crtnle %r6,%r7
crtl %r6,%r7
crtnhe %r6,%r7
crtlh %r6,%r7
crtne %r6,%r7
crte %r6,%r7
crtnlh %r6,%r7
crthe %r6,%r7
crtnl %r6,%r7
crtle %r6,%r7
crtnh %r6,%r7
# Compare-and-trap, register, 64-bit.
cgrt %r6,%r7,10
cgrth %r6,%r7
cgrtnle %r6,%r7
cgrtl %r6,%r7
cgrtnhe %r6,%r7
cgrtlh %r6,%r7
cgrtne %r6,%r7
cgrte %r6,%r7
cgrtnlh %r6,%r7
cgrthe %r6,%r7
cgrtnl %r6,%r7
cgrtle %r6,%r7
cgrtnh %r6,%r7
# Compare-immediate-and-trap, 32-bit.
cit %r6,-30000,10
cith %r6,-30000
citnle %r6,-30000
citl %r6,-30000
citnhe %r6,-30000
citlh %r6,-30000
citne %r6,-30000
cite %r6,-30000
citnlh %r6,-30000
cithe %r6,-30000
citnl %r6,-30000
citle %r6,-30000
citnh %r6,-30000
# Compare-immediate-and-trap, 64-bit.
cgit %r6,-30000,10
cgith %r6,-30000
cgitnle %r6,-30000
cgitl %r6,-30000
cgitnhe %r6,-30000
cgitlh %r6,-30000
cgitne %r6,-30000
cgite %r6,-30000
cgitnlh %r6,-30000
cgithe %r6,-30000
cgitnl %r6,-30000
cgitle %r6,-30000
cgitnh %r6,-30000
# Compare halfword (storage-immediate and relative-long forms).
cgh %r6,5555(%r7,%r8)
chhsi 1111(%r6),-30000
chsi 1111(%r6),-30000
cghsi 1111(%r6),-30000
chrl %r6,.
cghrl %r6,.
clhhsi 1111(%r6),40000
clfhsi 1111(%r6),40000
clghsi 1111(%r6),40000
clrl %r6,.
clgrl %r6,.
clgfrl %r6,.
clhrl %r6,.
clghrl %r6,.
# Compare-logical-and-branch, register, 32-bit.
clrb %r6,%r7,10,1111(%r8)
clrbh %r6,%r7,1111(%r8)
clrbnle %r6,%r7,1111(%r8)
clrbl %r6,%r7,1111(%r8)
clrbnhe %r6,%r7,1111(%r8)
clrblh %r6,%r7,1111(%r8)
clrbne %r6,%r7,1111(%r8)
clrbe %r6,%r7,1111(%r8)
clrbnlh %r6,%r7,1111(%r8)
clrbhe %r6,%r7,1111(%r8)
clrbnl %r6,%r7,1111(%r8)
clrble %r6,%r7,1111(%r8)
clrbnh %r6,%r7,1111(%r8)
# Compare-logical-and-branch, register, 64-bit.
clgrb %r6,%r7,10,1111(%r8)
clgrbh %r6,%r7,1111(%r8)
clgrbnle %r6,%r7,1111(%r8)
clgrbl %r6,%r7,1111(%r8)
clgrbnhe %r6,%r7,1111(%r8)
clgrblh %r6,%r7,1111(%r8)
clgrbne %r6,%r7,1111(%r8)
clgrbe %r6,%r7,1111(%r8)
clgrbnlh %r6,%r7,1111(%r8)
clgrbhe %r6,%r7,1111(%r8)
clgrbnl %r6,%r7,1111(%r8)
clgrble %r6,%r7,1111(%r8)
clgrbnh %r6,%r7,1111(%r8)
# Compare-logical-and-branch-relative, 32-bit.
clrj %r6,%r7,10,.
clrjh %r6,%r7,.
clrjnle %r6,%r7,.
clrjl %r6,%r7,.
clrjnhe %r6,%r7,.
clrjlh %r6,%r7,.
clrjne %r6,%r7,.
clrje %r6,%r7,.
clrjnlh %r6,%r7,.
clrjhe %r6,%r7,.
clrjnl %r6,%r7,.
clrjle %r6,%r7,.
clrjnh %r6,%r7,.
# Compare-logical-and-branch-relative, 64-bit.
clgrj %r6,%r7,10,.
clgrjh %r6,%r7,.
clgrjnle %r6,%r7,.
clgrjl %r6,%r7,.
clgrjnhe %r6,%r7,.
clgrjlh %r6,%r7,.
clgrjne %r6,%r7,.
clgrje %r6,%r7,.
clgrjnlh %r6,%r7,.
clgrjhe %r6,%r7,.
clgrjnl %r6,%r7,.
clgrjle %r6,%r7,.
clgrjnh %r6,%r7,.
# Compare-logical-immediate-and-branch, 32-bit.
clib %r6,200,10,1111(%r7)
clibh %r6,200,1111(%r7)
clibnle %r6,200,1111(%r7)
clibl %r6,200,1111(%r7)
clibnhe %r6,200,1111(%r7)
cliblh %r6,200,1111(%r7)
clibne %r6,200,1111(%r7)
clibe %r6,200,1111(%r7)
clibnlh %r6,200,1111(%r7)
clibhe %r6,200,1111(%r7)
clibnl %r6,200,1111(%r7)
clible %r6,200,1111(%r7)
clibnh %r6,200,1111(%r7)
# Compare-logical-immediate-and-branch, 64-bit.
clgib %r6,200,10,1111(%r7)
clgibh %r6,200,1111(%r7)
clgibnle %r6,200,1111(%r7)
clgibl %r6,200,1111(%r7)
clgibnhe %r6,200,1111(%r7)
clgiblh %r6,200,1111(%r7)
clgibne %r6,200,1111(%r7)
clgibe %r6,200,1111(%r7)
clgibnlh %r6,200,1111(%r7)
clgibhe %r6,200,1111(%r7)
clgibnl %r6,200,1111(%r7)
clgible %r6,200,1111(%r7)
clgibnh %r6,200,1111(%r7)
# Compare-logical-immediate-and-branch-relative, 32-bit.
clij %r6,200,10,.
clijh %r6,200,.
clijnle %r6,200,.
clijl %r6,200,.
clijnhe %r6,200,.
clijlh %r6,200,.
clijne %r6,200,.
clije %r6,200,.
clijnlh %r6,200,.
clijhe %r6,200,.
clijnl %r6,200,.
clijle %r6,200,.
clijnh %r6,200,.
# Compare-logical-immediate-and-branch-relative, 64-bit.
clgij %r6,200,10,.
clgijh %r6,200,.
clgijnle %r6,200,.
clgijl %r6,200,.
clgijnhe %r6,200,.
clgijlh %r6,200,.
clgijne %r6,200,.
clgije %r6,200,.
clgijnlh %r6,200,.
clgijhe %r6,200,.
clgijnl %r6,200,.
clgijle %r6,200,.
clgijnh %r6,200,.
# Compare-logical-and-trap, register, 32-bit.
clrt %r6,%r7,10
clrth %r6,%r7
clrtnle %r6,%r7
clrtl %r6,%r7
clrtnhe %r6,%r7
clrtlh %r6,%r7
clrtne %r6,%r7
clrte %r6,%r7
clrtnlh %r6,%r7
clrthe %r6,%r7
clrtnl %r6,%r7
clrtle %r6,%r7
clrtnh %r6,%r7
# Compare-logical-and-trap, register, 64-bit.
clgrt %r6,%r7,10
clgrth %r6,%r7
clgrtnle %r6,%r7
clgrtl %r6,%r7
clgrtnhe %r6,%r7
clgrtlh %r6,%r7
clgrtne %r6,%r7
clgrte %r6,%r7
clgrtnlh %r6,%r7
clgrthe %r6,%r7
clgrtnl %r6,%r7
clgrtle %r6,%r7
clgrtnh %r6,%r7
# Compare-logical-immediate-and-trap, 32-bit.
clfit %r6,30000,10
clfith %r6,30000
clfitnle %r6,30000
clfitl %r6,30000
clfitnhe %r6,30000
clfitlh %r6,30000
clfitne %r6,30000
clfite %r6,30000
clfitnlh %r6,30000
clfithe %r6,30000
clfitnl %r6,30000
clfitle %r6,30000
clfitnh %r6,30000
# Compare-logical-immediate-and-trap, 64-bit.
clgit %r6,30000,10
clgith %r6,30000
clgitnle %r6,30000
clgitl %r6,30000
clgitnhe %r6,30000
clgitlh %r6,30000
clgitne %r6,30000
clgite %r6,30000
clgitnlh %r6,30000
clgithe %r6,30000
clgitnl %r6,30000
clgitle %r6,30000
clgitnh %r6,30000
# Extract-CPU-attribute, relative-long loads/stores, move-immediate.
ecag %r6,%r7,1111(%r8)
lrl %r6,.
lgrl %r6,.
lgfrl %r6,.
laey %r6,5555(%r7,%r8)
ltgf %r6,5555(%r7,%r8)
lhrl %r6,.
lghrl %r6,.
llgfrl %r6,.
llhrl %r6,.
llghrl %r6,.
mvhhi 1111(%r6),-30000
mvhi 1111(%r6),-30000
mvghi 1111(%r6),-30000
mfy %r6,5555(%r7,%r8)
mhy %r6,5555(%r7,%r8)
msfi %r6,-100000
msgfi %r6,-100000
# Prefetch-data and rotate-then-*-selected-bits.
pfd 10,5555(%r6,%r7)
pfdrl 10,.
rnsbg %r6,%r7,210,220,230
rxsbg %r6,%r7,210,220,230
rosbg %r6,%r7,210,220,230
risbg %r6,%r7,210,20,230
risbg %r6,%r7,210,188,230
risbgz %r6,%r7,210,20,230
strl %r6,.
stgrl %r6,.
sthrl %r6,.
exrl %r6,.
mc 3333(%r6),238
ptf %r6
pfmf %r6,%r7
# Translate-and-test-extended with optional mask operand.
trte %r6,%r7,10
trte %r6,%r7
trtre %r6,%r7,10
trtre %r6,%r7
# CPU-measurement facility.
ecpga %r6,%r7
ecctr %r6,%r7
epctr %r6,%r7
lcctl 3333(%r6)
lpctl 3333(%r6)
lsctl 3333(%r6)
qctri 3333(%r6)
qsi 3333(%r6)
scctr %r6,%r7
spctr %r6,%r7
lpp 3333(%r6)
# --- file boundary: gas/testsuite/gas/s390/zarch-z13.s (repo stsp/binutils-ia16) ---
# GAS testsuite source for z13 (arch11) vector-facility instructions.
# NOTE(review): the file continues past this view; only the visible lines
# are annotated here.  Instruction text must stay byte-identical to match
# the expected disassembly.
.text
foo:
# Vector generate/load/replicate and element access.
lcbb %r6,4000(%r9,%r11),13
vgef %v15,4000(%r6,%r9),13
vgeg %v15,4000(%r6,%r9),13
vgbm %v15,65533
vzero %v15
vone %v15
vgm %v15,253,252,11
vgmb %v15,253,252
vgmh %v15,253,252
vgmf %v15,253,252
vgmg %v15,253,252
vlr %v15,%v17
vlrep %v15,4000(%r6,%r9),13
vlrepb %v15,4000(%r6,%r9)
vlreph %v15,4000(%r6,%r9)
vlrepf %v15,4000(%r6,%r9)
vlrepg %v15,4000(%r6,%r9)
vleb %v15,4000(%r6,%r9),13
vleh %v15,4000(%r6,%r9),13
vlef %v15,4000(%r6,%r9),13
vleg %v15,4000(%r6,%r9),13
vleib %v15,-32765,12
vleih %v15,-32765,12
vleif %v15,-32765,12
vleig %v15,-32765,12
vlgv %r6,%v15,4000(%r9),13
vlgvb %r6,%v15,4000(%r9)
vlgvh %r6,%v15,4000(%r9)
vlgvf %r6,%v15,4000(%r9)
vlgvg %r6,%v15,4000(%r9)
vllez %v15,4000(%r6,%r9),13
vllezb %v15,4000(%r6,%r9)
vllezh %v15,4000(%r6,%r9)
vllezf %v15,4000(%r6,%r9)
vllezg %v15,4000(%r6,%r9)
vlbb %v15,4000(%r6,%r9),13
vlvg %v15,%r6,4000(%r9),13
vlvgb %v15,%r6,4000(%r9)
vlvgh %v15,%r6,4000(%r9)
vlvgf %v15,%r6,4000(%r9)
vlvgg %v15,%r6,4000(%r9)
vlvgp %v15,%r6,%r9
vll %v15,%r6,4000(%r9)
# Vector merge high/low.
vmrh %v15,%v17,%v20,13
vmrhb %v15,%v17,%v20
vmrhh %v15,%v17,%v20
vmrhf %v15,%v17,%v20
vmrhg %v15,%v17,%v20
vmrl %v15,%v17,%v20,13
vmrlb %v15,%v17,%v20
vmrlh %v15,%v17,%v20
vmrlf %v15,%v17,%v20
vmrlg %v15,%v17,%v20
# Vector pack (plain, saturated signed/logical, CC-setting "s" forms).
vpk %v15,%v17,%v20,13
vpkh %v15,%v17,%v20
vpkf %v15,%v17,%v20
vpkg %v15,%v17,%v20
vpks %v15,%v17,%v20,13,12
vpksh %v15,%v17,%v20
vpksf %v15,%v17,%v20
vpksg %v15,%v17,%v20
vpkshs %v15,%v17,%v20
vpksfs %v15,%v17,%v20
vpksgs %v15,%v17,%v20
vpkls %v15,%v17,%v20,13,12
vpklsh %v15,%v17,%v20
vpklsf %v15,%v17,%v20
vpklsg %v15,%v17,%v20
vpklshs %v15,%v17,%v20
vpklsfs %v15,%v17,%v20
vpklsgs %v15,%v17,%v20
# Vector permute / replicate.
vperm %v15,%v17,%v20,%v24
vpdi %v15,%v17,%v20,13
vrep %v15,%v17,65533,12
vrepb %v15,%v17,65533
vreph %v15,%v17,65533
vrepf %v15,%v17,65533
vrepg %v15,%v17,65533
vrepi %v15,-32765,12
vrepib %v15,-32765
vrepih %v15,-32765
vrepif %v15,-32765
vrepig %v15,-32765
vscef %v15,4000(%r6,%r9),13
vsceg %v15,4000(%r6,%r9),13
vsel %v15,%v17,%v20,%v24
vseg %v15,%v17,13
vsegb %v15,%v17
vsegh %v15,%v17
vsegf %v15,%v17
vsteb %v15,4000(%r6,%r9),13
vsteh %v15,4000(%r6,%r9),13
vstef %v15,4000(%r6,%r9),13
vsteg %v15,4000(%r6,%r9),13
vstl %v15,%r6,4000(%r9)
# Vector unpack (high/low, logical).
vuph %v15,%v17,13
vuphb %v15,%v17
vuphh %v15,%v17
vuphf %v15,%v17
vuplh %v15,%v17,13
vuplhb %v15,%v17
vuplhh %v15,%v17
vuplhf %v15,%v17
vupl %v15,%v17,13
vuplb %v15,%v17
vuplhw %v15,%v17
vuplf %v15,%v17
vupll %v15,%v17,13
vupllb %v15,%v17
vupllh %v15,%v17
vupllf %v15,%v17
# Vector integer add / add-with-carry.
va %v15,%v17,%v20,13
vab %v15,%v17,%v20
vah %v15,%v17,%v20
vaf %v15,%v17,%v20
vag %v15,%v17,%v20
vaq %v15,%v17,%v20
vacc %v15,%v17,%v20,13
vaccb %v15,%v17,%v20
vacch %v15,%v17,%v20
vaccf %v15,%v17,%v20
vaccg %v15,%v17,%v20
vaccq %v15,%v17,%v20
vac %v15,%v17,%v20,%v24,13
vacq %v15,%v17,%v20,%v24
vaccc %v15,%v17,%v20,%v24,13
vacccq %v15,%v17,%v20,%v24
vn %v15,%v17,%v20
vnc %v15,%v17,%v20
# Vector average (signed/logical) and checksum.
vavg %v15,%v17,%v20,13
vavgb %v15,%v17,%v20
vavgh %v15,%v17,%v20
vavgf %v15,%v17,%v20
vavgg %v15,%v17,%v20
vavgl %v15,%v17,%v20,13
vavglb %v15,%v17,%v20
vavglh %v15,%v17,%v20
vavglf %v15,%v17,%v20
vavglg %v15,%v17,%v20
vcksm %v15,%v17,%v20
# Vector element compare and integer compares (with CC-setting forms).
vec %v15,%v17,13
vecb %v15,%v17
vech %v15,%v17
vecf %v15,%v17
vecg %v15,%v17
vecl %v15,%v17,13
veclb %v15,%v17
veclh %v15,%v17
veclf %v15,%v17
veclg %v15,%v17
vceq %v15,%v17,%v20,13,12
vceqb %v15,%v17,%v20
vceqh %v15,%v17,%v20
vceqf %v15,%v17,%v20
vceqg %v15,%v17,%v20
vceqbs %v15,%v17,%v20
vceqhs %v15,%v17,%v20
vceqfs %v15,%v17,%v20
vceqgs %v15,%v17,%v20
vch %v15,%v17,%v20,13,12
vchb %v15,%v17,%v20
vchh %v15,%v17,%v20
vchf %v15,%v17,%v20
vchg %v15,%v17,%v20
vchbs %v15,%v17,%v20
vchhs %v15,%v17,%v20
vchfs %v15,%v17,%v20
vchgs %v15,%v17,%v20
vchl %v15,%v17,%v20,13,12
vchlb %v15,%v17,%v20
vchlh %v15,%v17,%v20
vchlf %v15,%v17,%v20
vchlg %v15,%v17,%v20
vchlbs %v15,%v17,%v20
vchlhs %v15,%v17,%v20
vchlfs %v15,%v17,%v20
vchlgs %v15,%v17,%v20
# Vector count leading/trailing zeros, XOR, Galois-field multiply.
vclz %v15,%v17,13
vclzb %v15,%v17
vclzh %v15,%v17
vclzf %v15,%v17
vclzg %v15,%v17
vctz %v15,%v17,13
vctzb %v15,%v17
vctzh %v15,%v17
vctzf %v15,%v17
vctzg %v15,%v17
vx %v15,%v17,%v20
vgfm %v15,%v17,%v20,13
vgfmb %v15,%v17,%v20
vgfmh %v15,%v17,%v20
vgfmf %v15,%v17,%v20
vgfmg %v15,%v17,%v20
vgfma %v15,%v17,%v20,%v24,13
vgfmab %v15,%v17,%v20,%v24
vgfmah %v15,%v17,%v20,%v24
vgfmaf %v15,%v17,%v20,%v24
vgfmag %v15,%v17,%v20,%v24
# Vector load complement/positive, min/max (signed and logical).
vlc %v15,%v17,13
vlcb %v15,%v17
vlch %v15,%v17
vlcf %v15,%v17
vlcg %v15,%v17
vlp %v15,%v17,13
vlpb %v15,%v17
vlph %v15,%v17
vlpf %v15,%v17
vlpg %v15,%v17
vmx %v15,%v17,%v20,13
vmxb %v15,%v17,%v20
vmxh %v15,%v17,%v20
vmxf %v15,%v17,%v20
vmxg %v15,%v17,%v20
vmxl %v15,%v17,%v20,13
vmxlb %v15,%v17,%v20
vmxlh %v15,%v17,%v20
vmxlf %v15,%v17,%v20
vmxlg %v15,%v17,%v20
vmn %v15,%v17,%v20,13
vmnb %v15,%v17,%v20
vmnh %v15,%v17,%v20
vmnf %v15,%v17,%v20
vmng %v15,%v17,%v20
vmnl %v15,%v17,%v20,13
vmnlb %v15,%v17,%v20
vmnlh %v15,%v17,%v20
vmnlf %v15,%v17,%v20
vmnlg %v15,%v17,%v20
# Vector multiply-and-add families (low/high/even/odd, logical variants).
vmal %v15,%v17,%v20,%v24,13
vmalb %v15,%v17,%v20,%v24
vmalhw %v15,%v17,%v20,%v24
vmalf %v15,%v17,%v20,%v24
vmah %v15,%v17,%v20,%v24,13
vmahb %v15,%v17,%v20,%v24
vmahh %v15,%v17,%v20,%v24
vmahf %v15,%v17,%v20,%v24
vmalh %v15,%v17,%v20,%v24,13
vmalhb %v15,%v17,%v20,%v24
vmalhh %v15,%v17,%v20,%v24
vmalhf %v15,%v17,%v20,%v24
vmae %v15,%v17,%v20,%v24,13
vmaeb %v15,%v17,%v20,%v24
vmaeh %v15,%v17,%v20,%v24
vmaef %v15,%v17,%v20,%v24
vmale %v15,%v17,%v20,%v24,13
vmaleb %v15,%v17,%v20,%v24
vmaleh %v15,%v17,%v20,%v24
vmalef %v15,%v17,%v20,%v24
vmao %v15,%v17,%v20,%v24,13
vmaob %v15,%v17,%v20,%v24
vmaoh %v15,%v17,%v20,%v24
vmaof %v15,%v17,%v20,%v24
vmalo %v15,%v17,%v20,%v24,13
vmalob %v15,%v17,%v20,%v24
vmaloh %v15,%v17,%v20,%v24
vmalof %v15,%v17,%v20,%v24
# Vector multiply (high/low/even/odd, logical variants).
vmh %v15,%v17,%v20,13
vmhb %v15,%v17,%v20
vmhh %v15,%v17,%v20
vmhf %v15,%v17,%v20
vmlh %v15,%v17,%v20,13
vmlhb %v15,%v17,%v20
vmlhh %v15,%v17,%v20
vmlhf %v15,%v17,%v20
vml %v15,%v17,%v20,13
vmlb %v15,%v17,%v20
vmlhw %v15,%v17,%v20
vmlf %v15,%v17,%v20
vme %v15,%v17,%v20,13
vmeb %v15,%v17,%v20
vmeh %v15,%v17,%v20
vmef %v15,%v17,%v20
vmle %v15,%v17,%v20,13
vmleb %v15,%v17,%v20
vmleh %v15,%v17,%v20
vmlef %v15,%v17,%v20
vmo %v15,%v17,%v20,13
vmob %v15,%v17,%v20
vmoh %v15,%v17,%v20
vmof %v15,%v17,%v20
vmlo %v15,%v17,%v20,13
vmlob %v15,%v17,%v20
vmloh %v15,%v17,%v20
vmlof %v15,%v17,%v20
# Vector NOR/OR, population count.
vno %v15,%v17,%v20
vnot %v15,%v17
vo %v15,%v17,%v20
vpopct %v15,%v17,13
# Vector element rotate and shift (by vector and by immediate/storage).
verllv %v15,%v17,%v20,13
verllvb %v15,%v17,%v20
verllvh %v15,%v17,%v20
verllvf %v15,%v17,%v20
verllvg %v15,%v17,%v20
verll %v15,%v17,4000(%r6),13
verllb %v15,%v17,4000(%r6)
verllh %v15,%v17,4000(%r6)
verllf %v15,%v17,4000(%r6)
verllg %v15,%v17,4000(%r6)
verim %v15,%v17,%v20,253,12
verimb %v15,%v17,%v20,253
verimh %v15,%v17,%v20,253
verimf %v15,%v17,%v20,253
verimg %v15,%v17,%v20,253
veslv %v15,%v17,%v20,13
veslvb %v15,%v17,%v20
veslvh %v15,%v17,%v20
veslvf %v15,%v17,%v20
veslvg %v15,%v17,%v20
vesl %v15,%v17,4000(%r6),13
veslb %v15,%v17,4000(%r6)
veslh %v15,%v17,4000(%r6)
veslf %v15,%v17,4000(%r6)
veslg %v15,%v17,4000(%r6)
vesrav %v15,%v17,%v20,13
vesravb %v15,%v17,%v20
vesravh %v15,%v17,%v20
vesravf %v15,%v17,%v20
vesravg %v15,%v17,%v20
vesra %v15,%v17,4000(%r6),13
vesrab %v15,%v17,4000(%r6)
vesrah %v15,%v17,4000(%r6)
vesraf %v15,%v17,4000(%r6)
vesrag %v15,%v17,4000(%r6)
vesrlv %v15,%v17,%v20,13
vesrlvb %v15,%v17,%v20
vesrlvh %v15,%v17,%v20
vesrlvf %v15,%v17,%v20
vesrlvg %v15,%v17,%v20
vesrl %v15,%v17,4000(%r6),13
vesrlb %v15,%v17,4000(%r6)
vesrlh %v15,%v17,4000(%r6)
vesrlf %v15,%v17,4000(%r6)
vesrlg %v15,%v17,4000(%r6)
# Vector whole-register shifts and shift-double.
vsl %v15,%v17,%v20
vslb %v15,%v17,%v20
vsldb %v15,%v17,%v20,253
vsra %v15,%v17,%v20
vsrab %v15,%v17,%v20
vsrl %v15,%v17,%v20
vsrlb %v15,%v17,%v20
# Vector subtract and subtract-with-borrow.
vs %v15,%v17,%v20,13
vsb %v15,%v17,%v20
vsh %v15,%v17,%v20
vsf %v15,%v17,%v20
vsg %v15,%v17,%v20
vsq %v15,%v17,%v20
vscbi %v15,%v17,%v20,13
vscbib %v15,%v17,%v20
vscbih %v15,%v17,%v20
vscbif %v15,%v17,%v20
vscbig %v15,%v17,%v20
vscbiq %v15,%v17,%v20
vsbi %v15,%v17,%v20,%v24,13
vsbiq %v15,%v17,%v20,%v24
vsbcbi %v15,%v17,%v20,%v24,13
vsbcbiq %v15,%v17,%v20,%v24
# Vector sum-across (word/quadword/doubleword) and test-under-mask.
vsumg %v15,%v17,%v20,13
vsumgh %v15,%v17,%v20
vsumgf %v15,%v17,%v20
vsumq %v15,%v17,%v20,13
vsumqf %v15,%v17,%v20
vsumqg %v15,%v17,%v20
vsum %v15,%v17,%v20,13
vsumb %v15,%v17,%v20
vsumh %v15,%v17,%v20
vtm %v15,%v17
# Vector string ops: find-any-element-equal (with zero-search "z" and
# CC-setting "s" forms), find-element-equal / not-equal, isolate string.
vfae %v15,%v17,%v20,13
vfae %v15,%v17,%v20,13,12
vfaeb %v15,%v17,%v20
vfaeb %v15,%v17,%v20,13
vfaeh %v15,%v17,%v20
vfaeh %v15,%v17,%v20,13
vfaef %v15,%v17,%v20
vfaef %v15,%v17,%v20,13
vfaebs %v15,%v17,%v20
vfaebs %v15,%v17,%v20,13
vfaehs %v15,%v17,%v20
vfaehs %v15,%v17,%v20,13
vfaefs %v15,%v17,%v20
vfaefs %v15,%v17,%v20,13
vfaezb %v15,%v17,%v20
vfaezb %v15,%v17,%v20,13
vfaezh %v15,%v17,%v20
vfaezh %v15,%v17,%v20,13
vfaezf %v15,%v17,%v20
vfaezf %v15,%v17,%v20,13
vfaezbs %v15,%v17,%v20
vfaezbs %v15,%v17,%v20,13
vfaezhs %v15,%v17,%v20
vfaezhs %v15,%v17,%v20,13
vfaezfs %v15,%v17,%v20
vfaezfs %v15,%v17,%v20,13
vfee %v15,%v17,%v20,13
vfee %v15,%v17,%v20,13,12
vfeeb %v15,%v17,%v20
vfeeb %v15,%v17,%v20,13
vfeeh %v15,%v17,%v20
vfeeh %v15,%v17,%v20,13
vfeef %v15,%v17,%v20
vfeef %v15,%v17,%v20,13
vfeebs %v15,%v17,%v20
vfeehs %v15,%v17,%v20
vfeefs %v15,%v17,%v20
vfeezb %v15,%v17,%v20
vfeezh %v15,%v17,%v20
vfeezf %v15,%v17,%v20
vfeezbs %v15,%v17,%v20
vfeezhs %v15,%v17,%v20
vfeezfs %v15,%v17,%v20
vfene %v15,%v17,%v20,13
vfene %v15,%v17,%v20,13,12
vfeneb %v15,%v17,%v20
vfeneb %v15,%v17,%v20,13
vfeneh %v15,%v17,%v20
vfeneh %v15,%v17,%v20,13
vfenef %v15,%v17,%v20
vfenef %v15,%v17,%v20,13
vfenebs %v15,%v17,%v20
vfenehs %v15,%v17,%v20
vfenefs %v15,%v17,%v20
vfenezb %v15,%v17,%v20
vfenezh %v15,%v17,%v20
vfenezf %v15,%v17,%v20
vfenezbs %v15,%v17,%v20
vfenezhs %v15,%v17,%v20
vfenezfs %v15,%v17,%v20
vistr %v15,%v17,13
vistr %v15,%v17,13,12
vistrb %v15,%v17
vistrb %v15,%v17,13
vistrh %v15,%v17
vistrh %v15,%v17,13
vistrf %v15,%v17
vistrf %v15,%v17,13
vistrbs %v15,%v17
vistrhs %v15,%v17
vistrfs %v15,%v17
# Vector string-range-compare.
vstrc %v15,%v17,%v20,%v24,13
vstrc %v15,%v17,%v20,%v24,13,12
vstrcb %v15,%v17,%v20,%v24
vstrcb %v15,%v17,%v20,%v24,13
vstrch %v15,%v17,%v20,%v24
vstrch %v15,%v17,%v20,%v24,13
vstrcf %v15,%v17,%v20,%v24
vstrcf %v15,%v17,%v20,%v24,13
vstrcbs %v15,%v17,%v20,%v24
vstrcbs %v15,%v17,%v20,%v24,13
vstrchs %v15,%v17,%v20,%v24
vstrchs %v15,%v17,%v20,%v24,13
vstrcfs %v15,%v17,%v20,%v24
vstrcfs %v15,%v17,%v20,%v24,13
vstrczb %v15,%v17,%v20,%v24
vstrczb %v15,%v17,%v20,%v24,13
vstrczh %v15,%v17,%v20,%v24
vstrczh %v15,%v17,%v20,%v24,13
vstrczf %v15,%v17,%v20,%v24
vstrczf %v15,%v17,%v20,%v24,13
vstrczbs %v15,%v17,%v20,%v24
vstrczbs %v15,%v17,%v20,%v24,13
vstrczhs %v15,%v17,%v20,%v24
vstrczhs %v15,%v17,%v20,%v24,13
vstrczfs %v15,%v17,%v20,%v24
vstrczfs %v15,%v17,%v20,%v24,13
# Vector FP (double) arithmetic, compare, conversions.
vfa %v15,%v17,%v20,13,12
vfadb %v15,%v17,%v20
wfadb %v15,%v17,%v20
wfc %v15,%v17,13,12
wfcdb %v15,%v17
wfk %v15,%v17,13,12
wfkdb %v15,%v17
vfce %v15,%v17,%v20,13,12,11
vfcedb %v15,%v17,%v20
vfcedbs %v15,%v17,%v20
wfcedb %v15,%v17,%v20
wfcedbs %v15,%v17,%v20
vfch %v15,%v17,%v20,13,12,11
vfchdb %v15,%v17,%v20
vfchdbs %v15,%v17,%v20
wfchdb %v15,%v17,%v20
wfchdbs %v15,%v17,%v20
vfche %v15,%v17,%v20,13,12,11
vfchedb %v15,%v17,%v20
vfchedbs %v15,%v17,%v20
wfchedb %v15,%v17,%v20
wfchedbs %v15,%v17,%v20
vcdg %v15,%v17,13,12,11
vcdgb %v15,%v17,13,12
wcdgb %v15,%v17,13,12
vcdlg %v15,%v17,13,12,11
vcdlgb %v15,%v17,13,12
wcdlgb %v15,%v17,13,12
vcgd %v15,%v17,13,12,11
vcgdb %v15,%v17,13,12
wcgdb %v15,%v17,13,12
vclgd %v15,%v17,13,12,11
vclgdb %v15,%v17,13,12
wclgdb %v15,%v17,13,12
vfd %v15,%v17,%v20,13,12
vfddb %v15,%v17,%v20
wfddb %v15,%v17,%v20
vfi %v15,%v17,13,12,11
vfidb %v15,%v17,13,12
wfidb %v15,%v17,13,12
vlde %v15,%v17,13,12
vldeb %v15,%v17
wldeb %v15,%v17
vled %v15,%v17,13,12,11
vledb %v15,%v17,13,12
wledb %v15,%v17,13,12
vfm %v15,%v17,%v20,13,12
vfmdb %v15,%v17,%v20
wfmdb %v15,%v17,%v20
vfma %v15,%v17,%v20,%v24,13,12
vfmadb %v15,%v17,%v20,%v24
wfmadb %v15,%v17,%v20,%v24
vfms %v15,%v17,%v20,%v24,13,12
vfmsdb %v15,%v17,%v20,%v24
wfmsdb %v15,%v17,%v20,%v24
vfpso %v15,%v17,13,12,11
vfpsodb %v15,%v17,13
wfpsodb %v15,%v17,13
vflcdb %v15,%v17
wflcdb %v15,%v17
vflndb %v15,%v17
wflndb %v15,%v17
vflpdb %v15,%v17
wflpdb %v15,%v17
vfsq %v15,%v17,13,12
vfsqdb %v15,%v17
wfsqdb %v15,%v17
vfs %v15,%v17,%v20,13,12
vfsdb %v15,%v17,%v20
wfsdb %v15,%v17,%v20
vftci %v15,%v17,4093,12,11
vftcidb %v15,%v17,4093
wftcidb %v15,%v17,4093
cdpt %f3,4000(251,%r6),12
cxpt %f1,4000(251,%r6),12
cpdt %f3,4000(251,%r6),12
cpxt %f1,4000(251,%r6),12
locfhr %r6,%r9,13
locfhro %r6,%r9
locfhrh %r6,%r9
locfhrp %r6,%r9
locfhrnle %r6,%r9
locfhrl %r6,%r9
locfhrm %r6,%r9
locfhrnhe %r6,%r9
locfhrlh %r6,%r9
locfhrne %r6,%r9
locfhrnz %r6,%r9
locfhre %r6,%r9
locfhrz %r6,%r9
locfhrnlh %r6,%r9
locfhrhe %r6,%r9
locfhrnl %r6,%r9
locfhrnm %r6,%r9
locfhrle %r6,%r9
locfhrnh %r6,%r9
locfhrnp %r6,%r9
locfhrno %r6,%r9
locfh %r6,-10000(%r9),13
locfho %r6,-10000(%r9)
locfhh %r6,-10000(%r9)
locfhp %r6,-10000(%r9)
locfhnle %r6,-10000(%r9)
locfhl %r6,-10000(%r9)
locfhm %r6,-10000(%r9)
locfhnhe %r6,-10000(%r9)
locfhlh %r6,-10000(%r9)
locfhne %r6,-10000(%r9)
locfhnz %r6,-10000(%r9)
locfhe %r6,-10000(%r9)
locfhz %r6,-10000(%r9)
locfhnlh %r6,-10000(%r9)
locfhhe %r6,-10000(%r9)
locfhnl %r6,-10000(%r9)
locfhnm %r6,-10000(%r9)
locfhle %r6,-10000(%r9)
locfhnh %r6,-10000(%r9)
locfhnp %r6,-10000(%r9)
locfhno %r6,-10000(%r9)
lochi %r6,-32765,12
lochio %r6,-32765
lochih %r6,-32765
lochip %r6,-32765
lochinle %r6,-32765
lochil %r6,-32765
lochim %r6,-32765
lochinhe %r6,-32765
lochilh %r6,-32765
lochine %r6,-32765
lochinz %r6,-32765
lochie %r6,-32765
lochiz %r6,-32765
lochinlh %r6,-32765
lochihe %r6,-32765
lochinl %r6,-32765
lochinm %r6,-32765
lochile %r6,-32765
lochinh %r6,-32765
lochinp %r6,-32765
lochino %r6,-32765
locghi %r6,-32765,12
locghio %r6,-32765
locghih %r6,-32765
locghip %r6,-32765
locghinle %r6,-32765
locghil %r6,-32765
locghim %r6,-32765
locghinhe %r6,-32765
locghilh %r6,-32765
locghine %r6,-32765
locghinz %r6,-32765
locghie %r6,-32765
locghiz %r6,-32765
locghinlh %r6,-32765
locghihe %r6,-32765
locghinl %r6,-32765
locghinm %r6,-32765
locghile %r6,-32765
locghinh %r6,-32765
locghinp %r6,-32765
locghino %r6,-32765
lochhi %r6,-32765,12
lochhio %r6,-32765
lochhih %r6,-32765
lochhip %r6,-32765
lochhinle %r6,-32765
lochhil %r6,-32765
lochhim %r6,-32765
lochhinhe %r6,-32765
lochhilh %r6,-32765
lochhine %r6,-32765
lochhinz %r6,-32765
lochhie %r6,-32765
lochhiz %r6,-32765
lochhinlh %r6,-32765
lochhihe %r6,-32765
lochhinl %r6,-32765
lochhinm %r6,-32765
lochhile %r6,-32765
lochhinh %r6,-32765
lochhinp %r6,-32765
lochhino %r6,-32765
stocfh %r6,-10000(%r9),13
stocfho %r6,-10000(%r9)
stocfhh %r6,-10000(%r9)
stocfhp %r6,-10000(%r9)
stocfhnle %r6,-10000(%r9)
stocfhl %r6,-10000(%r9)
stocfhm %r6,-10000(%r9)
stocfhnhe %r6,-10000(%r9)
stocfhlh %r6,-10000(%r9)
stocfhne %r6,-10000(%r9)
stocfhnz %r6,-10000(%r9)
stocfhe %r6,-10000(%r9)
stocfhz %r6,-10000(%r9)
stocfhnlh %r6,-10000(%r9)
stocfhhe %r6,-10000(%r9)
stocfhnl %r6,-10000(%r9)
stocfhnm %r6,-10000(%r9)
stocfhle %r6,-10000(%r9)
stocfhnh %r6,-10000(%r9)
stocfhnp %r6,-10000(%r9)
stocfhno %r6,-10000(%r9)
llzrgf %r6,-10000(%r9,%r11)
lzrf %r6,-10000(%r9,%r11)
lzrg %r6,-10000(%r9,%r11)
ppno %r6,%r9
vl %v15,4000(%r6,%r9)
vl %v15,4000(%r6,%r9),13
vlm %v15,%v17,4000(%r6)
vlm %v15,%v17,4000(%r6),13
vst %v15,4000(%r6,%r9)
vst %v15,4000(%r6,%r9),13
vstm %v15,%v17,4000(%r6)
vstm %v15,%v17,4000(%r6),13
# ==== concatenation boundary: gas/testsuite/gas/s390/zarch-z990.s (repo stsp/binutils-ia16, 3,296 bytes) ====
.text
foo:
ag %r6,-524288(%r5,%r10)
agf %r6,-524288(%r5,%r10)
ahy %r6,-524288(%r5,%r10)
alc %r6,-524288(%r5,%r10)
alcg %r6,-524288(%r5,%r10)
alg %r6,-524288(%r5,%r10)
algf %r6,-524288(%r5,%r10)
aly %r6,-524288(%r5,%r10)
ay %r6,-524288(%r5,%r10)
bctg %r6,-524288(%r5)
bxhg %r6,%r9,-524288(%r5)
bxleg %r6,%r9,-524288(%r5)
cdsg %r6,%r8,-524288(%r5)
cdsy %r6,%r8,-524288(%r5)
cg %r6,-524288(%r5,%r10)
cgf %r6,-524288(%r5,%r10)
chy %r6,-524288(%r5,%r10)
clg %r6,-524288(%r5,%r10)
clgf %r6,-524288(%r5,%r10)
cliy -524288(%r5),255
clmh %r6,15,-524288(%r5)
clmy %r6,15,-524288(%r5)
cly %r6,-524288(%r5,%r10)
csg %r6,%r9,-524288(%r5)
cspg %r6,%r9
csy %r6,%r9,-524288(%r5)
cvbg %r6,-524288(%r5,%r10)
cvby %r6,-524288(%r5,%r10)
cvdg %r6,-524288(%r5,%r10)
cvdy %r6,-524288(%r5,%r10)
cy %r6,-524288(%r5,%r10)
dl %r6,-524288(%r5,%r10)
dlg %r6,-524288(%r5,%r10)
dsg %r6,-524288(%r5,%r10)
dsgf %r6,-524288(%r5,%r10)
icmh %r6,15,-524288(%r5)
icmy %r6,15,-524288(%r5)
icy %r6,-524288(%r5,%r10)
idte %r6,%r9,%r11
idte %r6,%r9,%r11,13
lamy %a6,%a9,-524288(%r5)
lay %r6,-524288(%r5,%r10)
lb %r6,-524288(%r5,%r10)
lctlg %c6,%c9,-524288(%r5)
ldy %f6,-524288(%r5,%r10)
ley %f6,-524288(%r5,%r10)
lg %r6,-524288(%r5,%r10)
lgb %r6,-524288(%r5,%r10)
lgf %r6,-524288(%r5,%r10)
lgh %r6,-524288(%r5,%r10)
lhy %r6,-524288(%r5,%r10)
llgc %r6,-524288(%r5,%r10)
llgf %r6,-524288(%r5,%r10)
llgh %r6,-524288(%r5,%r10)
llgt %r6,-524288(%r5,%r10)
lmg %r6,%r9,-524288(%r5)
lmh %r6,%r9,-524288(%r5)
lmy %r6,%r9,-524288(%r5)
lpq %r6,-524288(%r5,%r10)
lrag %r6,-524288(%r5,%r10)
lray %r6,-524288(%r5,%r10)
lrv %r6,-524288(%r5,%r10)
lrvg %r6,-524288(%r5,%r10)
lrvh %r6,-524288(%r5,%r10)
ly %r6,-524288(%r5,%r10)
mad %f6,%f9,4095(%r5,%r10)
madr %f6,%f9,%f5
mae %f6,%f9,4095(%r5,%r10)
maer %f6,%f9,%f5
ml %r6,-524288(%r5,%r10)
mlg %r6,-524288(%r5,%r10)
msd %f6,%f9,4095(%r5,%r10)
msdr %f6,%f9,%f5
mse %f6,%f9,4095(%r5,%r10)
mser %f6,%f9,%f5
msg %r6,-524288(%r5,%r10)
msgf %r6,-524288(%r5,%r10)
msy %r6,-524288(%r5,%r10)
mvclu %r6,%r8,-524288(%r5)
mviy -524288(%r5),255
ng %r6,-524288(%r5,%r10)
niy -524288(%r5),255
ny %r6,-524288(%r5,%r10)
og %r6,-524288(%r5,%r10)
oiy -524288(%r5),255
oy %r6,-524288(%r5,%r10)
rll %r6,%r9,-524288(%r5)
rllg %r6,%r9,-524288(%r5)
sg %r6,-524288(%r5,%r10)
sgf %r6,-524288(%r5,%r10)
shy %r6,-524288(%r5,%r10)
slag %r6,%r9,-524288(%r5)
slb %r6,-524288(%r5,%r10)
slbg %r6,-524288(%r5,%r10)
slg %r6,-524288(%r5,%r10)
slgf %r6,-524288(%r5,%r10)
sllg %r6,%r9,-524288(%r5)
sly %r6,-524288(%r5,%r10)
srag %r6,%r9,-524288(%r5)
srlg %r6,%r9,-524288(%r5)
stamy %a6,%a9,-524288(%r5)
stcmh %r6,15,-524288(%r5)
stcmy %r6,15,-524288(%r5)
stctg %c6,%c9,-524288(%r5)
stcy %r6,-524288(%r5,%r10)
stdy %f6,-524288(%r5,%r10)
stey %f6,-524288(%r5,%r10)
stg %r6,-524288(%r5,%r10)
sthy %r6,-524288(%r5,%r10)
stmg %r6,%r9,-524288(%r5)
stmh %r6,%r9,-524288(%r5)
stmy %r6,%r9,-524288(%r5)
stpq %r6,-524288(%r5,%r10)
strv %r6,-524288(%r5,%r10)
strvg %r6,-524288(%r5,%r10)
strvh %r6,-524288(%r5,%r10)
sty %r6,-524288(%r5,%r10)
sy %r6,-524288(%r5,%r10)
tmy -524288(%r5),255
tracg %r6,%r9,-524288(%r5)
xg %r6,-524288(%r5,%r10)
xiy -524288(%r5),255
xy %r6,-524288(%r5,%r10)
epair %r6
esair %r6
pti %r6,%r5
ssair %r6
# ==== concatenation boundary: gas/testsuite/gas/s390/zarch-z9-109.s (repo stsp/binutils-ia16, 1,288 bytes) ====
.text
foo:
afi %r6,-2147483648
agfi %r6,-2147483648
alfi %r6,4294967295
algfi %r6,4294967295
nihf %r6,4294967295
nilf %r6,4294967295
cfi %r6,-2147483648
cgfi %r6,-2147483648
clfi %r6,4294967295
clgfi %r6,4294967295
xihf %r6,4294967295
xilf %r6,4294967295
iihf %r6,4294967295
iilf %r6,4294967295
flogr %r6,%r9
lt %r6,-524288(%r5,%r10)
ltg %r6,-524288(%r5,%r10)
lbr %r6,%r9
lgbr %r6,%r9
lhr %r6,%r9
lghr %r6,%r9
lgfi %r6,-2147483648
llc %r6,-524288(%r5,%r10)
llcr %r6,%r9
llgcr %r6,%r9
llh %r6,-524288(%r5,%r10)
llhr %r6,%r9
llghr %r6,%r9
llihf %r6,4294967295
llilf %r6,4294967295
oihf %r6,4294967295
oilf %r6,4294967295
slfi %r6,4294967295
slgfi %r6,4294967295
stfle 4095(%r5)
stckf 4095(%r5)
mvcos 4095(%r5),4095(%r10),%r6
lptea %r6,%r9,%r5,15
sske %r6,%r9,15
sske %r6,%r9
cu24 %r6,%r8,15
cu24 %r6,%r8
cu21 %r6,%r8,15
cu21 %r6,%r8
cu42 %r6,%r8
cu41 %r6,%r8
cu12 %r6,%r8,15
cu12 %r6,%r8
cu14 %r6,%r8,15
cu14 %r6,%r8
myr %f6,%f9,%f5
myhr %f6,%f9,%f5
mylr %f6,%f9,%f5
my %f5,%f9,4095(%r5,%r10)
myh %f6,%f9,4095(%r5,%r10)
myl %f6,%f9,4095(%r5,%r10)
mayr %f6,%f9,%f5
mayhr %f6,%f9,%f5
maylr %f6,%f9,%f5
may %f6,%f9,4095(%r5,%r10)
mayh %f6,%f9,4095(%r5,%r10)
mayl %f6,%f9,4095(%r5,%r10)
srstu %r6,%r7
trtr 4095(23,%r5),3333(%r10)
# ==== concatenation boundary: gas/testsuite/gas/i386/x86-64-notrack.s (repo stsp/binutils-ia16, 1,828 bytes) ====
# Check 64bit NOTRACK prefix
.allow_index_reg
.text
_start:
notrack call *%rax
notrack call *%r8
notrack jmp *%rax
notrack jmp *%r8
notrack call *(%rax)
notrack call *(%r8)
notrack jmp *(%rax)
notrack jmp *(%r8)
notrack call *(%eax)
notrack call *(%r8d)
notrack jmp *(%eax)
notrack jmp *(%r8d)
notrack bnd call *%rax
notrack bnd call *%r8
notrack bnd jmp *%rax
notrack bnd jmp *%r8
notrack bnd call *(%rax)
notrack bnd call *(%r8)
notrack bnd jmp *(%rax)
notrack bnd jmp *(%r8)
notrack bnd call *(%eax)
notrack bnd call *(%r8d)
notrack bnd jmp *(%eax)
notrack bnd jmp *(%r8d)
bnd notrack call *%rax
bnd notrack call *%r8
bnd notrack call *(%rax)
bnd notrack call *(%r8)
bnd notrack call *(%eax)
bnd notrack call *(%r8d)
.intel_syntax noprefix
notrack call rax
notrack call r8
notrack jmp rax
notrack jmp r8
notrack call QWORD PTR [rax]
notrack call QWORD PTR [r8]
notrack jmp QWORD PTR [rax]
notrack jmp QWORD PTR [r8]
notrack call QWORD PTR [eax]
notrack call QWORD PTR [r8d]
notrack jmp QWORD PTR [eax]
notrack jmp QWORD PTR [r8d]
notrack bnd call rax
notrack bnd call r8
notrack bnd jmp rax
notrack bnd jmp r8
notrack bnd call QWORD PTR [rax]
notrack bnd call QWORD PTR [r8]
notrack bnd jmp QWORD PTR [rax]
notrack bnd jmp QWORD PTR [r8]
notrack bnd call QWORD PTR [eax]
notrack bnd call QWORD PTR [r8d]
notrack bnd jmp QWORD PTR [eax]
notrack bnd jmp QWORD PTR [r8d]
bnd notrack call rax
bnd notrack call r8
bnd notrack call QWORD PTR [rax]
bnd notrack call QWORD PTR [r8]
bnd notrack call QWORD PTR [eax]
bnd notrack call QWORD PTR [r8d]
# bnd notrack callq *%rax
.byte 0xf2
.byte 0x3e
.byte 0xff
.byte 0xd0
# ds callw *%ax
.byte 0x3e
.byte 0x66
.byte 0xff
.byte 0xd0
# ds callw *%ax
.byte 0x66
.byte 0x3e
.byte 0xff
.byte 0xd0
# ==== concatenation boundary: gas/testsuite/gas/i386/x86-64-bundle.s (repo stsp/binutils-ia16, 3,065 bytes) ====
.bundle_align_mode 5
# We use these macros to test each pattern at every offset from
# bundle alignment, i.e. [0,31].
.macro offset_insn insn_name, offset
.p2align 5
\insn_name\()_offset_\offset\():
.if \offset
.space \offset, 0xf4
.endif
\insn_name
.endm
.macro test_offsets insn_name
offset_insn \insn_name, 0
offset_insn \insn_name, 1
offset_insn \insn_name, 2
offset_insn \insn_name, 3
offset_insn \insn_name, 4
offset_insn \insn_name, 5
offset_insn \insn_name, 6
offset_insn \insn_name, 7
offset_insn \insn_name, 8
offset_insn \insn_name, 9
offset_insn \insn_name, 10
offset_insn \insn_name, 11
offset_insn \insn_name, 12
offset_insn \insn_name, 13
offset_insn \insn_name, 14
offset_insn \insn_name, 15
offset_insn \insn_name, 16
offset_insn \insn_name, 17
offset_insn \insn_name, 18
offset_insn \insn_name, 19
offset_insn \insn_name, 20
offset_insn \insn_name, 21
offset_insn \insn_name, 22
offset_insn \insn_name, 23
offset_insn \insn_name, 24
offset_insn \insn_name, 25
offset_insn \insn_name, 26
offset_insn \insn_name, 27
offset_insn \insn_name, 28
offset_insn \insn_name, 29
offset_insn \insn_name, 30
offset_insn \insn_name, 31
.endm
# These are vanilla (non-relaxed) instructions of each length.
.macro test_1
clc
.endm
.macro test_2
add %eax,%eax
.endm
.macro test_3
and $3,%eax
.endm
.macro test_4
lock andl $3,(%rax)
.endm
.macro test_5
mov $0x11223344,%eax
.endm
.macro test_6
movl %eax,0x11223344(%rsi)
.endm
.macro test_7
movl $0x11223344,0x7f(%rsi)
.endm
.macro test_8
lock addl $0x11223344,0x10(%rsi)
.endm
.macro test_9
lock addl $0x11223344,%fs:0x10(%rsi)
.endm
.macro test_10
movl $0x11223344,0x7ff(%rsi)
.endm
.macro test_11
lock addl $0x11223344,0x7ff(%rsi)
.endm
.macro test_12
lock addl $0x11223344,%fs:0x7ff(%rsi)
.endm
.macro test_13
lock addl $0x11223344,%fs:0x7ff(%r11)
.endm
test_offsets test_1
test_offsets test_2
test_offsets test_3
test_offsets test_4
test_offsets test_5
test_offsets test_6
test_offsets test_7
test_offsets test_8
test_offsets test_9
test_offsets test_10
test_offsets test_11
test_offsets test_12
test_offsets test_13
# The only relaxation cases are the jump instructions.
# For each of the three flavors of jump (unconditional, conditional,
# and conditional with prediction), we test a case that can be relaxed
# to its shortest form, and one that must use the long form.
.macro jmp_2
jmp jmp_2_\@
movl $0xdeadbeef,%eax
jmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro jmp_5
jmp jmp_5_\@
.rept 128
clc
.endr
jmp_5_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_2
jz cjmp_2_\@
movl $0xdeadbeef,%eax
cjmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_6
jz cjmp_6_\@
.rept 128
clc
.endr
cjmp_6_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_3
jz,pt pjmp_3_\@
movl $0xdeadbeef,%eax
pjmp_3_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_7
jz,pt pjmp_7_\@
.rept 128
clc
.endr
pjmp_7_\@\():
movl $0xb00b,%eax
.endm
test_offsets jmp_2
test_offsets cjmp_2
test_offsets pjmp_3
test_offsets jmp_5
test_offsets cjmp_6
test_offsets pjmp_7
.p2align 5
hlt
# ==== concatenation boundary: gas/testsuite/gas/i386/x86-64-avx512f_vl-wig.s (repo stsp/binutils-ia16, 14,595 bytes) ====
# Check 64bit AVX512{F,VL} WIG instructions
.allow_index_reg
.text
_start:
vpmovsxbd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxbd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxbd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbd 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbd -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxbd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxbd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbd 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbd -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxbq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxbq 254(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbq 256(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbq -256(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbq -258(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxbq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxbq 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbq 512(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbq -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbq -516(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxwd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxwd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwd 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwd -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxwd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxwd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwd 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwd -2064(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxwq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxwq 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwq 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwq -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwq -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxwq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxwq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwq -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxbd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxbd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbd 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbd -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxbd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxbd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbd 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbd -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxbq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxbq 254(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbq 256(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbq -256(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbq -258(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxbq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxbq 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbq 512(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbq -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbq -516(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxwd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxwd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwd 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwd -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxwd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxwd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwd 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwd -2064(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxwq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxwq 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwq 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwq -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwq -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxwq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxwq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwq -1032(%rdx), %ymm30 # AVX512{F,VL}
.intel_syntax noprefix
vpmovsxbd xmm30, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxbd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxbd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxbd ymm30, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxbd ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxbd ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxbq xmm30, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rcx] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rdx+254] # AVX512{F,VL} Disp8
vpmovsxbq xmm30, WORD PTR [rdx+256] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rdx-256] # AVX512{F,VL} Disp8
vpmovsxbq xmm30, WORD PTR [rdx-258] # AVX512{F,VL}
vpmovsxbq ymm30, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxbq ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxbq ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxwd xmm30, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxwd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxwd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxwd ymm30, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovsxwd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovsxwd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovsxwq xmm30, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxwq xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxwq xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxwq ymm30, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxwq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxwq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxbd xmm30, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxbd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxbd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxbd ymm30, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxbd ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxbd ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxbq xmm30, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rcx] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rdx+254] # AVX512{F,VL} Disp8
vpmovzxbq xmm30, WORD PTR [rdx+256] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rdx-256] # AVX512{F,VL} Disp8
vpmovzxbq xmm30, WORD PTR [rdx-258] # AVX512{F,VL}
vpmovzxbq ymm30, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxbq ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxbq ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxwd xmm30, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxwd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxwd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxwd ymm30, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovzxwd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovzxwd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovzxwq xmm30, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxwq xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxwq xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxwq ymm30, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxwq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxwq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
# ==== concatenation boundary: gas/testsuite/gas/i386/x86-64-avx512f_vl.s (repo stsp/binutils-ia16, 715,565 bytes) ====
# Check 64bit AVX512{F,VL} instructions
.allow_index_reg
.text
_start:
vaddpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vaddpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vaddpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vaddpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vaddpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vaddpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vaddpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vaddpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vaddpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vaddps, XMM then YMM forms: register, {%k7} merge-masking, {%k7}{z}
# zero-masking, memory, SIB, {1to4}/{1to8} broadcast, and disp8*N
# compression boundaries (Disp8 = fits compressed 8-bit displacement;
# the neighbouring line is one element/vector past the limit).
vaddps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vaddps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vaddps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vaddps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vaddps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vaddps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vaddps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vaddps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vaddps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vaddps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vaddps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vaddps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vaddps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vaddps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vaddps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vaddps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vaddps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vaddps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vaddps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vaddps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# valignd (immediate-carrying form): both $0xab and $123 immediates,
# masking variants, memory/SIB operands, {1to4}/{1to8} broadcast, and
# disp8*N compression boundary cases for XMM and YMM vector lengths.
valignd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
valignd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
valignd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
valignd $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
valignd $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
valignd $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
valignd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
valignd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
valignd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
valignd $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
valignd $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
valignd $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vblendmpd / vblendmps: mask-driven blend, XMM and YMM lengths.
# Exercises register, masking ({%k7}, {%k7}{z}), memory, SIB,
# element-width broadcast ({1to2}/{1to4} for pd, {1to4}/{1to8} for ps),
# and disp8*N compression boundaries.
vblendmpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vblendmpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vblendmpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vblendmpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vblendmpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vblendmps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vblendmps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vblendmps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vblendmps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vblendmps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vblendmps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vblendmps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vblendmps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vblendmps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vblendmps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# Broadcast group: vbroadcastf32x4 / vbroadcasti32x4 (memory-only 128-bit
# tuple broadcast to YMM), vbroadcastsd (64-bit element, memory and XMM
# source), vbroadcastss (32-bit element, XMM and YMM destinations).
# Disp8 lines sit at the per-tuple disp8*N compression limit
# (16 bytes for x4 tuples, 8 for sd, 4 for ss).
vbroadcastf32x4 (%rcx), %ymm30 # AVX512{F,VL}
vbroadcastf32x4 (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcastf32x4 (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastf32x4 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcastf32x4 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastf32x4 2048(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastf32x4 -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastf32x4 -2064(%rdx), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 (%rcx), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcasti32x4 (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcasti32x4 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcasti32x4 2048(%rdx), %ymm30 # AVX512{F,VL}
vbroadcasti32x4 -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcasti32x4 -2064(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastsd (%rcx), %ymm30 # AVX512{F,VL}
vbroadcastsd (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcastsd (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastsd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcastsd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastsd 1024(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastsd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastsd -1032(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastsd %xmm29, %ymm30 # AVX512{F,VL}
vbroadcastsd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vbroadcastsd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastss (%rcx), %xmm30 # AVX512{F,VL}
vbroadcastss (%rcx), %xmm30{%k7} # AVX512{F,VL}
vbroadcastss (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vbroadcastss 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vbroadcastss 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vbroadcastss 512(%rdx), %xmm30 # AVX512{F,VL}
vbroadcastss -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vbroadcastss -516(%rdx), %xmm30 # AVX512{F,VL}
vbroadcastss (%rcx), %ymm30 # AVX512{F,VL}
vbroadcastss (%rcx), %ymm30{%k7} # AVX512{F,VL}
vbroadcastss (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vbroadcastss 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vbroadcastss 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastss 512(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastss -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vbroadcastss -516(%rdx), %ymm30 # AVX512{F,VL}
vbroadcastss %xmm29, %xmm30 # AVX512{F,VL}
vbroadcastss %xmm29, %xmm30{%k7} # AVX512{F,VL}
vbroadcastss %xmm29, %ymm30 # AVX512{F,VL}
vbroadcastss %xmm29, %ymm30{%k7} # AVX512{F,VL}
vbroadcastss %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
# vcmppd / vcmpps: compare into a mask register (%k5), optionally
# mask-qualified with {%k7}.  No {z} forms — zeroing does not apply to a
# mask destination.  Covers both immediates, memory/SIB, broadcast, and
# disp8*N boundaries for XMM and YMM lengths.
vcmppd $0xab, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmppd $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, 1016(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vcmppd $123, -1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -1032(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vcmppd $0xab, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmppd $0xab, %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vcmppd $123, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, (%rcx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, 1016(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, 1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vcmppd $123, -1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmppd $123, -1032(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %xmm28, %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, 508(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vcmpps $123, -512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -516(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmpps $0xab, %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vcmpps $123, %ymm28, %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, (%rcx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, 508(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, 512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vcmpps $123, -512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vcmpps $123, -516(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
# vcompresspd / vcompressps: store-form (register to memory, merge-masked
# store only — no {z} on a memory destination) and register-to-register
# forms with full masking.  Compressed-disp scale here is the element
# size (8 for pd, 4 for ps), so the Disp8 limits are 1016 / 508.
vcompresspd %xmm30, (%rcx) # AVX512{F,VL}
vcompresspd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vcompresspd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompresspd %xmm30, 1016(%rdx) # AVX512{F,VL} Disp8
vcompresspd %xmm30, 1024(%rdx) # AVX512{F,VL}
vcompresspd %xmm30, -1024(%rdx) # AVX512{F,VL} Disp8
vcompresspd %xmm30, -1032(%rdx) # AVX512{F,VL}
vcompresspd %ymm30, (%rcx) # AVX512{F,VL}
vcompresspd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vcompresspd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompresspd %ymm30, 1016(%rdx) # AVX512{F,VL} Disp8
vcompresspd %ymm30, 1024(%rdx) # AVX512{F,VL}
vcompresspd %ymm30, -1024(%rdx) # AVX512{F,VL} Disp8
vcompresspd %ymm30, -1032(%rdx) # AVX512{F,VL}
vcompresspd %xmm29, %xmm30 # AVX512{F,VL}
vcompresspd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcompresspd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcompresspd %ymm29, %ymm30 # AVX512{F,VL}
vcompresspd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcompresspd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcompressps %xmm30, (%rcx) # AVX512{F,VL}
vcompressps %xmm30, (%rcx){%k7} # AVX512{F,VL}
vcompressps %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompressps %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vcompressps %xmm30, 512(%rdx) # AVX512{F,VL}
vcompressps %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vcompressps %xmm30, -516(%rdx) # AVX512{F,VL}
vcompressps %ymm30, (%rcx) # AVX512{F,VL}
vcompressps %ymm30, (%rcx){%k7} # AVX512{F,VL}
vcompressps %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vcompressps %ymm30, 508(%rdx) # AVX512{F,VL} Disp8
vcompressps %ymm30, 512(%rdx) # AVX512{F,VL}
vcompressps %ymm30, -512(%rdx) # AVX512{F,VL} Disp8
vcompressps %ymm30, -516(%rdx) # AVX512{F,VL}
vcompressps %xmm29, %xmm30 # AVX512{F,VL}
vcompressps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcompressps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcompressps %ymm29, %ymm30 # AVX512{F,VL}
vcompressps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcompressps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
# vcvtdq2pd / vcvtdq2ps: signed dword to double/single conversion.
# vcvtdq2pd widens, so its memory operand is half the destination width
# (8 bytes for XMM dest, 16 for YMM dest) — hence the smaller Disp8
# limits (1016/2032) relative to same-width vcvtdq2ps.
vcvtdq2pd %xmm29, %xmm30 # AVX512{F,VL}
vcvtdq2pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtdq2pd (%rcx), %xmm30 # AVX512{F,VL}
vcvtdq2pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtdq2pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvtdq2pd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtdq2pd 1024(%rdx), %xmm30 # AVX512{F,VL}
vcvtdq2pd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtdq2pd -1032(%rdx), %xmm30 # AVX512{F,VL}
vcvtdq2pd 508(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtdq2pd 512(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtdq2pd -512(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtdq2pd -516(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtdq2pd %xmm29, %ymm30 # AVX512{F,VL}
vcvtdq2pd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vcvtdq2pd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtdq2pd (%rcx), %ymm30 # AVX512{F,VL}
vcvtdq2pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtdq2pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vcvtdq2pd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtdq2pd 2048(%rdx), %ymm30 # AVX512{F,VL}
vcvtdq2pd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtdq2pd -2064(%rdx), %ymm30 # AVX512{F,VL}
vcvtdq2pd 508(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vcvtdq2pd 512(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vcvtdq2pd -512(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vcvtdq2pd -516(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vcvtdq2ps %xmm29, %xmm30 # AVX512{F,VL}
vcvtdq2ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtdq2ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtdq2ps (%rcx), %xmm30 # AVX512{F,VL}
vcvtdq2ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtdq2ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtdq2ps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtdq2ps 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtdq2ps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtdq2ps -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtdq2ps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtdq2ps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtdq2ps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtdq2ps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtdq2ps %ymm29, %ymm30 # AVX512{F,VL}
vcvtdq2ps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvtdq2ps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtdq2ps (%rcx), %ymm30 # AVX512{F,VL}
vcvtdq2ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtdq2ps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvtdq2ps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtdq2ps 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvtdq2ps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtdq2ps -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvtdq2ps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtdq2ps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvtdq2ps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtdq2ps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# Narrowing double-source conversions: vcvtpd2dq, vcvtpd2ps, vcvtpd2udq.
# Both source lengths map to an XMM destination, so the memory forms are
# ambiguous in AT&T syntax and need explicit 'x' (128-bit source) or 'y'
# (256-bit source) mnemonic suffixes; the broadcast forms are unambiguous
# ({1to2} implies 128-bit, {1to4} implies 256-bit) and use the bare name.
vcvtpd2dq %xmm29, %xmm30 # AVX512{F,VL}
vcvtpd2dq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtpd2dq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtpd2dqx (%rcx), %xmm30 # AVX512{F,VL}
vcvtpd2dqx 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtpd2dq (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2dqx 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqx 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2dqx -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqx -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2dqx 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqx 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2dqx -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqx -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2dq %ymm29, %xmm30 # AVX512{F,VL}
vcvtpd2dq %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvtpd2dq %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtpd2dqy (%rcx), %xmm30 # AVX512{F,VL}
vcvtpd2dqy 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtpd2dq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2dqy 4064(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqy 4096(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2dqy -4096(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqy -4128(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2dqy 1016(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqy 1024(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2dqy -1024(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2dqy -1032(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2ps %xmm29, %xmm30 # AVX512{F,VL}
vcvtpd2ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtpd2ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtpd2psx (%rcx), %xmm30 # AVX512{F,VL}
vcvtpd2psx 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtpd2ps (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2psx 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psx 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2psx -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psx -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2psx 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psx 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2psx -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psx -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2ps %ymm29, %xmm30 # AVX512{F,VL}
vcvtpd2ps %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvtpd2ps %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtpd2psy (%rcx), %xmm30 # AVX512{F,VL}
vcvtpd2psy 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtpd2ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2psy 4064(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psy 4096(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2psy -4096(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psy -4128(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2psy 1016(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psy 1024(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2psy -1024(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2psy -1032(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2udq %xmm29, %xmm30 # AVX512{F,VL}
vcvtpd2udq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtpd2udq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtpd2udqx (%rcx), %xmm30 # AVX512{F,VL}
vcvtpd2udqx 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtpd2udq (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2udqx 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqx 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2udqx -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqx -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2udqx 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqx 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2udqx -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqx -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtpd2udq %ymm29, %xmm30 # AVX512{F,VL}
vcvtpd2udq %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvtpd2udq %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtpd2udqy (%rcx), %xmm30 # AVX512{F,VL}
vcvtpd2udqy 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtpd2udq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2udqy 4064(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqy 4096(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2udqy -4096(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqy -4128(%rdx), %xmm30 # AVX512{F,VL}
vcvtpd2udqy 1016(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqy 1024(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtpd2udqy -1024(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtpd2udqy -1032(%rdx){1to4}, %xmm30 # AVX512{F,VL}
# vcvtph2ps: half-precision to single conversion, XMM and YMM
# destinations.  No broadcast forms here; Disp8 lines mark the
# half-width-source compressed-displacement limits (1016 / 2032).
vcvtph2ps %xmm29, %xmm30 # AVX512{F,VL}
vcvtph2ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtph2ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtph2ps (%rcx), %xmm30 # AVX512{F,VL}
vcvtph2ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtph2ps 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtph2ps 1024(%rdx), %xmm30 # AVX512{F,VL}
vcvtph2ps -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtph2ps -1032(%rdx), %xmm30 # AVX512{F,VL}
vcvtph2ps %xmm29, %ymm30 # AVX512{F,VL}
vcvtph2ps %xmm29, %ymm30{%k7} # AVX512{F,VL}
vcvtph2ps %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtph2ps (%rcx), %ymm30 # AVX512{F,VL}
vcvtph2ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtph2ps 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtph2ps 2048(%rdx), %ymm30 # AVX512{F,VL}
vcvtph2ps -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtph2ps -2064(%rdx), %ymm30 # AVX512{F,VL}
# Single-precision source conversions: vcvtps2dq (signed dword),
# vcvtps2pd (widening to double — half-width memory source, hence the
# smaller Disp8 limits), vcvtps2ph (to half; register forms only here,
# with rounding-control immediate), and vcvtps2udq (unsigned dword).
vcvtps2dq %xmm29, %xmm30 # AVX512{F,VL}
vcvtps2dq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtps2dq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtps2dq (%rcx), %xmm30 # AVX512{F,VL}
vcvtps2dq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtps2dq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtps2dq 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtps2dq 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtps2dq -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtps2dq -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtps2dq 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtps2dq 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtps2dq -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtps2dq -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtps2dq %ymm29, %ymm30 # AVX512{F,VL}
vcvtps2dq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvtps2dq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtps2dq (%rcx), %ymm30 # AVX512{F,VL}
vcvtps2dq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtps2dq (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvtps2dq 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtps2dq 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvtps2dq -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtps2dq -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvtps2dq 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtps2dq 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvtps2dq -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtps2dq -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvtps2pd %xmm29, %xmm30 # AVX512{F,VL}
vcvtps2pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtps2pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtps2pd (%rcx), %xmm30 # AVX512{F,VL}
vcvtps2pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtps2pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvtps2pd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtps2pd 1024(%rdx), %xmm30 # AVX512{F,VL}
vcvtps2pd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtps2pd -1032(%rdx), %xmm30 # AVX512{F,VL}
vcvtps2pd 508(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtps2pd 512(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtps2pd -512(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtps2pd -516(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtps2pd %xmm29, %ymm30 # AVX512{F,VL}
vcvtps2pd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vcvtps2pd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtps2pd (%rcx), %ymm30 # AVX512{F,VL}
vcvtps2pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtps2pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vcvtps2pd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtps2pd 2048(%rdx), %ymm30 # AVX512{F,VL}
vcvtps2pd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtps2pd -2064(%rdx), %ymm30 # AVX512{F,VL}
vcvtps2pd 508(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vcvtps2pd 512(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vcvtps2pd -512(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vcvtps2pd -516(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vcvtps2ph $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vcvtps2ph $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtps2ph $123, %xmm29, %xmm30 # AVX512{F,VL}
vcvtps2ph $0xab, %ymm29, %xmm30 # AVX512{F,VL}
vcvtps2ph $0xab, %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvtps2ph $0xab, %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtps2ph $123, %ymm29, %xmm30 # AVX512{F,VL}
vcvtps2udq %xmm29, %xmm30 # AVX512{F,VL}
vcvtps2udq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtps2udq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtps2udq (%rcx), %xmm30 # AVX512{F,VL}
vcvtps2udq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtps2udq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtps2udq 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtps2udq 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtps2udq -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtps2udq -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtps2udq 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtps2udq 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtps2udq -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtps2udq -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtps2udq %ymm29, %ymm30 # AVX512{F,VL}
vcvtps2udq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvtps2udq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtps2udq (%rcx), %ymm30 # AVX512{F,VL}
vcvtps2udq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtps2udq (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvtps2udq 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtps2udq 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvtps2udq -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtps2udq -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvtps2udq 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtps2udq 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvtps2udq -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtps2udq -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# Truncating conversions: vcvttpd2dq (narrowing — needs x/y mnemonic
# suffixes on the ambiguous memory forms, as with vcvtpd2dq above) and
# vcvttps2dq (same-width, XMM/YMM), with the usual masking, broadcast,
# and disp8*N boundary coverage.
vcvttpd2dq %xmm29, %xmm30 # AVX512{F,VL}
vcvttpd2dq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvttpd2dq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttpd2dqx (%rcx), %xmm30 # AVX512{F,VL}
vcvttpd2dqx 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttpd2dq (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2dqx 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqx 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2dqx -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqx -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2dqx 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqx 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2dqx -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqx -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2dq %ymm29, %xmm30 # AVX512{F,VL}
vcvttpd2dq %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvttpd2dq %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttpd2dqy (%rcx), %xmm30 # AVX512{F,VL}
vcvttpd2dqy 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttpd2dq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvttpd2dqy 4064(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqy 4096(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2dqy -4096(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqy -4128(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2dqy 1016(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqy 1024(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttpd2dqy -1024(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2dqy -1032(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2dq %xmm29, %xmm30 # AVX512{F,VL}
vcvttps2dq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvttps2dq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttps2dq (%rcx), %xmm30 # AVX512{F,VL}
vcvttps2dq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttps2dq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2dq 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttps2dq 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvttps2dq -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttps2dq -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvttps2dq 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttps2dq 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2dq -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttps2dq -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2dq %ymm29, %ymm30 # AVX512{F,VL}
vcvttps2dq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvttps2dq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvttps2dq (%rcx), %ymm30 # AVX512{F,VL}
vcvttps2dq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvttps2dq (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvttps2dq 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvttps2dq 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvttps2dq -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvttps2dq -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvttps2dq 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvttps2dq 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvttps2dq -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvttps2dq -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# Unsigned dword conversions: vcvtudq2pd (widening — half-width memory
# source, smaller Disp8 limits) and vcvtudq2ps (same-width), XMM and YMM
# destinations, with masking, broadcast, and disp8*N boundary cases.
vcvtudq2pd %xmm29, %xmm30 # AVX512{F,VL}
vcvtudq2pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtudq2pd (%rcx), %xmm30 # AVX512{F,VL}
vcvtudq2pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtudq2pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvtudq2pd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtudq2pd 1024(%rdx), %xmm30 # AVX512{F,VL}
vcvtudq2pd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtudq2pd -1032(%rdx), %xmm30 # AVX512{F,VL}
vcvtudq2pd 508(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtudq2pd 512(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtudq2pd -512(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvtudq2pd -516(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvtudq2pd %xmm29, %ymm30 # AVX512{F,VL}
vcvtudq2pd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vcvtudq2pd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtudq2pd (%rcx), %ymm30 # AVX512{F,VL}
vcvtudq2pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtudq2pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vcvtudq2pd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtudq2pd 2048(%rdx), %ymm30 # AVX512{F,VL}
vcvtudq2pd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtudq2pd -2064(%rdx), %ymm30 # AVX512{F,VL}
vcvtudq2pd 508(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vcvtudq2pd 512(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vcvtudq2pd -512(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vcvtudq2pd -516(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vcvtudq2ps %xmm29, %xmm30 # AVX512{F,VL}
vcvtudq2ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvtudq2ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvtudq2ps (%rcx), %xmm30 # AVX512{F,VL}
vcvtudq2ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvtudq2ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvtudq2ps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtudq2ps 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvtudq2ps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvtudq2ps -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvtudq2ps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtudq2ps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtudq2ps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvtudq2ps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvtudq2ps %ymm29, %ymm30 # AVX512{F,VL}
vcvtudq2ps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvtudq2ps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvtudq2ps (%rcx), %ymm30 # AVX512{F,VL}
vcvtudq2ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvtudq2ps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvtudq2ps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtudq2ps 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvtudq2ps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvtudq2ps -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvtudq2ps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtudq2ps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvtudq2ps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvtudq2ps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# vdivpd / vdivps: three-operand divide, XMM and YMM lengths, with the
# standard coverage matrix — register, {%k7}/{%k7}{z} masking, memory,
# SIB, element broadcast, and disp8*N compression boundaries.
vdivpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vdivpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vdivpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vdivpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vdivpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vdivpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vdivpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vdivpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vdivpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vdivpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vdivpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vdivpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vdivpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vdivpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vdivpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vdivpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vdivpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vdivpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vdivpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vdivpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vdivps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vdivps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vdivps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vdivps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vdivps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vdivps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vdivps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vdivps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vdivps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vdivps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vdivps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vdivps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vdivps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vdivps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vdivps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vdivps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vdivps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vdivps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vdivps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vdivps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vdivps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vdivps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
	# VEXPANDPD/VEXPANDPS, EVEX 128/256-bit forms (AVX512F + AVX512VL).
	# Memory-source forms (with and without masking), an indexed address,
	# Disp8*N boundary displacements (element-sized N: 8 for pd, 4 for ps —
	# expand loads are not broadcastable), then register-to-register forms.
	vexpandpd	(%rcx), %xmm30	 # AVX512{F,VL}
	vexpandpd	(%rcx), %xmm30{%k7}	 # AVX512{F,VL}
	vexpandpd	(%rcx), %xmm30{%k7}{z}	 # AVX512{F,VL}
	vexpandpd	0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vexpandpd	1016(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vexpandpd	1024(%rdx), %xmm30	 # AVX512{F,VL}
	vexpandpd	-1024(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vexpandpd	-1032(%rdx), %xmm30	 # AVX512{F,VL}
	vexpandpd	(%rcx), %ymm30	 # AVX512{F,VL}
	vexpandpd	(%rcx), %ymm30{%k7}	 # AVX512{F,VL}
	vexpandpd	(%rcx), %ymm30{%k7}{z}	 # AVX512{F,VL}
	vexpandpd	0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vexpandpd	1016(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vexpandpd	1024(%rdx), %ymm30	 # AVX512{F,VL}
	vexpandpd	-1024(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vexpandpd	-1032(%rdx), %ymm30	 # AVX512{F,VL}
	vexpandpd	%xmm29, %xmm30	 # AVX512{F,VL}
	vexpandpd	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vexpandpd	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vexpandpd	%ymm29, %ymm30	 # AVX512{F,VL}
	vexpandpd	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vexpandpd	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vexpandps	(%rcx), %xmm30	 # AVX512{F,VL}
	vexpandps	(%rcx), %xmm30{%k7}	 # AVX512{F,VL}
	vexpandps	(%rcx), %xmm30{%k7}{z}	 # AVX512{F,VL}
	vexpandps	0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vexpandps	508(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vexpandps	512(%rdx), %xmm30	 # AVX512{F,VL}
	vexpandps	-512(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vexpandps	-516(%rdx), %xmm30	 # AVX512{F,VL}
	vexpandps	(%rcx), %ymm30	 # AVX512{F,VL}
	vexpandps	(%rcx), %ymm30{%k7}	 # AVX512{F,VL}
	vexpandps	(%rcx), %ymm30{%k7}{z}	 # AVX512{F,VL}
	vexpandps	0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vexpandps	508(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vexpandps	512(%rdx), %ymm30	 # AVX512{F,VL}
	vexpandps	-512(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vexpandps	-516(%rdx), %ymm30	 # AVX512{F,VL}
	vexpandps	%xmm29, %xmm30	 # AVX512{F,VL}
	vexpandps	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vexpandps	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vexpandps	%ymm29, %ymm30	 # AVX512{F,VL}
	vexpandps	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vexpandps	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	# VEXTRACTF32X4/VEXTRACTI32X4, 256-bit source (AVX512F + AVX512VL):
	# register destination with plain, merge-masked, and zero-masked forms,
	# plus a second immediate value to cover both imm8 test patterns.
	vextractf32x4	$0xab, %ymm29, %xmm30	 # AVX512{F,VL}
	vextractf32x4	$0xab, %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vextractf32x4	$0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vextractf32x4	$123, %ymm29, %xmm30	 # AVX512{F,VL}
	vextracti32x4	$0xab, %ymm29, %xmm30	 # AVX512{F,VL}
	vextracti32x4	$0xab, %ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vextracti32x4	$0xab, %ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vextracti32x4	$123, %ymm29, %xmm30	 # AVX512{F,VL}
	# VFMADD132PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmadd132pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmadd132pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd132pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmadd132pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd132pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmadd132ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd132ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd132ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmadd132ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd132ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd132ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd132ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMADD213PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmadd213pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmadd213pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd213pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmadd213pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd213pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmadd213ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd213ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd213ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmadd213ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd213ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd213ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd213ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMADD231PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmadd231pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmadd231pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd231pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmadd231pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd231pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmadd231ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd231ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmadd231ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmadd231ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmadd231ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmadd231ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmadd231ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMADDSUB132PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmaddsub132pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmaddsub132pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub132pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmaddsub132pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub132pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmaddsub132ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub132ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub132ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmaddsub132ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub132ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub132ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub132ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMADDSUB213PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmaddsub213pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmaddsub213pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub213pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmaddsub213pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub213pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmaddsub213ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub213ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub213ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmaddsub213ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub213ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub213ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub213ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMADDSUB231PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmaddsub231pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmaddsub231pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub231pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmaddsub231pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub231pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmaddsub231ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub231ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmaddsub231ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmaddsub231ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmaddsub231ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmaddsub231ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmaddsub231ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMSUB132PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmsub132pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmsub132pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub132pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmsub132pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub132pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmsub132ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub132ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub132ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmsub132ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub132ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub132ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub132ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMSUB213PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmsub213pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmsub213pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub213pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmsub213pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub213pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmsub213ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub213ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub213ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmsub213ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub213ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub213ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub213ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# VFMSUB231PD/PS, EVEX 128/256-bit forms (AVX512F + AVX512VL):
	# register, masking, zero-masking, memory, broadcast {1toN}, and
	# Disp8*N compressed-displacement boundary operands.
	vfmsub231pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmsub231pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub231pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmsub231pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub231pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfmsub231ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub231ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfmsub231ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfmsub231ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfmsub231ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfmsub231ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfmsub231ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
vfmsubadd132pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd132pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd132pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd132ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd132ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd132ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd132ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd132ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd132ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd213pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd213pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd213ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd213ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd213ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd213ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd213ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd213ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd231pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd231pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfmsubadd231ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfmsubadd231ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfmsubadd231ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfmsubadd231ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfmsubadd231ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfmsubadd231ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd132pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd132pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmadd132pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmadd132pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd132ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd132ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd132ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd132ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmadd132ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmadd132ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd132ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd132ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd213pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd213pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmadd213pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmadd213pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd213ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd213ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd213ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd213ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmadd213ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmadd213ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd213ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd213ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd231pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd231pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmadd231pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmadd231pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmadd231ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmadd231ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmadd231ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmadd231ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmadd231ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmadd231ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmadd231ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmadd231ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmsub132pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmsub132pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmsub132pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmsub132pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmsub132ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmsub132ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub132ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub132ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmsub132ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmsub132ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub132ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub132ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmsub213pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmsub213pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmsub213pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmsub213pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmsub213ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmsub213ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub213ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub213ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmsub213ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmsub213ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub213ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub213ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmsub231pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmsub231pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmsub231pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmsub231pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vfnmsub231ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vfnmsub231ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vfnmsub231ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vfnmsub231ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vfnmsub231ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vfnmsub231ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vfnmsub231ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vfnmsub231ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# VSIB gather coverage: each mnemonic is driven with the three index
# addressing shapes (disp+base+index*8, disp+base+index*1, disp+base+index*4)
# and the mandatory {%k1} completion mask. Index register width follows the
# element-index/element-data combination (e.g. dword indices for qword data
# use an xmm index for a ymm destination; vgatherqps with a ymm index still
# writes an xmm destination).
vgatherdpd 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vgatherdpd 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vgatherdpd 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vgatherdpd 123(%r14,%xmm31,8), %ymm30{%k1} # AVX512{F,VL}
vgatherdpd 256(%r9,%xmm31), %ymm30{%k1} # AVX512{F,VL}
vgatherdpd 1024(%rcx,%xmm31,4), %ymm30{%k1} # AVX512{F,VL}
vgatherdps 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vgatherdps 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vgatherdps 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vgatherdps 123(%r14,%ymm31,8), %ymm30{%k1} # AVX512{F,VL}
vgatherdps 256(%r9,%ymm31), %ymm30{%k1} # AVX512{F,VL}
vgatherdps 1024(%rcx,%ymm31,4), %ymm30{%k1} # AVX512{F,VL}
vgatherqpd 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vgatherqpd 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vgatherqpd 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vgatherqpd 123(%r14,%ymm31,8), %ymm30{%k1} # AVX512{F,VL}
vgatherqpd 256(%r9,%ymm31), %ymm30{%k1} # AVX512{F,VL}
vgatherqpd 1024(%rcx,%ymm31,4), %ymm30{%k1} # AVX512{F,VL}
vgatherqps 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vgatherqps 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vgatherqps 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vgatherqps 123(%r14,%ymm31,8), %xmm30{%k1} # AVX512{F,VL}
vgatherqps 256(%r9,%ymm31), %xmm30{%k1} # AVX512{F,VL}
vgatherqps 1024(%rcx,%ymm31,4), %xmm30{%k1} # AVX512{F,VL}
# vgetexppd/vgetexpps, 128- and 256-bit forms: register, masking, zeroing,
# memory, SIB, embedded broadcast, plus displacement pairs straddling the
# Disp8*N compression boundary (N = vector width for full loads, element
# size for broadcasts: note {1to2}/{1to4} use 1016/1024 for 8-byte elements
# while ps broadcasts use 508/512 for 4-byte elements).
vgetexppd %xmm29, %xmm30 # AVX512{F,VL}
vgetexppd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vgetexppd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vgetexppd (%rcx), %xmm30 # AVX512{F,VL}
vgetexppd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vgetexppd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vgetexppd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetexppd 2048(%rdx), %xmm30 # AVX512{F,VL}
vgetexppd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetexppd -2064(%rdx), %xmm30 # AVX512{F,VL}
vgetexppd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vgetexppd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vgetexppd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vgetexppd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vgetexppd %ymm29, %ymm30 # AVX512{F,VL}
vgetexppd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vgetexppd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vgetexppd (%rcx), %ymm30 # AVX512{F,VL}
vgetexppd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vgetexppd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vgetexppd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetexppd 4096(%rdx), %ymm30 # AVX512{F,VL}
vgetexppd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetexppd -4128(%rdx), %ymm30 # AVX512{F,VL}
vgetexppd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vgetexppd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vgetexppd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vgetexppd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vgetexpps %xmm29, %xmm30 # AVX512{F,VL}
vgetexpps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vgetexpps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vgetexpps (%rcx), %xmm30 # AVX512{F,VL}
vgetexpps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vgetexpps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vgetexpps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetexpps 2048(%rdx), %xmm30 # AVX512{F,VL}
vgetexpps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetexpps -2064(%rdx), %xmm30 # AVX512{F,VL}
vgetexpps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vgetexpps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vgetexpps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vgetexpps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vgetexpps %ymm29, %ymm30 # AVX512{F,VL}
vgetexpps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vgetexpps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vgetexpps (%rcx), %ymm30 # AVX512{F,VL}
vgetexpps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vgetexpps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vgetexpps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetexpps 4096(%rdx), %ymm30 # AVX512{F,VL}
vgetexpps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetexpps -4128(%rdx), %ymm30 # AVX512{F,VL}
vgetexpps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vgetexpps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vgetexpps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vgetexpps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# vgetmantpd/vgetmantps, 128- and 256-bit forms with an imm8 selector:
# $0xab exercises the full immediate byte, $123 the plain-decimal path,
# followed by the usual memory / SIB / broadcast / Disp8-boundary matrix.
vgetmantpd $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vgetmantpd $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vgetmantpd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vgetmantpd $123, %xmm29, %xmm30 # AVX512{F,VL}
vgetmantpd $123, (%rcx), %xmm30 # AVX512{F,VL}
vgetmantpd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vgetmantpd $123, (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vgetmantpd $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetmantpd $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vgetmantpd $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetmantpd $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vgetmantpd $123, 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vgetmantpd $123, 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vgetmantpd $123, -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vgetmantpd $123, -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vgetmantpd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vgetmantpd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vgetmantpd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vgetmantpd $123, %ymm29, %ymm30 # AVX512{F,VL}
vgetmantpd $123, (%rcx), %ymm30 # AVX512{F,VL}
vgetmantpd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vgetmantpd $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vgetmantpd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetmantpd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vgetmantpd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetmantpd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vgetmantpd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vgetmantpd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vgetmantpd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vgetmantpd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vgetmantps $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vgetmantps $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vgetmantps $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vgetmantps $123, %xmm29, %xmm30 # AVX512{F,VL}
vgetmantps $123, (%rcx), %xmm30 # AVX512{F,VL}
vgetmantps $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vgetmantps $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vgetmantps $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetmantps $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vgetmantps $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vgetmantps $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vgetmantps $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vgetmantps $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vgetmantps $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vgetmantps $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vgetmantps $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vgetmantps $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vgetmantps $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vgetmantps $123, %ymm29, %ymm30 # AVX512{F,VL}
vgetmantps $123, (%rcx), %ymm30 # AVX512{F,VL}
vgetmantps $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vgetmantps $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vgetmantps $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetmantps $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vgetmantps $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vgetmantps $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vgetmantps $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vgetmantps $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vgetmantps $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vgetmantps $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# vinsertf32x4/vinserti32x4 (128-bit lane insert into ymm) with imm8 lane
# selector; no broadcast forms here — only register, masked, memory, SIB,
# and the 16-byte-granular Disp8 boundary (2032/2048).
vinsertf32x4 $0xab, %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vinsertf32x4 $0xab, %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vinsertf32x4 $0xab, %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vinsertf32x4 $123, %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vinsertf32x4 $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vinsertf32x4 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vinsertf32x4 $123, 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vinsertf32x4 $123, 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vinsertf32x4 $123, -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vinsertf32x4 $123, -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vinserti32x4 $0xab, %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vinserti32x4 $0xab, %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vinserti32x4 $0xab, %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vinserti32x4 $123, %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vinserti32x4 $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vinserti32x4 $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vinserti32x4 $123, 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vinserti32x4 $123, 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vinserti32x4 $123, -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vinserti32x4 $123, -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
# vmaxpd/vmaxps/vminpd/vminps: three-operand packed min/max, each in the
# standard 14-line xmm + 14-line ymm matrix (register, masked, zero-masked,
# memory, SIB, broadcast, and full/broadcast Disp8 boundaries).
vmaxpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmaxpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmaxpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vmaxpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmaxpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmaxpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vmaxpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vmaxps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vmaxps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmaxps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmaxps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vmaxps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vmaxps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vmaxps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmaxps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmaxps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vmaxps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmaxps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vmaxps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vmaxps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmaxps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmaxps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vmaxps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vmaxps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vmaxps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmaxps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmaxps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vmaxps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmaxps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vminpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vminpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vminpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vminpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vminpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vminpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vminpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vminpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vminpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vminpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vminpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vminpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vminpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vminpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vminpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vminpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vminpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vminpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vminpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vminpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vminps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vminps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vminps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vminps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vminps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vminps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vminps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vminps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vminps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vminps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vminps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vminps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vminps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vminps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vminps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vminps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vminps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vminps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vminps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vminps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vminps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vminps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# Load/move forms without embedded broadcast: vmovapd/vmovaps (aligned),
# vmovddup (duplicating load — note its xmm form reads only 8 bytes, hence
# the 1016/1024 Disp8 boundary), and the element-typed integer moves
# vmovdqa32/64 and vmovdqu32/64. Each gets the 9-line xmm + 9-line ymm
# matrix: register, {%k7}, {%k7}{z}, memory, SIB, and Disp8 boundaries.
vmovapd %xmm29, %xmm30 # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovapd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovapd (%rcx), %xmm30 # AVX512{F,VL}
vmovapd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovapd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovapd 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovapd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovapd -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovapd %ymm29, %ymm30 # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovapd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovapd (%rcx), %ymm30 # AVX512{F,VL}
vmovapd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovapd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovapd 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovapd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovapd -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovaps %xmm29, %xmm30 # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovaps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovaps (%rcx), %xmm30 # AVX512{F,VL}
vmovaps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovaps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovaps 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovaps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovaps -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovaps %ymm29, %ymm30 # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovaps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovaps (%rcx), %ymm30 # AVX512{F,VL}
vmovaps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovaps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovaps 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovaps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovaps -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovddup %xmm29, %xmm30 # AVX512{F,VL}
vmovddup %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovddup %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovddup (%rcx), %xmm30 # AVX512{F,VL}
vmovddup 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovddup 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovddup 1024(%rdx), %xmm30 # AVX512{F,VL}
vmovddup -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovddup -1032(%rdx), %xmm30 # AVX512{F,VL}
vmovddup %ymm29, %ymm30 # AVX512{F,VL}
vmovddup %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovddup %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovddup (%rcx), %ymm30 # AVX512{F,VL}
vmovddup 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovddup 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovddup 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovddup -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovddup -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 (%rcx), %xmm30 # AVX512{F,VL}
vmovdqa32 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovdqa32 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqa32 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovdqa32 -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqa32 -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa32 (%rcx), %ymm30 # AVX512{F,VL}
vmovdqa32 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovdqa32 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqa32 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovdqa32 -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqa32 -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqa64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 (%rcx), %xmm30 # AVX512{F,VL}
vmovdqa64 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovdqa64 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqa64 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovdqa64 -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqa64 -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqa64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqa64 (%rcx), %ymm30 # AVX512{F,VL}
vmovdqa64 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovdqa64 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqa64 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovdqa64 -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqa64 -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu32 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 (%rcx), %xmm30 # AVX512{F,VL}
vmovdqu32 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovdqu32 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqu32 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovdqu32 -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqu32 -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu32 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu32 (%rcx), %ymm30 # AVX512{F,VL}
vmovdqu32 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovdqu32 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqu32 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovdqu32 -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqu32 -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30 # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovdqu64 %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 (%rcx), %xmm30 # AVX512{F,VL}
vmovdqu64 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovdqu64 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqu64 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovdqu64 -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovdqu64 -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30 # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovdqu64 %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovdqu64 (%rcx), %ymm30 # AVX512{F,VL}
vmovdqu64 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovdqu64 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqu64 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovdqu64 -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovdqu64 -4128(%rdx), %ymm30 # AVX512{F,VL}
# Non-temporal forms. vmovntdq/vmovntpd/vmovntps are store-only
# (register operand is the source); vmovntdqa is the load direction.
# No masking or broadcast variants appear — only memory, SIB, and the
# full-vector Disp8 boundaries for xmm (2032/2048) and ymm (4064/4096).
vmovntdq %xmm30, (%rcx) # AVX512{F,VL}
vmovntdq %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovntdq %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovntdq %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovntdq %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovntdq %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovntdq %ymm30, (%rcx) # AVX512{F,VL}
vmovntdq %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovntdq %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovntdq %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovntdq %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovntdq %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovntdqa (%rcx), %xmm30 # AVX512{F,VL}
vmovntdqa 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovntdqa 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovntdqa 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovntdqa -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovntdqa -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovntdqa (%rcx), %ymm30 # AVX512{F,VL}
vmovntdqa 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovntdqa 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovntdqa 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovntdqa -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovntdqa -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovntpd %xmm30, (%rcx) # AVX512{F,VL}
vmovntpd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovntpd %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovntpd %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovntpd %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovntpd %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovntpd %ymm30, (%rcx) # AVX512{F,VL}
vmovntpd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovntpd %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovntpd %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovntpd %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovntpd %ymm30, -4128(%rdx) # AVX512{F,VL}
vmovntps %xmm30, (%rcx) # AVX512{F,VL}
vmovntps %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovntps %xmm30, 2032(%rdx) # AVX512{F,VL} Disp8
vmovntps %xmm30, 2048(%rdx) # AVX512{F,VL}
vmovntps %xmm30, -2048(%rdx) # AVX512{F,VL} Disp8
vmovntps %xmm30, -2064(%rdx) # AVX512{F,VL}
vmovntps %ymm30, (%rcx) # AVX512{F,VL}
vmovntps %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vmovntps %ymm30, 4064(%rdx) # AVX512{F,VL} Disp8
vmovntps %ymm30, 4096(%rdx) # AVX512{F,VL}
vmovntps %ymm30, -4096(%rdx) # AVX512{F,VL} Disp8
vmovntps %ymm30, -4128(%rdx) # AVX512{F,VL}
# vmovshdup/vmovsldup (element-duplicating loads) and the unaligned moves
# vmovupd/vmovups, each in the 9-line xmm + 9-line ymm matrix (register,
# {%k7}, {%k7}{z}, memory, SIB, Disp8 boundaries; no broadcast forms).
vmovshdup %xmm29, %xmm30 # AVX512{F,VL}
vmovshdup %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovshdup %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovshdup (%rcx), %xmm30 # AVX512{F,VL}
vmovshdup 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovshdup 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovshdup 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovshdup -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovshdup -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovshdup %ymm29, %ymm30 # AVX512{F,VL}
vmovshdup %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovshdup %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovshdup (%rcx), %ymm30 # AVX512{F,VL}
vmovshdup 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovshdup 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovshdup 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovshdup -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovshdup -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovsldup %xmm29, %xmm30 # AVX512{F,VL}
vmovsldup %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovsldup %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovsldup (%rcx), %xmm30 # AVX512{F,VL}
vmovsldup 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovsldup 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovsldup 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovsldup -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovsldup -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovsldup %ymm29, %ymm30 # AVX512{F,VL}
vmovsldup %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovsldup %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovsldup (%rcx), %ymm30 # AVX512{F,VL}
vmovsldup 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovsldup 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovsldup 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovsldup -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovsldup -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovupd %xmm29, %xmm30 # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovupd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovupd (%rcx), %xmm30 # AVX512{F,VL}
vmovupd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovupd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovupd 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovupd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovupd -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovupd %ymm29, %ymm30 # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovupd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovupd (%rcx), %ymm30 # AVX512{F,VL}
vmovupd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovupd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovupd 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovupd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovupd -4128(%rdx), %ymm30 # AVX512{F,VL}
vmovups %xmm29, %xmm30 # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmovups %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmovups (%rcx), %xmm30 # AVX512{F,VL}
vmovups 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vmovups 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovups 2048(%rdx), %xmm30 # AVX512{F,VL}
vmovups -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vmovups -2064(%rdx), %xmm30 # AVX512{F,VL}
vmovups %ymm29, %ymm30 # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmovups %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmovups (%rcx), %ymm30 # AVX512{F,VL}
vmovups 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vmovups 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovups 4096(%rdx), %ymm30 # AVX512{F,VL}
vmovups -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vmovups -4128(%rdx), %ymm30 # AVX512{F,VL}
# vmulpd/vmulps: standard three-operand 14-line xmm + 14-line ymm matrix
# (register, masked, zero-masked, memory, SIB, broadcast, Disp8 limits).
vmulpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vmulpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmulpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmulpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vmulpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vmulpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vmulpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmulpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmulpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vmulpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vmulpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vmulpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmulpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmulpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vmulpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vmulpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vmulpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmulpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmulpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vmulpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vmulps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vmulps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vmulps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vmulps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vmulps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vmulps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vmulps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmulps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vmulps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vmulps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vmulps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vmulps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vmulps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vmulps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vmulps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vmulps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vmulps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vmulps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmulps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vmulps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vmulps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vmulps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpabsd/vpabsq: two-operand integer absolute value, 14-line xmm + 14-line
# ymm matrix each. Element size drives the broadcast factor and Disp8
# scale: dword forms use {1to4}/{1to8} with 508/512, qword forms use
# {1to2}/{1to4} with 1016/1024.
vpabsd %xmm29, %xmm30 # AVX512{F,VL}
vpabsd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpabsd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpabsd (%rcx), %xmm30 # AVX512{F,VL}
vpabsd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpabsd (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpabsd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpabsd 2048(%rdx), %xmm30 # AVX512{F,VL}
vpabsd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpabsd -2064(%rdx), %xmm30 # AVX512{F,VL}
vpabsd 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpabsd 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpabsd -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpabsd -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpabsd %ymm29, %ymm30 # AVX512{F,VL}
vpabsd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpabsd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpabsd (%rcx), %ymm30 # AVX512{F,VL}
vpabsd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpabsd (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpabsd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpabsd 4096(%rdx), %ymm30 # AVX512{F,VL}
vpabsd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpabsd -4128(%rdx), %ymm30 # AVX512{F,VL}
vpabsd 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpabsd 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpabsd -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpabsd -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpabsq %xmm29, %xmm30 # AVX512{F,VL}
vpabsq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpabsq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpabsq (%rcx), %xmm30 # AVX512{F,VL}
vpabsq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpabsq (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vpabsq 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpabsq 2048(%rdx), %xmm30 # AVX512{F,VL}
vpabsq -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpabsq -2064(%rdx), %xmm30 # AVX512{F,VL}
vpabsq 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpabsq 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpabsq -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpabsq -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpabsq %ymm29, %ymm30 # AVX512{F,VL}
vpabsq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpabsq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpabsq (%rcx), %ymm30 # AVX512{F,VL}
vpabsq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpabsq (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpabsq 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpabsq 4096(%rdx), %ymm30 # AVX512{F,VL}
vpabsq -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpabsq -4128(%rdx), %ymm30 # AVX512{F,VL}
vpabsq 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpabsq 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpabsq -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpabsq -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
# vpaddd/vpaddq: three-operand integer add, 14-line xmm + 14-line ymm
# matrix each, with element-size-appropriate broadcast factors and
# Disp8 boundary displacements (dword: 508/512; qword: 1016/1024).
vpaddd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpaddd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpaddd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpaddd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpaddd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpaddd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpaddd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpaddd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpaddd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpaddd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpaddd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpaddd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpaddd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpaddd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpaddd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpaddd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpaddd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpaddd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpaddd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpaddd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpaddq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpaddq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpaddq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpaddq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpaddq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpaddq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpaddq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpaddq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpaddq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpaddq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpaddq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpaddq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpaddq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpaddq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpaddq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpaddq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpaddq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpaddq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpaddq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpaddq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpaddq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpaddq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpandd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpandd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpandd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpandd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpandd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpandd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpandd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpandd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpandd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpandd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpandd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpandd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpandd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpandd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpandd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpandd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpandnd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpandnd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpandnd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpandnd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpandnd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpandnd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpandnd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandnd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandnd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpandnd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpandnd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpandnd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpandnd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpandnd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpandnd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpandnd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpandnd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandnd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandnd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpandnd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpandnq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpandnq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpandnq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpandnq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpandnq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpandnq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpandnq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandnq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandnq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpandnq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandnq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpandnq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpandnq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpandnq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpandnq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpandnq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpandnq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpandnq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandnq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandnq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpandnq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandnq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpandq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpandq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpandq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpandq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpandq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpandq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpandq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpandq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpandq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpandq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpandq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpandq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpandq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpandq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpandq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpandq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpandq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpandq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpandq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpandq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpblendmd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpblendmd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpblendmd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpblendmd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpbroadcastd (%rcx), %xmm30 # AVX512{F,VL}
vpbroadcastd (%rcx), %xmm30{%k7} # AVX512{F,VL}
vpbroadcastd (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vpbroadcastd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpbroadcastd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpbroadcastd 512(%rdx), %xmm30 # AVX512{F,VL}
vpbroadcastd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpbroadcastd -516(%rdx), %xmm30 # AVX512{F,VL}
vpbroadcastd (%rcx), %ymm30 # AVX512{F,VL}
vpbroadcastd (%rcx), %ymm30{%k7} # AVX512{F,VL}
vpbroadcastd (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vpbroadcastd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpbroadcastd 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpbroadcastd 512(%rdx), %ymm30 # AVX512{F,VL}
vpbroadcastd -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpbroadcastd -516(%rdx), %ymm30 # AVX512{F,VL}
vpbroadcastd %xmm29, %xmm30 # AVX512{F,VL}
vpbroadcastd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpbroadcastd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpbroadcastd %xmm29, %ymm30 # AVX512{F,VL}
vpbroadcastd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpbroadcastd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpbroadcastd %eax, %xmm30 # AVX512{F,VL}
vpbroadcastd %eax, %xmm30{%k7} # AVX512{F,VL}
vpbroadcastd %eax, %xmm30{%k7}{z} # AVX512{F,VL}
vpbroadcastd %ebp, %xmm30 # AVX512{F,VL}
vpbroadcastd %r13d, %xmm30 # AVX512{F,VL}
vpbroadcastd %eax, %ymm30 # AVX512{F,VL}
vpbroadcastd %eax, %ymm30{%k7} # AVX512{F,VL}
vpbroadcastd %eax, %ymm30{%k7}{z} # AVX512{F,VL}
vpbroadcastd %ebp, %ymm30 # AVX512{F,VL}
vpbroadcastd %r13d, %ymm30 # AVX512{F,VL}
vpbroadcastq (%rcx), %xmm30 # AVX512{F,VL}
vpbroadcastq (%rcx), %xmm30{%k7} # AVX512{F,VL}
vpbroadcastq (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vpbroadcastq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpbroadcastq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpbroadcastq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpbroadcastq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpbroadcastq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpbroadcastq (%rcx), %ymm30 # AVX512{F,VL}
vpbroadcastq (%rcx), %ymm30{%k7} # AVX512{F,VL}
vpbroadcastq (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vpbroadcastq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpbroadcastq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpbroadcastq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpbroadcastq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpbroadcastq -1032(%rdx), %ymm30 # AVX512{F,VL}
vpbroadcastq %xmm29, %xmm30 # AVX512{F,VL}
vpbroadcastq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpbroadcastq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpbroadcastq %xmm29, %ymm30 # AVX512{F,VL}
vpbroadcastq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpbroadcastq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpbroadcastq %rax, %xmm30 # AVX512{F,VL}
vpbroadcastq %rax, %xmm30{%k7} # AVX512{F,VL}
vpbroadcastq %rax, %xmm30{%k7}{z} # AVX512{F,VL}
vpbroadcastq %r8, %xmm30 # AVX512{F,VL}
vpbroadcastq %rax, %ymm30 # AVX512{F,VL}
vpbroadcastq %rax, %ymm30{%k7} # AVX512{F,VL}
vpbroadcastq %rax, %ymm30{%k7}{z} # AVX512{F,VL}
vpbroadcastq %r8, %ymm30 # AVX512{F,VL}
vpcmpd $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpd $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpd $123, -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpd $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpd $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpd $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpd $123, -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpd $123, -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpeqd (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqd %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpeqd (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqd -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqd -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpeqq (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpeqq %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpeqq (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpeqq -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpeqq -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtd %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtd (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtd -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtd -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpgtq %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpgtq (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpgtq -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpgtq -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $123, -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpq $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpq $123, -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpq $123, -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $123, -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpud $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpud $123, -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpud $123, -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, %xmm29, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $0xab, %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vpcmpuq $123, %ymm29, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpcmpuq $123, -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vpcmpuq $123, -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpblendmq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpblendmq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpblendmq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpblendmq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpblendmq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpblendmq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpblendmq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpblendmq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpblendmq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpcompressd %xmm30, (%rcx) # AVX512{F,VL}
vpcompressd %xmm30, (%rcx){%k7} # AVX512{F,VL}
vpcompressd %xmm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpcompressd %xmm30, 508(%rdx) # AVX512{F,VL} Disp8
vpcompressd %xmm30, 512(%rdx) # AVX512{F,VL}
vpcompressd %xmm30, -512(%rdx) # AVX512{F,VL} Disp8
vpcompressd %xmm30, -516(%rdx) # AVX512{F,VL}
vpcompressd %ymm30, (%rcx) # AVX512{F,VL}
vpcompressd %ymm30, (%rcx){%k7} # AVX512{F,VL}
vpcompressd %ymm30, 0x123(%rax,%r14,8) # AVX512{F,VL}
vpcompressd %ymm30, 508(%rdx) # AVX512{F,VL} Disp8
vpcompressd %ymm30, 512(%rdx) # AVX512{F,VL}
vpcompressd %ymm30, -512(%rdx) # AVX512{F,VL} Disp8
vpcompressd %ymm30, -516(%rdx) # AVX512{F,VL}
vpcompressd %xmm29, %xmm30 # AVX512{F,VL}
vpcompressd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpcompressd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpcompressd %ymm29, %ymm30 # AVX512{F,VL}
vpcompressd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpcompressd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilpd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilpd $123, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd $123, (%rcx), %xmm30 # AVX512{F,VL}
vpermilpd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpermilpd $123, (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vpermilpd $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpermilpd $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpermilpd $123, 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpermilpd $123, -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpermilpd $123, -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpermilpd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilpd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilpd $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermilpd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermilpd $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpermilpd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermilpd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermilpd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermilpd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermilpd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermilpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilps $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilps $123, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps $123, (%rcx), %xmm30 # AVX512{F,VL}
vpermilps $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpermilps $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpermilps $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpermilps $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpermilps $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpermilps $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpermilps $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpermilps $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilps $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilps $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermilps $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermilps $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpermilps $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermilps $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermilps $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpermilps $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpermilps $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpermilps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermilps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermilps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermilps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermilps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermilps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermilps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermilps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermilps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermilps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermilps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermpd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermpd $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermpd $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermpd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermpd $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpermpd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermpd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermpd $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermpd $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermpd $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermq $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpermq $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermq $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermq $123, %ymm29, %ymm30 # AVX512{F,VL}
vpermq $123, (%rcx), %ymm30 # AVX512{F,VL}
vpermq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpermq $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpermq $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermq $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpermq $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpermq $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpermq $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermq $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpermq $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpermq $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpexpandd (%rcx), %xmm30 # AVX512{F,VL}
vpexpandd (%rcx), %xmm30{%k7} # AVX512{F,VL}
vpexpandd (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpexpandd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandd 512(%rdx), %xmm30 # AVX512{F,VL}
vpexpandd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandd -516(%rdx), %xmm30 # AVX512{F,VL}
vpexpandd (%rcx), %ymm30 # AVX512{F,VL}
vpexpandd (%rcx), %ymm30{%k7} # AVX512{F,VL}
vpexpandd (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vpexpandd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpexpandd 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandd 512(%rdx), %ymm30 # AVX512{F,VL}
vpexpandd -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandd -516(%rdx), %ymm30 # AVX512{F,VL}
vpexpandd %xmm29, %xmm30 # AVX512{F,VL}
vpexpandd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpexpandd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandd %ymm29, %ymm30 # AVX512{F,VL}
vpexpandd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpexpandd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpexpandq (%rcx), %xmm30 # AVX512{F,VL}
vpexpandq (%rcx), %xmm30{%k7} # AVX512{F,VL}
vpexpandq (%rcx), %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpexpandq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpexpandq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpexpandq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpexpandq (%rcx), %ymm30 # AVX512{F,VL}
vpexpandq (%rcx), %ymm30{%k7} # AVX512{F,VL}
vpexpandq (%rcx), %ymm30{%k7}{z} # AVX512{F,VL}
vpexpandq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpexpandq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpexpandq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpexpandq -1032(%rdx), %ymm30 # AVX512{F,VL}
vpexpandq %xmm29, %xmm30 # AVX512{F,VL}
vpexpandq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpexpandq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpexpandq %ymm29, %ymm30 # AVX512{F,VL}
vpexpandq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpexpandq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpgatherdd 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherdd 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherdd 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherdd 123(%r14,%ymm31,8), %ymm30{%k1} # AVX512{F,VL}
vpgatherdd 256(%r9,%ymm31), %ymm30{%k1} # AVX512{F,VL}
vpgatherdd 1024(%rcx,%ymm31,4), %ymm30{%k1} # AVX512{F,VL}
vpgatherdq 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherdq 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherdq 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherdq 123(%r14,%xmm31,8), %ymm30{%k1} # AVX512{F,VL}
vpgatherdq 256(%r9,%xmm31), %ymm30{%k1} # AVX512{F,VL}
vpgatherdq 1024(%rcx,%xmm31,4), %ymm30{%k1} # AVX512{F,VL}
vpgatherqd 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 123(%r14,%ymm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 256(%r9,%ymm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherqd 1024(%rcx,%ymm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 123(%r14,%xmm31,8), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 256(%r9,%xmm31), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 1024(%rcx,%xmm31,4), %xmm30{%k1} # AVX512{F,VL}
vpgatherqq 123(%r14,%ymm31,8), %ymm30{%k1} # AVX512{F,VL}
vpgatherqq 256(%r9,%ymm31), %ymm30{%k1} # AVX512{F,VL}
vpgatherqq 1024(%rcx,%ymm31,4), %ymm30{%k1} # AVX512{F,VL}
vpmaxsd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxsd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxsd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxsd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxsq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxsq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxsq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxsq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxsq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxsq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxsq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxsq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxud %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxud (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxud -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxud %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxud %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxud (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxud -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxud -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmaxuq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmaxuq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmaxuq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmaxuq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmaxuq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmaxuq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmaxuq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmaxuq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminsd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminsd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminsd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminsq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminsq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminsq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminsq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminsq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminsq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminsq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminsq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminsq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminsq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminud %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminud %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminud %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminud (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminud 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminud (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminud 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminud -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminud 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminud -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminud -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpminud %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminud %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminud %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminud (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminud 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminud (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminud 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminud -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminud 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminud -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminud -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpminuq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpminuq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpminuq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpminuq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpminuq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpminuq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpminuq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpminuq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpminuq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpminuq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxbd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxbd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxbd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbd 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbd -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxbd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxbd (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxbd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxbd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbd 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbd -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxbq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxbq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxbq 254(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbq 256(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbq -256(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxbq -258(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxbq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxbq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxbq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxbq 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbq 512(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxbq -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxbq -516(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxdq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxdq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxdq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxdq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxdq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxdq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxdq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxdq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxdq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxdq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxdq 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxdq 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxdq -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxdq -2064(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxwd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxwd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxwd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwd 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwd -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxwd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxwd (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxwd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxwd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwd 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwd -2064(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovsxwq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%rcx), %xmm30 # AVX512{F,VL}
vpmovsxwq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovsxwq 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwq 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwq -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovsxwq -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30 # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovsxwq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovsxwq (%rcx), %ymm30 # AVX512{F,VL}
vpmovsxwq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovsxwq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovsxwq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovsxwq -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxbd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxbd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxbd 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbd 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbd -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbd -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxbd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxbd (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxbd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxbd 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbd 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbd -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbd -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxbq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxbq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxbq 254(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbq 256(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbq -256(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxbq -258(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxbq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxbq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxbq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxbq 508(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbq 512(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxbq -512(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxbq -516(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxdq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxdq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxdq 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxdq 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxdq -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxdq -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxdq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxdq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxdq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxdq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxdq 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxdq 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxdq -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxdq -2064(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxwd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxwd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxwd 1016(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwd 1024(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwd -1024(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwd -1032(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxwd %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxwd (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxwd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxwd 2032(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwd 2048(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwd -2048(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwd -2064(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmovzxwq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%rcx), %xmm30 # AVX512{F,VL}
vpmovzxwq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpmovzxwq 508(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwq 512(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwq -512(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpmovzxwq -516(%rdx), %xmm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30 # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30{%k7} # AVX512{F,VL}
vpmovzxwq %xmm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmovzxwq (%rcx), %ymm30 # AVX512{F,VL}
vpmovzxwq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpmovzxwq 1016(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwq 1024(%rdx), %ymm30 # AVX512{F,VL}
vpmovzxwq -1024(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpmovzxwq -1032(%rdx), %ymm30 # AVX512{F,VL}
vpmuldq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmuldq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmuldq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuldq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuldq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmuldq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmuldq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuldq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuldq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmulld %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmulld (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmulld 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmulld -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpmulld %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmulld %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmulld (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmulld 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmulld -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmulld -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpmuludq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpmuludq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpmuludq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpmuludq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpmuludq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpmuludq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpmuludq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpmuludq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpord %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpord %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpord %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpord (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpord 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpord (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpord 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpord -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpord 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpord -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpord -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpord %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpord %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpord %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpord (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpord 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpord (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpord 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpord -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpord 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpord -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpord -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vporq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vporq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vporq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vporq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vporq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vporq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vporq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vporq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vporq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vporq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vporq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vporq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vporq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vporq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vporq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vporq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vporq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vporq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vporq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vporq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vporq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vporq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpscatterdd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterdd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vpscatterdd %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterdq %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterdq %ymm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vpscatterqd %xmm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vpscatterqq %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vpscatterqq %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vpshufd $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpshufd $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpshufd $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpshufd $123, %xmm29, %xmm30 # AVX512{F,VL}
vpshufd $123, (%rcx), %xmm30 # AVX512{F,VL}
vpshufd $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpshufd $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpshufd $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpshufd $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpshufd $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpshufd $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpshufd $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpshufd $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpshufd $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpshufd $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpshufd $123, %ymm29, %ymm30 # AVX512{F,VL}
vpshufd $123, (%rcx), %ymm30 # AVX512{F,VL}
vpshufd $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpshufd $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpshufd $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpshufd $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpshufd $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpshufd $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpshufd $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpslld %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpslld %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpslld %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpslld (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpslld 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpslld 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpslld 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpslld -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpslld -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpslld %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpslld %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpslld %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpslld (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpslld 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpslld 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpslld 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpslld -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpslld -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsllq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsllq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsllq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllq %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsllq %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsllq %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsllq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllq 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllq -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllq -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsllvd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsllvd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsllvd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsllvd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsllvq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsllvq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsllvq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsllvq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsllvq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsllvq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsllvq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsllvq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrad %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrad %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrad %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrad (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrad 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrad -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrad %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrad %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrad %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrad (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrad 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrad 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrad 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrad -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrad -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsraq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsraq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsraq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsraq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsraq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsraq %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsraq %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsraq %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsraq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsraq 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsraq -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsraq -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsravd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsravd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsravd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsravd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsravd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsravq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsravq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsravq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsravq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsravq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsravq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsravq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsravq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsravq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsravq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrld %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrld %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrld (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrld 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrld -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrld %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrld %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrld (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrld 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrld 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrld 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrld -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrld -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq %xmm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq %xmm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlq %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq 2032(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlq 2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq -2048(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlq -2064(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlvd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlvd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlvd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlvd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlvq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlvq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsrlvq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlvq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlvq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlvq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlvq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsrlvq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpsrld $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrld $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrld $123, %xmm29, %xmm30 # AVX512{F,VL}
vpsrld $123, (%rcx), %xmm30 # AVX512{F,VL}
vpsrld $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpsrld $123, (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vpsrld $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpsrld $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpsrld $123, 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpsrld $123, -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vpsrld $123, -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vpsrld $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrld $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrld $123, %ymm29, %ymm30 # AVX512{F,VL}
vpsrld $123, (%rcx), %ymm30 # AVX512{F,VL}
vpsrld $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpsrld $123, (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vpsrld $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpsrld $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpsrld $123, 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpsrld $123, -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vpsrld $123, -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpsrlq $0xab, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq $0xab, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsrlq $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %xmm29, %xmm30 # AVX512{F,VL}
vpsrlq $123, (%rcx), %xmm30 # AVX512{F,VL}
vpsrlq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vpsrlq $123, (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vpsrlq $123, 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, 2048(%rdx), %xmm30 # AVX512{F,VL}
vpsrlq $123, -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, -2064(%rdx), %xmm30 # AVX512{F,VL}
vpsrlq $123, 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpsrlq $123, -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vpsrlq $0xab, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq $0xab, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsrlq $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsrlq $123, %ymm29, %ymm30 # AVX512{F,VL}
vpsrlq $123, (%rcx), %ymm30 # AVX512{F,VL}
vpsrlq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vpsrlq $123, (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vpsrlq $123, 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, 4096(%rdx), %ymm30 # AVX512{F,VL}
vpsrlq $123, -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, -4128(%rdx), %ymm30 # AVX512{F,VL}
vpsrlq $123, 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpsrlq $123, -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vpsrlq $123, -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vpsubd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsubd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsubd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubd 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubd -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsubd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsubd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsubd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubd 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubd -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubd -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpsubq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpsubq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpsubq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpsubq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpsubq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpsubq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpsubq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpsubq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpsubq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpsubq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vptestmd %xmm29, %xmm30, %k5 # AVX512{F,VL}
vptestmd %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vptestmd (%rcx), %xmm30, %k5 # AVX512{F,VL}
vptestmd 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vptestmd (%rcx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vptestmd 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmd -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmd 508(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd 512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vptestmd -512(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmd -516(%rdx){1to4}, %xmm30, %k5 # AVX512{F,VL}
vptestmd %ymm29, %ymm30, %k5 # AVX512{F,VL}
vptestmd %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vptestmd (%rcx), %ymm30, %k5 # AVX512{F,VL}
vptestmd 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vptestmd (%rcx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vptestmd 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmd -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmd 508(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd 512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vptestmd -512(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmd -516(%rdx){1to8}, %ymm30, %k5 # AVX512{F,VL}
vptestmq %xmm29, %xmm30, %k5 # AVX512{F,VL}
vptestmq %xmm29, %xmm30, %k5{%k7} # AVX512{F,VL}
vptestmq (%rcx), %xmm30, %k5 # AVX512{F,VL}
vptestmq 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{F,VL}
vptestmq (%rcx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vptestmq 2032(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq 2048(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmq -2048(%rdx), %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq -2064(%rdx), %xmm30, %k5 # AVX512{F,VL}
vptestmq 1016(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq 1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vptestmq -1024(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL} Disp8
vptestmq -1032(%rdx){1to2}, %xmm30, %k5 # AVX512{F,VL}
vptestmq %ymm29, %ymm30, %k5 # AVX512{F,VL}
vptestmq %ymm29, %ymm30, %k5{%k7} # AVX512{F,VL}
vptestmq (%rcx), %ymm30, %k5 # AVX512{F,VL}
vptestmq 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{F,VL}
vptestmq (%rcx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vptestmq 4064(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq 4096(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmq -4096(%rdx), %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq -4128(%rdx), %ymm30, %k5 # AVX512{F,VL}
vptestmq 1016(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq 1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vptestmq -1024(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL} Disp8
vptestmq -1032(%rdx){1to4}, %ymm30, %k5 # AVX512{F,VL}
vpunpckhdq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpckhdq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
# VPUNPCK{H,L}{DQ,QDQ} AVX512{F,VL} encoding tests (xmm/ymm forms).
# Each group exercises: register, {%k7} merge-masked, {%k7}{z} zero-masked,
# plain memory, SIB memory, {1toN} broadcast, and displacement values that
# bracket the EVEX compressed-disp8 range (lines tagged "Disp8" are expected
# to encode with the scaled 8-bit displacement; their neighbours force disp32).
# NOTE(review): this chunk starts mid-group — the vpunpckhdq xmm register
# forms precede the first line shown here.
vpunpckhdq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhdq -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhdq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpckhdq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpckhdq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhdq -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhdq -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpunpckhqdq: qword elements, so broadcast is {1to2}/{1to4} and the
# broadcast disp8 scale is 8 (±1016/1024 boundaries).
vpunpckhqdq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpckhqdq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckhqdq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpckhqdq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpckhqdq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckhqdq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckhqdq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpunpckldq: dword elements ({1to4}/{1to8} broadcast, scale-4 disp8 boundaries).
vpunpckldq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpckldq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpckldq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpckldq -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpckldq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpckldq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpckldq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpckldq -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpckldq -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpunpcklqdq: qword elements ({1to2}/{1to4} broadcast, scale-8 disp8 boundaries).
vpunpcklqdq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpunpcklqdq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpunpcklqdq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpunpcklqdq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpunpcklqdq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpunpcklqdq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpunpcklqdq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# VPXORD / VPXORQ AVX512{F,VL} encoding tests: register, masked, zero-masked,
# memory, broadcast, and ± compressed-disp8 boundary displacements for both
# xmm (128-bit) and ymm (256-bit) vector lengths.
vpxord %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpxord %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpxord %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpxord (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpxord 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpxord (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpxord 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxord -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxord 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpxord -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxord -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpxord %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpxord %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpxord %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpxord (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpxord 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpxord (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpxord 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxord -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxord 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpxord -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxord -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# vpxorq: qword element size, so {1to2}/{1to4} broadcast forms.
vpxorq %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpxorq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpxorq (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpxorq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpxorq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpxorq %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpxorq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpxorq (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpxorq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpxorq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpxorq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# VRCP14{PD,PS} / VRSQRT14{PD,PS} AVX512{F,VL} encoding tests.
# These are two-operand (src, dst) approximation instructions; the same
# masked / zero-masked / memory / broadcast / disp8-boundary pattern is
# exercised for each element width and vector length.
vrcp14pd %xmm29, %xmm30 # AVX512{F,VL}
vrcp14pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrcp14pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrcp14pd (%rcx), %xmm30 # AVX512{F,VL}
vrcp14pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrcp14pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vrcp14pd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14pd 2048(%rdx), %xmm30 # AVX512{F,VL}
vrcp14pd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14pd -2064(%rdx), %xmm30 # AVX512{F,VL}
vrcp14pd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrcp14pd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrcp14pd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrcp14pd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrcp14pd %ymm29, %ymm30 # AVX512{F,VL}
vrcp14pd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrcp14pd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrcp14pd (%rcx), %ymm30 # AVX512{F,VL}
vrcp14pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrcp14pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vrcp14pd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14pd 4096(%rdx), %ymm30 # AVX512{F,VL}
vrcp14pd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14pd -4128(%rdx), %ymm30 # AVX512{F,VL}
vrcp14pd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrcp14pd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrcp14pd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrcp14pd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrcp14ps %xmm29, %xmm30 # AVX512{F,VL}
vrcp14ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrcp14ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrcp14ps (%rcx), %xmm30 # AVX512{F,VL}
vrcp14ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrcp14ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vrcp14ps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14ps 2048(%rdx), %xmm30 # AVX512{F,VL}
vrcp14ps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrcp14ps -2064(%rdx), %xmm30 # AVX512{F,VL}
vrcp14ps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrcp14ps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrcp14ps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrcp14ps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrcp14ps %ymm29, %ymm30 # AVX512{F,VL}
vrcp14ps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrcp14ps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrcp14ps (%rcx), %ymm30 # AVX512{F,VL}
vrcp14ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrcp14ps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vrcp14ps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14ps 4096(%rdx), %ymm30 # AVX512{F,VL}
vrcp14ps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrcp14ps -4128(%rdx), %ymm30 # AVX512{F,VL}
vrcp14ps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrcp14ps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vrcp14ps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrcp14ps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# vrsqrt14: same operand shapes as vrcp14 above.
vrsqrt14pd %xmm29, %xmm30 # AVX512{F,VL}
vrsqrt14pd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrsqrt14pd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%rcx), %xmm30 # AVX512{F,VL}
vrsqrt14pd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrsqrt14pd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vrsqrt14pd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd 2048(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14pd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd -2064(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14pd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrsqrt14pd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vrsqrt14pd %ymm29, %ymm30 # AVX512{F,VL}
vrsqrt14pd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrsqrt14pd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrsqrt14pd (%rcx), %ymm30 # AVX512{F,VL}
vrsqrt14pd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrsqrt14pd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vrsqrt14pd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd 4096(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14pd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd -4128(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14pd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrsqrt14pd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14pd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vrsqrt14ps %xmm29, %xmm30 # AVX512{F,VL}
vrsqrt14ps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vrsqrt14ps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%rcx), %xmm30 # AVX512{F,VL}
vrsqrt14ps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vrsqrt14ps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vrsqrt14ps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps 2048(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14ps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps -2064(%rdx), %xmm30 # AVX512{F,VL}
vrsqrt14ps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrsqrt14ps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vrsqrt14ps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vrsqrt14ps %ymm29, %ymm30 # AVX512{F,VL}
vrsqrt14ps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vrsqrt14ps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vrsqrt14ps (%rcx), %ymm30 # AVX512{F,VL}
vrsqrt14ps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vrsqrt14ps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vrsqrt14ps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps 4096(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14ps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps -4128(%rdx), %ymm30 # AVX512{F,VL}
vrsqrt14ps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vrsqrt14ps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vrsqrt14ps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# VSCATTER{D,Q}{PD,PS} AVX512{F,VL} VSIB encoding tests. Scatters require a
# {%k} mask; forms vary base, scale, and displacement. The index register
# width follows element counts: e.g. vscatterdpd ymm stores 4 doubles indexed
# by 4 dwords (xmm index), while vscatterqps with a ymm index stores 4
# singles (xmm data). Some lines are duplicated — presumably intentional
# repeat-encoding checks carried over from the generated test pattern.
vscatterdpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterdpd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterdpd %ymm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterdps %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vscatterdps %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterqpd %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vscatterqpd %ymm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%xmm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 256(%r9,%xmm31){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 1024(%rcx,%xmm31,4){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 123(%r14,%ymm31,8){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 256(%r9,%ymm31){%k1} # AVX512{F,VL}
vscatterqps %xmm30, 1024(%rcx,%ymm31,4){%k1} # AVX512{F,VL}
# VSHUFPD / VSHUFPS AVX512{F,VL} encoding tests. These take an 8-bit
# immediate selector; $0xab and $123 cover distinct immediate byte values,
# followed by the usual masked / memory / broadcast / disp8-boundary forms.
vshufpd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vshufpd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vshufpd $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $123, -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufpd $123, -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vshufpd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshufpd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshufpd $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufpd $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufpd $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vshufps $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vshufps $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vshufps $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vshufps $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vshufps $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vshufps $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vshufps $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vshufps $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# VSQRTPD / VSQRTPS AVX512{F,VL} encoding tests: two-operand forms with
# masking, memory, broadcast, and compressed-disp8 boundary displacements.
vsqrtpd %xmm29, %xmm30 # AVX512{F,VL}
vsqrtpd %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsqrtpd %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsqrtpd (%rcx), %xmm30 # AVX512{F,VL}
vsqrtpd 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vsqrtpd (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vsqrtpd 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtpd 2048(%rdx), %xmm30 # AVX512{F,VL}
vsqrtpd -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtpd -2064(%rdx), %xmm30 # AVX512{F,VL}
vsqrtpd 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vsqrtpd 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vsqrtpd -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vsqrtpd -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vsqrtpd %ymm29, %ymm30 # AVX512{F,VL}
vsqrtpd %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsqrtpd %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsqrtpd (%rcx), %ymm30 # AVX512{F,VL}
vsqrtpd 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vsqrtpd (%rcx){1to4}, %ymm30 # AVX512{F,VL}
vsqrtpd 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtpd 4096(%rdx), %ymm30 # AVX512{F,VL}
vsqrtpd -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtpd -4128(%rdx), %ymm30 # AVX512{F,VL}
vsqrtpd 1016(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vsqrtpd 1024(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vsqrtpd -1024(%rdx){1to4}, %ymm30 # AVX512{F,VL} Disp8
vsqrtpd -1032(%rdx){1to4}, %ymm30 # AVX512{F,VL}
vsqrtps %xmm29, %xmm30 # AVX512{F,VL}
vsqrtps %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsqrtps %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsqrtps (%rcx), %xmm30 # AVX512{F,VL}
vsqrtps 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vsqrtps (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vsqrtps 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtps 2048(%rdx), %xmm30 # AVX512{F,VL}
vsqrtps -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vsqrtps -2064(%rdx), %xmm30 # AVX512{F,VL}
vsqrtps 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vsqrtps 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vsqrtps -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vsqrtps -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vsqrtps %ymm29, %ymm30 # AVX512{F,VL}
vsqrtps %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsqrtps %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsqrtps (%rcx), %ymm30 # AVX512{F,VL}
vsqrtps 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vsqrtps (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vsqrtps 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtps 4096(%rdx), %ymm30 # AVX512{F,VL}
vsqrtps -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vsqrtps -4128(%rdx), %ymm30 # AVX512{F,VL}
vsqrtps 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vsqrtps 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vsqrtps -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vsqrtps -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
# VSUBPD / VSUBPS AVX512{F,VL} encoding tests: three-operand forms with
# masking, memory, broadcast, and compressed-disp8 boundary displacements.
vsubpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsubpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsubpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vsubpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vsubpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsubpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsubpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vsubpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vsubpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vsubps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vsubps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vsubps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vsubps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vsubps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vsubps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vsubps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vsubps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vsubps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vsubps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vsubps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vsubps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vsubps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vsubps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vsubps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vsubps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vsubps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vsubps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vsubps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vsubps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# VUNPCK{H,L}{PD,PS} AVX512{F,VL} encoding tests. pd variants use qword
# broadcast ({1to2}/{1to4}, scale-8 disp8 boundaries); ps variants use dword
# broadcast ({1to4}/{1to8}, scale-4 boundaries).
vunpckhpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpckhpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpckhpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpckhpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpckhpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpckhps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpckhps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpckhps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpckhps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpckhps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpckhps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpckhps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpckhps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpcklpd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpcklpd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklpd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklpd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpcklpd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpcklpd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklpd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklpd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vunpcklps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vunpcklps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vunpcklps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vunpcklps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vunpcklps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vunpcklps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vunpcklps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vunpcklps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
# VPTERNLOGD / VPTERNLOGQ AVX512{F,VL} encoding tests: 8-bit truth-table
# immediate plus the usual masked / memory / broadcast / disp8-boundary forms.
vpternlogd $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpternlogd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $123, -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogd $123, -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogd $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpternlogd $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpternlogd $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogd $123, -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogd $123, -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $0xab, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpternlogq $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $123, -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpternlogq $0xab, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpternlogq $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpternlogq $123, %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpternlogq $123, -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpternlogq $123, -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
# vpmov* down-conversions (register-destination forms only here):
# truncating (vpmovQB etc.), signed-saturating (vpmovS..) and
# unsigned-saturating (vpmovUS..) narrowing from qword/dword sources.
# Each mnemonic is exercised as xmm->xmm and ymm->xmm, plain,
# {%k7}-masked, and {%k7}{z} zero-masked.
	vpmovqb	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovqb	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovqb	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovqb	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovqb	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovqb	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsqb	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsqb	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsqb	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsqb	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovsqb	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsqb	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusqb	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovusqb	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusqb	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusqb	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovusqb	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusqb	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovqw	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovqw	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovqw	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovqw	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovqw	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovqw	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsqw	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsqw	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsqw	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsqw	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovsqw	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsqw	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusqw	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovusqw	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusqw	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusqw	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovusqw	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusqw	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovqd	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovqd	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovqd	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovqd	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovqd	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovqd	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsqd	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsqd	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsqd	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsqd	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovsqd	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsqd	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusqd	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovusqd	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusqd	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusqd	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovusqd	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusqd	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
# Dword-source narrowing: same truncate / signed-sat / unsigned-sat
# triple for dword->byte and dword->word.
	vpmovdb	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovdb	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovdb	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovdb	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovdb	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovdb	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsdb	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsdb	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsdb	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsdb	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovsdb	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsdb	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusdb	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovusdb	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusdb	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusdb	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovusdb	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusdb	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovdw	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovdw	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovdw	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovdw	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovdw	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovdw	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsdw	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovsdw	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsdw	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovsdw	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovsdw	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovsdw	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusdw	%xmm29, %xmm30	 # AVX512{F,VL}
	vpmovusdw	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusdw	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpmovusdw	%ymm29, %xmm30	 # AVX512{F,VL}
	vpmovusdw	%ymm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpmovusdw	%ymm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
# vshuff32x4 / vshuff64x2 / vshufi32x4 / vshufi64x2: 128-bit-lane
# shuffles (float/int flavors, dword/qword granularity).  Only ymm forms
# appear under AVX512VL; each mnemonic runs the standard imm8 +
# mask + memory + broadcast + Disp8-boundary matrix.
	vshuff32x4	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vshuff32x4	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vshuff32x4	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff32x4	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff32x4	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff32x4	$123, 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff32x4	$123, -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff32x4	$123, -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vshuff64x2	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vshuff64x2	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff64x2	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff64x2	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff64x2	$123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshuff64x2	$123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshuff64x2	$123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vshufi32x4	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vshufi32x4	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi32x4	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi32x4	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi32x4	$123, 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi32x4	$123, -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi32x4	$123, -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vshufi64x2	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vshufi64x2	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi64x2	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi64x2	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi64x2	$123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vshufi64x2	$123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vshufi64x2	$123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# vpermq / vpermpd (variable-control, register-source forms): qword
# permutes, ymm only.  Standard mask/memory/broadcast/Disp8 matrix.
	vpermq	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermq	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermq	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermq	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermq	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermq	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermq	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermq	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermpd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermpd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermpd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermpd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermpd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermpd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermpd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# vpermt2{d,q,ps,pd}: two-source full permutes (index register
# overwritten variant).  Each element type is tested at xmm and ymm
# widths through the standard mask/memory/broadcast/Disp8 matrix.
	vpermt2d	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpermt2d	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2d	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2d	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2d	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2d	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2d	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2d	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermt2d	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2d	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2d	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2d	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2d	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2d	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2d	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpermt2q	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2q	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2q	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2q	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2q	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2q	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2q	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermt2q	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2q	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2q	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2q	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2q	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2q	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2q	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpermt2ps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2ps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2ps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2ps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermt2ps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2ps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2ps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2ps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpermt2pd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2pd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vpermt2pd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vpermt2pd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpermt2pd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpermt2pd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vpermt2pd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vpermt2pd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# valignq: qword-granular concatenate-and-shift-right by imm8.
# xmm and ymm forms through the imm8/mask/memory/broadcast/Disp8 matrix.
	valignq	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	valignq	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	valignq	$123, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq	$123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq	$123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq	$123, 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$123, -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	valignq	$123, -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	valignq	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	valignq	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	valignq	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq	$123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	valignq	$123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	valignq	$123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
# vscalefpd / vscalefps: floating-point scale (dst = src1 * 2^floor(src2)).
# Both element widths, xmm and ymm, through the standard matrix.
	vscalefpd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vscalefpd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vscalefpd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefpd	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefpd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vscalefpd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vscalefpd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefpd	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefpd	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vscalefps	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vscalefps	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vscalefps	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vscalefps	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vscalefps	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vscalefps	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vscalefps	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vscalefps	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# vfixupimmpd / vfixupimmps: fix up special FP values using a table in
# src2 plus imm8 control.  xmm and ymm forms, full matrix.
	vfixupimmpd	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfixupimmpd	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmpd	$123, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, (%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, 1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, 1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$123, -1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, -1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmpd	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfixupimmpd	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmpd	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, (%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, 1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, 1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmpd	$123, -1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmpd	$123, -1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vfixupimmps	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmps	$123, %xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, (%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, (%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, 2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, 2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, -2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, -2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, 508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, 512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$123, -512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, -516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vfixupimmps	$0xab, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$0xab, %ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vfixupimmps	$0xab, %ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vfixupimmps	$123, %ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, (%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, 0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, (%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, 4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, 4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, -4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, -4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, 508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, 512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vfixupimmps	$123, -512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vfixupimmps	$123, -516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
# Immediate shifts: vpslld/vpsllq (logical left) and vpsrad/vpsraq
# (arithmetic right).  Memory forms shift a memory source into the
# destination register; broadcast and Disp8 boundaries as elsewhere.
	vpslld	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpslld	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpslld	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpslld	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpslld	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpslld	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpslld	$123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vpslld	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpslld	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpslld	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpslld	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpslld	$123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpslld	$123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpslld	$123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpslld	$123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpslld	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpslld	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpslld	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpslld	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpslld	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpslld	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpslld	$123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vpslld	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpslld	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpslld	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpslld	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpslld	$123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpslld	$123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpslld	$123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpslld	$123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpsllq	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsllq	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpsllq	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpsllq	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsllq	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpsllq	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpsllq	$123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsllq	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpsllq	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpsllq	$123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsllq	$123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsllq	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsllq	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpsllq	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpsllq	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsllq	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpsllq	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpsllq	$123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsllq	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpsllq	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpsllq	$123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsllq	$123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsllq	$123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsrad	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsrad	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpsrad	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpsrad	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsrad	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpsrad	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpsrad	$123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vpsrad	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpsrad	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpsrad	$123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpsrad	$123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vpsrad	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsrad	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpsrad	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpsrad	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsrad	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpsrad	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpsrad	$123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vpsrad	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpsrad	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpsrad	$123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpsrad	$123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vpsrad	$123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vpsraq	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsraq	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpsraq	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpsraq	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vpsraq	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vpsraq	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vpsraq	$123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsraq	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vpsraq	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vpsraq	$123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsraq	$123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vpsraq	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsraq	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpsraq	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vpsraq	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vpsraq	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vpsraq	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vpsraq	$123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsraq	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vpsraq	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vpsraq	$123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vpsraq	$123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vpsraq	$123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# Rotate-left family: vprolvd/vprolvq (variable, per-element counts from a
	# register/memory source) and vprold/vprolq (imm8 count). Same coverage
	# pattern as above: masking, SIB, {1toN} broadcast, and disp8*N boundary
	# pairs (broadcast boundaries scale with element size: 4-byte elements use
	# +/-508/512, 8-byte elements use +/-1016/1024).
	# vprolvd — variable rotate left, dword elements.
	vprolvd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprolvd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprolvd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvd	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprolvd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprolvd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvd	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvd	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprold — rotate left by imm8, dword elements.
	vprold	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprold	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprold	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprold	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprold	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprold	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprold	$123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vprold	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprold	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprold	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprold	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprold	$123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprold	$123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprold	$123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprold	$123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprold	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprold	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprold	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprold	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprold	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprold	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprold	$123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vprold	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprold	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprold	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprold	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprold	$123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprold	$123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vprold	$123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprold	$123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vprolvq — variable rotate left, qword elements.
	vprolvq	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprolvq	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprolvq	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprolvq	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolvq	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprolvq	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprolvq	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolvq	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprolvq	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprolq — rotate left by imm8, qword elements.
	vprolq	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolq	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprolq	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprolq	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprolq	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprolq	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprolq	$123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vprolq	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprolq	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprolq	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprolq	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprolq	$123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprolq	$123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprolq	$123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprolq	$123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprolq	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolq	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprolq	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprolq	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprolq	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprolq	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprolq	$123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vprolq	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprolq	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprolq	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprolq	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprolq	$123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprolq	$123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vprolq	$123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprolq	$123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# Rotate-right family: vprorvd/vprorvq (variable counts) and vprord/vprorq
	# (imm8 count). Mirrors the rotate-left coverage exactly: masking,
	# zero-masking, SIB, {1toN} broadcast, and disp8*N boundary pairs.
	# vprorvd — variable rotate right, dword elements.
	vprorvd	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprorvd	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprorvd	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	(%rcx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	508(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd	512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	-512(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvd	-516(%rdx){1to4}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvd	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprorvd	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprorvd	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	(%rcx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd	512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvd	-512(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvd	-516(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprord — rotate right by imm8, dword elements.
	vprord	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprord	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprord	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprord	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprord	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprord	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprord	$123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vprord	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprord	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprord	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprord	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprord	$123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprord	$123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprord	$123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vprord	$123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vprord	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprord	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprord	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprord	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprord	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprord	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprord	$123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vprord	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprord	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprord	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprord	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprord	$123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprord	$123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vprord	$123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vprord	$123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vprorvq — variable rotate right, qword elements.
	vprorvq	%xmm28, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprorvq	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprorvq	(%rcx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	2032(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq	2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	-2048(%rdx), %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq	-2064(%rdx), %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL} Disp8
	vprorvq	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorvq	%ymm28, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprorvq	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprorvq	(%rcx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	4064(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq	4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	-4096(%rdx), %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq	-4128(%rdx), %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorvq	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL} Disp8
	vprorvq	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{F,VL}
	# vprorq — rotate right by imm8, qword elements.
	vprorq	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorq	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vprorq	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vprorq	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vprorq	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vprorq	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vprorq	$123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vprorq	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprorq	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vprorq	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vprorq	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vprorq	$123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprorq	$123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprorq	$123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vprorq	$123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vprorq	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorq	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vprorq	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vprorq	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vprorq	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vprorq	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vprorq	$123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vprorq	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprorq	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vprorq	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vprorq	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vprorq	$123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprorq	$123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vprorq	$123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vprorq	$123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	# vrndscalepd / vrndscaleps — round packed double/single to a specified
	# number of fraction bits (imm8 control). Same coverage grid: masking,
	# SIB, {1toN} broadcast, and disp8*N boundary pairs (pd broadcasts 8-byte
	# elements, ps broadcasts 4-byte elements, hence differing boundaries).
	vrndscalepd	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscalepd	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vrndscalepd	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vrndscalepd	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, (%rcx){1to2}, %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, 1016(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, 1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vrndscalepd	$123, -1024(%rdx){1to2}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, -1032(%rdx){1to2}, %xmm30	 # AVX512{F,VL}
	vrndscalepd	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscalepd	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vrndscalepd	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vrndscalepd	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, (%rcx){1to4}, %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, 1016(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, 1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vrndscalepd	$123, -1024(%rdx){1to4}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscalepd	$123, -1032(%rdx){1to4}, %ymm30	 # AVX512{F,VL}
	vrndscaleps	$0xab, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscaleps	$0xab, %xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vrndscaleps	$0xab, %xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vrndscaleps	$123, %xmm29, %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, (%rcx), %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, 0x123(%rax,%r14,8), %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, (%rcx){1to4}, %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, 2032(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, 2048(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, -2048(%rdx), %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, -2064(%rdx), %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, 508(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, 512(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vrndscaleps	$123, -512(%rdx){1to4}, %xmm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, -516(%rdx){1to4}, %xmm30	 # AVX512{F,VL}
	vrndscaleps	$0xab, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscaleps	$0xab, %ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vrndscaleps	$0xab, %ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	vrndscaleps	$123, %ymm29, %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, (%rcx), %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, 0x123(%rax,%r14,8), %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, (%rcx){1to8}, %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, 4064(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, 4096(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, -4096(%rdx), %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, -4128(%rdx), %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, 508(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, 512(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	vrndscaleps	$123, -512(%rdx){1to8}, %ymm30	 # AVX512{F,VL} Disp8
	vrndscaleps	$123, -516(%rdx){1to8}, %ymm30	 # AVX512{F,VL}
	# vpcompressq — compress qword elements. Store-to-memory forms first
	# (plain, {%k7} masked, SIB, disp8*N boundary pairs), then reg-to-reg
	# forms with merge- and zero-masking.
	vpcompressq	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpcompressq	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpcompressq	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpcompressq	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpcompressq	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpcompressq	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpcompressq	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpcompressq	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpcompressq	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpcompressq	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpcompressq	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpcompressq	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpcompressq	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpcompressq	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpcompressq	%xmm29, %xmm30	 # AVX512{F,VL}
	vpcompressq	%xmm29, %xmm30{%k7}	 # AVX512{F,VL}
	vpcompressq	%xmm29, %xmm30{%k7}{z}	 # AVX512{F,VL}
	vpcompressq	%ymm29, %ymm30	 # AVX512{F,VL}
	vpcompressq	%ymm29, %ymm30{%k7}	 # AVX512{F,VL}
	vpcompressq	%ymm29, %ymm30{%k7}{z}	 # AVX512{F,VL}
	# Store-to-memory forms of vcvtps2ph (float32 -> float16 down-convert) and
	# vextractf32x4 / vextracti32x4 ($imm selects the 128-bit lane extracted
	# from a ymm source). Coverage: plain, {%k7} masked stores, SIB, and
	# disp8*N boundary pairs.
	vcvtps2ph	$0xab, %xmm30, (%rcx)	 # AVX512{F,VL}
	vcvtps2ph	$0xab, %xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vcvtps2ph	$123, %xmm30, (%rcx)	 # AVX512{F,VL}
	vcvtps2ph	$123, %xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vcvtps2ph	$123, %xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vcvtps2ph	$123, %xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vcvtps2ph	$123, %xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vcvtps2ph	$123, %xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vcvtps2ph	$0xab, %ymm30, (%rcx)	 # AVX512{F,VL}
	vcvtps2ph	$0xab, %ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vcvtps2ph	$123, %ymm30, (%rcx)	 # AVX512{F,VL}
	vcvtps2ph	$123, %ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vcvtps2ph	$123, %ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vcvtps2ph	$123, %ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vcvtps2ph	$123, %ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vcvtps2ph	$123, %ymm30, -2064(%rdx)	 # AVX512{F,VL}
	vextractf32x4	$0xab, %ymm29, (%rcx)	 # AVX512{F,VL}
	vextractf32x4	$0xab, %ymm29, (%rcx){%k7}	 # AVX512{F,VL}
	vextractf32x4	$123, %ymm29, (%rcx)	 # AVX512{F,VL}
	vextractf32x4	$123, %ymm29, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vextractf32x4	$123, %ymm29, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vextractf32x4	$123, %ymm29, 2048(%rdx)	 # AVX512{F,VL}
	vextractf32x4	$123, %ymm29, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vextractf32x4	$123, %ymm29, -2064(%rdx)	 # AVX512{F,VL}
	vextracti32x4	$0xab, %ymm29, (%rcx)	 # AVX512{F,VL}
	vextracti32x4	$0xab, %ymm29, (%rcx){%k7}	 # AVX512{F,VL}
	vextracti32x4	$123, %ymm29, (%rcx)	 # AVX512{F,VL}
	vextracti32x4	$123, %ymm29, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vextracti32x4	$123, %ymm29, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vextracti32x4	$123, %ymm29, 2048(%rdx)	 # AVX512{F,VL}
	vextracti32x4	$123, %ymm29, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vextracti32x4	$123, %ymm29, -2064(%rdx)	 # AVX512{F,VL}
	# Full-width vector store forms: vmovapd/vmovaps (aligned FP),
	# vmovdqa32/64 (aligned integer, masked at dword/qword granularity),
	# vmovdqu32/64 (unaligned integer), vmovupd/vmovups (unaligned FP).
	# Each family: plain, {%k7} masked, SIB, and disp8*N boundary pairs
	# (xmm +/-2032/2048, ymm +/-4064/4096 — full vector-length granularity).
	vmovapd	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovapd	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovapd	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovapd	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovapd	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovapd	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovapd	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovapd	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovapd	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovapd	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovapd	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovapd	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovapd	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovapd	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovaps	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovaps	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovaps	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovaps	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovaps	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovaps	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovaps	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovaps	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovaps	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovaps	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovaps	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovaps	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovaps	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovaps	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovdqa32	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovdqa32	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqa32	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqa32	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa32	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovdqa32	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa32	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovdqa32	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovdqa32	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqa32	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqa32	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa32	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovdqa32	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa32	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovdqa64	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovdqa64	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqa64	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqa64	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa64	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovdqa64	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa64	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovdqa64	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovdqa64	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqa64	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqa64	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa64	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovdqa64	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqa64	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovdqu32	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovdqu32	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqu32	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqu32	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu32	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovdqu32	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu32	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovdqu32	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovdqu32	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqu32	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqu32	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu32	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovdqu32	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu32	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovdqu64	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovdqu64	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqu64	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqu64	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu64	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovdqu64	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu64	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovdqu64	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovdqu64	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovdqu64	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovdqu64	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu64	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovdqu64	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovdqu64	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovupd	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovupd	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovupd	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovupd	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovupd	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovupd	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovupd	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovupd	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovupd	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovupd	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovupd	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovupd	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovupd	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovupd	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	vmovups	%xmm30, (%rcx)	 # AVX512{F,VL}
	vmovups	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovups	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovups	%xmm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vmovups	%xmm30, 2048(%rdx)	 # AVX512{F,VL}
	vmovups	%xmm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vmovups	%xmm30, -2064(%rdx)	 # AVX512{F,VL}
	vmovups	%ymm30, (%rcx)	 # AVX512{F,VL}
	vmovups	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vmovups	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vmovups	%ymm30, 4064(%rdx)	 # AVX512{F,VL} Disp8
	vmovups	%ymm30, 4096(%rdx)	 # AVX512{F,VL}
	vmovups	%ymm30, -4096(%rdx)	 # AVX512{F,VL} Disp8
	vmovups	%ymm30, -4128(%rdx)	 # AVX512{F,VL}
	# vpmov* down-convert store forms: truncate (vpmov..), signed saturate
	# (vpmovs..), and unsigned saturate (vpmovus..) from qword/dword elements
	# to byte/word/dword. Each family: plain, {%k7} masked, SIB, and disp8*N
	# boundary pairs; disp8 scaling follows the (smaller) memory operand
	# size, e.g. vpmovqb xmm writes 2 bytes so its disp8 limit is +/-254/256.
	vpmovqb	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovqb	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovqb	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovqb	%xmm30, 254(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqb	%xmm30, 256(%rdx)	 # AVX512{F,VL}
	vpmovqb	%xmm30, -256(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqb	%xmm30, -258(%rdx)	 # AVX512{F,VL}
	vpmovqb	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovqb	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovqb	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovqb	%ymm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqb	%ymm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovqb	%ymm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqb	%ymm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovsqb	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovsqb	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsqb	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsqb	%xmm30, 254(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqb	%xmm30, 256(%rdx)	 # AVX512{F,VL}
	vpmovsqb	%xmm30, -256(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqb	%xmm30, -258(%rdx)	 # AVX512{F,VL}
	vpmovsqb	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovsqb	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsqb	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsqb	%ymm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqb	%ymm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovsqb	%ymm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqb	%ymm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovusqb	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovusqb	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusqb	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusqb	%xmm30, 254(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqb	%xmm30, 256(%rdx)	 # AVX512{F,VL}
	vpmovusqb	%xmm30, -256(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqb	%xmm30, -258(%rdx)	 # AVX512{F,VL}
	vpmovusqb	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovusqb	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusqb	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusqb	%ymm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqb	%ymm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovusqb	%ymm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqb	%ymm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovqw	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovqw	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovqw	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovqw	%xmm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqw	%xmm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovqw	%xmm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqw	%xmm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovqw	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovqw	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovqw	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovqw	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqw	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovqw	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqw	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovsqw	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovsqw	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsqw	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsqw	%xmm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqw	%xmm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovsqw	%xmm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqw	%xmm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovsqw	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovsqw	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsqw	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsqw	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqw	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovsqw	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqw	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovusqw	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovusqw	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusqw	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusqw	%xmm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqw	%xmm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovusqw	%xmm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqw	%xmm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovusqw	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovusqw	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusqw	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusqw	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqw	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovusqw	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqw	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovqd	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovqd	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovqd	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovqd	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqd	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovqd	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqd	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovqd	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovqd	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovqd	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovqd	%ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqd	%ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vpmovqd	%ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vpmovqd	%ymm30, -2064(%rdx)	 # AVX512{F,VL}
	vpmovsqd	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovsqd	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsqd	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsqd	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqd	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovsqd	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqd	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovsqd	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovsqd	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsqd	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsqd	%ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqd	%ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vpmovsqd	%ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsqd	%ymm30, -2064(%rdx)	 # AVX512{F,VL}
	vpmovusqd	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovusqd	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusqd	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusqd	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqd	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovusqd	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqd	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovusqd	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovusqd	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusqd	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusqd	%ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqd	%ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vpmovusqd	%ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusqd	%ymm30, -2064(%rdx)	 # AVX512{F,VL}
	vpmovdb	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovdb	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovdb	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovdb	%xmm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdb	%xmm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovdb	%xmm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdb	%xmm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovdb	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovdb	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovdb	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovdb	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdb	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovdb	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdb	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovsdb	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovsdb	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsdb	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsdb	%xmm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdb	%xmm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovsdb	%xmm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdb	%xmm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovsdb	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovsdb	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsdb	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsdb	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdb	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovsdb	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdb	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovusdb	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovusdb	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusdb	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusdb	%xmm30, 508(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdb	%xmm30, 512(%rdx)	 # AVX512{F,VL}
	vpmovusdb	%xmm30, -512(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdb	%xmm30, -516(%rdx)	 # AVX512{F,VL}
	vpmovusdb	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovusdb	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusdb	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusdb	%ymm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdb	%ymm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovusdb	%ymm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdb	%ymm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovdw	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovdw	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovdw	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovdw	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdw	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovdw	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdw	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovdw	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovdw	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovdw	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovdw	%ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdw	%ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vpmovdw	%ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vpmovdw	%ymm30, -2064(%rdx)	 # AVX512{F,VL}
	vpmovsdw	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovsdw	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsdw	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsdw	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdw	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovsdw	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdw	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovsdw	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovsdw	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovsdw	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovsdw	%ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdw	%ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vpmovsdw	%ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vpmovsdw	%ymm30, -2064(%rdx)	 # AVX512{F,VL}
	vpmovusdw	%xmm30, (%rcx)	 # AVX512{F,VL}
	vpmovusdw	%xmm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusdw	%xmm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusdw	%xmm30, 1016(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdw	%xmm30, 1024(%rdx)	 # AVX512{F,VL}
	vpmovusdw	%xmm30, -1024(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdw	%xmm30, -1032(%rdx)	 # AVX512{F,VL}
	vpmovusdw	%ymm30, (%rcx)	 # AVX512{F,VL}
	vpmovusdw	%ymm30, (%rcx){%k7}	 # AVX512{F,VL}
	vpmovusdw	%ymm30, 0x123(%rax,%r14,8)	 # AVX512{F,VL}
	vpmovusdw	%ymm30, 2032(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdw	%ymm30, 2048(%rdx)	 # AVX512{F,VL}
	vpmovusdw	%ymm30, -2048(%rdx)	 # AVX512{F,VL} Disp8
	vpmovusdw	%ymm30, -2064(%rdx)	 # AVX512{F,VL}
vcvttpd2udq %xmm29, %xmm30 # AVX512{F,VL}
vcvttpd2udq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvttpd2udq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttpd2udqx (%rcx), %xmm30 # AVX512{F,VL}
vcvttpd2udqx 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttpd2udqx (%rcx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2udqx 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqx -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqx 1016(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx 1024(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2udqx -1024(%rdx){1to2}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqx -1032(%rdx){1to2}, %xmm30 # AVX512{F,VL}
vcvttpd2udq %ymm29, %xmm30 # AVX512{F,VL}
vcvttpd2udq %ymm29, %xmm30{%k7} # AVX512{F,VL}
vcvttpd2udq %ymm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttpd2udqy (%rcx), %xmm30 # AVX512{F,VL}
vcvttpd2udqy 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttpd2udqy (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvttpd2udqy 4064(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy 4096(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqy -4096(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy -4128(%rdx), %xmm30 # AVX512{F,VL}
vcvttpd2udqy 1016(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy 1024(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttpd2udqy -1024(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttpd2udqy -1032(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq %xmm29, %xmm30 # AVX512{F,VL}
vcvttps2udq %xmm29, %xmm30{%k7} # AVX512{F,VL}
vcvttps2udq %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%rcx), %xmm30 # AVX512{F,VL}
vcvttps2udq 0x123(%rax,%r14,8), %xmm30 # AVX512{F,VL}
vcvttps2udq (%rcx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq 2032(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq 2048(%rdx), %xmm30 # AVX512{F,VL}
vcvttps2udq -2048(%rdx), %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq -2064(%rdx), %xmm30 # AVX512{F,VL}
vcvttps2udq 508(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq 512(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq -512(%rdx){1to4}, %xmm30 # AVX512{F,VL} Disp8
vcvttps2udq -516(%rdx){1to4}, %xmm30 # AVX512{F,VL}
vcvttps2udq %ymm29, %ymm30 # AVX512{F,VL}
vcvttps2udq %ymm29, %ymm30{%k7} # AVX512{F,VL}
vcvttps2udq %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vcvttps2udq (%rcx), %ymm30 # AVX512{F,VL}
vcvttps2udq 0x123(%rax,%r14,8), %ymm30 # AVX512{F,VL}
vcvttps2udq (%rcx){1to8}, %ymm30 # AVX512{F,VL}
vcvttps2udq 4064(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq 4096(%rdx), %ymm30 # AVX512{F,VL}
vcvttps2udq -4096(%rdx), %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq -4128(%rdx), %ymm30 # AVX512{F,VL}
vcvttps2udq 508(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq 512(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vcvttps2udq -512(%rdx){1to8}, %ymm30 # AVX512{F,VL} Disp8
vcvttps2udq -516(%rdx){1to8}, %ymm30 # AVX512{F,VL}
vpermi2d %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2d %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2d (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2d -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2d %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2d %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2d (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2d -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2d -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2q %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2q (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2q -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2q %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2q %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2q (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2q -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2q -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2ps %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2ps (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps (%rcx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps 508(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps 512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps -512(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2ps -516(%rdx){1to4}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2ps %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2ps %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2ps (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps (%rcx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps 508(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps 512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2ps -512(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2ps -516(%rdx){1to8}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd %xmm28, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd %xmm28, %xmm29, %xmm30{%k7} # AVX512{F,VL}
vpermi2pd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{F,VL}
vpermi2pd (%rcx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd (%rcx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd 2032(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd 2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd -2048(%rdx), %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd -2064(%rdx), %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL} Disp8
vpermi2pd -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{F,VL}
vpermi2pd %ymm28, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd %ymm28, %ymm29, %ymm30{%k7} # AVX512{F,VL}
vpermi2pd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{F,VL}
vpermi2pd (%rcx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd (%rcx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd 4064(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd 4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd -4096(%rdx), %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd -4128(%rdx), %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vpermi2pd -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL} Disp8
vpermi2pd -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{F,VL}
vptestnmd %xmm28, %xmm29, %k5 # AVX512{F,VL}
vptestnmd %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vptestnmd (%rcx), %xmm29, %k5 # AVX512{F,VL}
vptestnmd 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vptestnmd (%rcx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vptestnmd 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmd -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmd 508(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vptestnmd -512(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -516(%rdx){1to4}, %xmm29, %k5 # AVX512{F,VL}
vptestnmd %ymm28, %ymm29, %k5 # AVX512{F,VL}
vptestnmd %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vptestnmd (%rcx), %ymm29, %k5 # AVX512{F,VL}
vptestnmd 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vptestnmd (%rcx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vptestnmd 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmd -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmd 508(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd 512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vptestnmd -512(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmd -516(%rdx){1to8}, %ymm29, %k5 # AVX512{F,VL}
vptestnmq %xmm28, %xmm29, %k5 # AVX512{F,VL}
vptestnmq %xmm28, %xmm29, %k5{%k7} # AVX512{F,VL}
vptestnmq (%rcx), %xmm29, %k5 # AVX512{F,VL}
vptestnmq 0x123(%rax,%r14,8), %xmm29, %k5 # AVX512{F,VL}
vptestnmq (%rcx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vptestnmq 2032(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 2048(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmq -2048(%rdx), %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -2064(%rdx), %xmm29, %k5 # AVX512{F,VL}
vptestnmq 1016(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vptestnmq -1024(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -1032(%rdx){1to2}, %xmm29, %k5 # AVX512{F,VL}
vptestnmq %ymm28, %ymm29, %k5 # AVX512{F,VL}
vptestnmq %ymm28, %ymm29, %k5{%k7} # AVX512{F,VL}
vptestnmq (%rcx), %ymm29, %k5 # AVX512{F,VL}
vptestnmq 0x123(%rax,%r14,8), %ymm29, %k5 # AVX512{F,VL}
vptestnmq (%rcx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vptestnmq 4064(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 4096(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmq -4096(%rdx), %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -4128(%rdx), %ymm29, %k5 # AVX512{F,VL}
vptestnmq 1016(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq 1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
vptestnmq -1024(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL} Disp8
vptestnmq -1032(%rdx){1to4}, %ymm29, %k5 # AVX512{F,VL}
# Switch to Intel syntax for the remainder of the file: the same
# AVX512{F,VL} operand-combination coverage is repeated so both parsers
# are exercised.  Memory operand sizes are spelled with PTR/BCST
# qualifiers; broadcasts use the {1toN} suffix on the memory operand.
.intel_syntax noprefix
vaddpd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vaddpd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vaddpd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vaddpd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vaddpd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vaddpd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
vaddpd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vaddpd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vaddpd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vaddpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vaddpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vaddpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
vaddps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vaddps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vaddps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vaddps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vaddps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vaddps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vaddps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
vaddps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vaddps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vaddps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vaddps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vaddps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vaddps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vaddps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vaddps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# Immediate-carrying form: both a boundary imm8 (0xab) and a mid-range
# value (123) are checked.
valignd xmm30, xmm29, xmm28, 0xab	 # AVX512{F,VL}
valignd xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
valignd xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
valignd xmm30, xmm29, xmm28, 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
valignd xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
valignd xmm30, xmm29, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
valignd xmm30, xmm29, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
valignd ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
valignd ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
valignd ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
valignd ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
valignd ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
valignd ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vblendmpd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vblendmpd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vblendmpd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vblendmpd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vblendmpd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vblendmpd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vblendmpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vblendmpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
vblendmps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vblendmps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vblendmps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vblendmps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vblendmps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vblendmps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
vblendmps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vblendmps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vblendmps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vblendmps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vblendmps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vblendmps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# Tuple-type T4 broadcasts of a 128-bit memory source into ymm.
vbroadcastf32x4 ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vbroadcastf32x4 ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30{k7}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30{k7}{z}, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vbroadcasti32x4 ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}, QWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}{z}, QWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vbroadcastsd ymm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vbroadcastsd ymm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vbroadcastsd ymm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vbroadcastsd ymm30, xmm29	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}, xmm29	 # AVX512{F,VL}
vbroadcastsd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss xmm30{k7}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss xmm30{k7}{z}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
vbroadcastss xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
vbroadcastss xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
vbroadcastss xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss ymm30{k7}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss ymm30{k7}{z}, DWORD PTR [rcx]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
vbroadcastss ymm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
vbroadcastss ymm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
vbroadcastss ymm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
vbroadcastss xmm30, xmm29	 # AVX512{F,VL}
vbroadcastss xmm30{k7}, xmm29	 # AVX512{F,VL}
vbroadcastss xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vbroadcastss ymm30, xmm29	 # AVX512{F,VL}
vbroadcastss ymm30{k7}, xmm29	 # AVX512{F,VL}
vbroadcastss ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
# Compares write an opmask (k5), optionally masked by {k7}.
vcmppd k5, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmppd k5{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmppd k5, xmm29, xmm28, 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, [rcx]{1to2}, 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
vcmppd k5, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
vcmppd k5, xmm29, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, xmm29, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmppd k5{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmppd k5, ymm29, ymm28, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
vcmppd k5, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
vcmppd k5, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmppd k5, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmpps k5{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
vcmpps k5, xmm29, xmm28, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
vcmpps k5, xmm29, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, xmm29, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, xmm29, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmpps k5{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
vcmpps k5, ymm29, ymm28, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
vcmpps k5, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
vcmpps k5, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
vcmpps k5, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
# Compress: register->memory stores (Tuple1Scalar disp8 scaling) and
# register->register forms with full masking.
vcompresspd XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vcompresspd XMMWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vcompresspd XMMWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vcompresspd XMMWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vcompresspd YMMWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vcompresspd YMMWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vcompresspd YMMWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vcompresspd xmm30, xmm29	 # AVX512{F,VL}
vcompresspd xmm30{k7}, xmm29	 # AVX512{F,VL}
vcompresspd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcompresspd ymm30, ymm29	 # AVX512{F,VL}
vcompresspd ymm30{k7}, ymm29	 # AVX512{F,VL}
vcompresspd ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vcompressps XMMWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vcompressps XMMWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vcompressps XMMWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rdx+508], ymm30	 # AVX512{F,VL} Disp8
vcompressps YMMWORD PTR [rdx+512], ymm30	 # AVX512{F,VL}
vcompressps YMMWORD PTR [rdx-512], ymm30	 # AVX512{F,VL} Disp8
vcompressps YMMWORD PTR [rdx-516], ymm30	 # AVX512{F,VL}
vcompressps xmm30, xmm29	 # AVX512{F,VL}
vcompressps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcompressps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcompressps ymm30, ymm29	 # AVX512{F,VL}
vcompressps ymm30{k7}, ymm29	 # AVX512{F,VL}
vcompressps ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
# Widening dq->pd conversion reads only half a vector from memory
# (QWORD for xmm, XMMWORD for ymm destinations).
vcvtdq2pd xmm30, xmm29	 # AVX512{F,VL}
vcvtdq2pd xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtdq2pd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2pd xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
vcvtdq2pd xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
vcvtdq2pd xmm30, [rdx+508]{1to2}	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, [rdx+512]{1to2}	 # AVX512{F,VL}
vcvtdq2pd xmm30, [rdx-512]{1to2}	 # AVX512{F,VL} Disp8
vcvtdq2pd xmm30, [rdx-516]{1to2}	 # AVX512{F,VL}
vcvtdq2pd ymm30, xmm29	 # AVX512{F,VL}
vcvtdq2pd ymm30{k7}, xmm29	 # AVX512{F,VL}
vcvtdq2pd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2pd ymm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtdq2pd ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtdq2pd ymm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtdq2pd ymm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2pd ymm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvtdq2ps xmm30, xmm29	 # AVX512{F,VL}
vcvtdq2ps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtdq2ps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2ps xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtdq2ps xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtdq2ps xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
vcvtdq2ps xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vcvtdq2ps xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
vcvtdq2ps ymm30, ymm29	 # AVX512{F,VL}
vcvtdq2ps ymm30{k7}, ymm29	 # AVX512{F,VL}
vcvtdq2ps ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtdq2ps ymm30, [rcx]{1to8}	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtdq2ps ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtdq2ps ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
vcvtdq2ps ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vcvtdq2ps ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
# Narrowing pd->dq: "QWORD BCST" spells out the broadcast element size
# where the plain [mem]{1toN} form would be ambiguous in Intel syntax.
vcvtpd2dq xmm30, xmm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2dq xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtpd2dq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{F,VL}
vcvtpd2dq xmm30, ymm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}, ymm29	 # AVX512{F,VL}
vcvtpd2dq xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2dq xmm30, [rcx]{1to4}	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vcvtpd2dq xmm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx+1024]{1to4}	 # AVX512{F,VL}
vcvtpd2dq xmm30, QWORD BCST [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vcvtpd2dq xmm30, QWORD BCST [rdx-1032]{1to4}	 # AVX512{F,VL}
vcvtpd2ps xmm30, xmm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}, xmm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vcvtpd2ps xmm30, [rcx]{1to2}	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vcvtpd2ps xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx+1024]{1to2}	 # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx-1032]{1to2}	 # AVX512{F,VL}
vcvtpd2ps xmm30, ymm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}, ymm29	 # AVX512{F,VL}
vcvtpd2ps xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtpd2ps xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvtpd2ps xmm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx+1024]{1to4} # AVX512{F,VL}
vcvtpd2ps xmm30, QWORD BCST [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vcvtpd2ps xmm30, QWORD BCST [rdx-1032]{1to4} # AVX512{F,VL}
vcvtpd2udq xmm30, xmm29 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtpd2udq xmm30, [rcx]{1to2} # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtpd2udq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx+1024]{1to2} # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx-1032]{1to2} # AVX512{F,VL}
vcvtpd2udq xmm30, ymm29 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}, ymm29 # AVX512{F,VL}
vcvtpd2udq xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtpd2udq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvtpd2udq xmm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx+1024]{1to4} # AVX512{F,VL}
vcvtpd2udq xmm30, QWORD BCST [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vcvtpd2udq xmm30, QWORD BCST [rdx-1032]{1to4} # AVX512{F,VL}
vcvtph2ps xmm30, xmm29 # AVX512{F,VL}
vcvtph2ps xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtph2ps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vcvtph2ps xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vcvtph2ps xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vcvtph2ps xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vcvtph2ps ymm30, xmm29 # AVX512{F,VL}
vcvtph2ps ymm30{k7}, xmm29 # AVX512{F,VL}
vcvtph2ps ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtph2ps ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtph2ps ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtph2ps ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtps2dq xmm30, xmm29 # AVX512{F,VL}
vcvtps2dq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtps2dq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtps2dq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtps2dq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtps2dq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtps2dq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtps2dq xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvtps2dq xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvtps2dq xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvtps2dq xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvtps2dq ymm30, ymm29 # AVX512{F,VL}
vcvtps2dq ymm30{k7}, ymm29 # AVX512{F,VL}
vcvtps2dq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtps2dq ymm30, [rcx]{1to8} # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvtps2dq ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvtps2dq ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvtps2dq ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvtps2dq ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vcvtps2dq ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vcvtps2dq ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vcvtps2dq ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vcvtps2pd xmm30, xmm29 # AVX512{F,VL}
vcvtps2pd xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtps2pd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtps2pd xmm30, [rcx]{1to2} # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vcvtps2pd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vcvtps2pd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vcvtps2pd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vcvtps2pd xmm30, [rdx+508]{1to2} # AVX512{F,VL} Disp8
vcvtps2pd xmm30, [rdx+512]{1to2} # AVX512{F,VL}
vcvtps2pd xmm30, [rdx-512]{1to2} # AVX512{F,VL} Disp8
vcvtps2pd xmm30, [rdx-516]{1to2} # AVX512{F,VL}
vcvtps2pd ymm30, xmm29 # AVX512{F,VL}
vcvtps2pd ymm30{k7}, xmm29 # AVX512{F,VL}
vcvtps2pd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtps2pd ymm30, [rcx]{1to4} # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtps2pd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtps2pd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtps2pd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtps2pd ymm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvtps2pd ymm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvtps2pd ymm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvtps2pd ymm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvtps2ph xmm30, xmm29, 0xab # AVX512{F,VL}
vcvtps2ph xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vcvtps2ph xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vcvtps2ph xmm30, xmm29, 123 # AVX512{F,VL}
vcvtps2ph xmm30, ymm29, 0xab # AVX512{F,VL}
vcvtps2ph xmm30{k7}, ymm29, 0xab # AVX512{F,VL}
vcvtps2ph xmm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vcvtps2ph xmm30, ymm29, 123 # AVX512{F,VL}
vcvtps2udq xmm30, xmm29 # AVX512{F,VL}
vcvtps2udq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtps2udq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtps2udq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtps2udq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtps2udq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtps2udq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtps2udq xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvtps2udq xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvtps2udq xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvtps2udq xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvtps2udq ymm30, ymm29 # AVX512{F,VL}
vcvtps2udq ymm30{k7}, ymm29 # AVX512{F,VL}
vcvtps2udq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtps2udq ymm30, [rcx]{1to8} # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvtps2udq ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvtps2udq ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvtps2udq ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvtps2udq ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vcvtps2udq ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vcvtps2udq ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vcvtps2udq ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vcvttpd2dq xmm30, xmm29 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttpd2dq xmm30, [rcx]{1to2} # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvttpd2dq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx+1024]{1to2} # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx-1032]{1to2} # AVX512{F,VL}
vcvttpd2dq xmm30, ymm29 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}, ymm29 # AVX512{F,VL}
vcvttpd2dq xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttpd2dq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvttpd2dq xmm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx+1024]{1to4} # AVX512{F,VL}
vcvttpd2dq xmm30, QWORD BCST [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vcvttpd2dq xmm30, QWORD BCST [rdx-1032]{1to4} # AVX512{F,VL}
vcvttps2dq xmm30, xmm29 # AVX512{F,VL}
vcvttps2dq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvttps2dq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttps2dq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvttps2dq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvttps2dq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvttps2dq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvttps2dq xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvttps2dq xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvttps2dq xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvttps2dq xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvttps2dq ymm30, ymm29 # AVX512{F,VL}
vcvttps2dq ymm30{k7}, ymm29 # AVX512{F,VL}
vcvttps2dq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttps2dq ymm30, [rcx]{1to8} # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvttps2dq ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvttps2dq ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvttps2dq ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvttps2dq ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vcvttps2dq ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vcvttps2dq ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vcvttps2dq ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vcvtudq2pd xmm30, xmm29 # AVX512{F,VL}
vcvtudq2pd xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtudq2pd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtudq2pd xmm30, [rcx]{1to2} # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vcvtudq2pd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vcvtudq2pd xmm30, [rdx+508]{1to2} # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, [rdx+512]{1to2} # AVX512{F,VL}
vcvtudq2pd xmm30, [rdx-512]{1to2} # AVX512{F,VL} Disp8
vcvtudq2pd xmm30, [rdx-516]{1to2} # AVX512{F,VL}
vcvtudq2pd ymm30, xmm29 # AVX512{F,VL}
vcvtudq2pd ymm30{k7}, xmm29 # AVX512{F,VL}
vcvtudq2pd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtudq2pd ymm30, [rcx]{1to4} # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtudq2pd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtudq2pd ymm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvtudq2pd ymm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvtudq2pd ymm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm30, xmm29 # AVX512{F,VL}
vcvtudq2ps xmm30{k7}, xmm29 # AVX512{F,VL}
vcvtudq2ps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtudq2ps xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvtudq2ps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvtudq2ps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvtudq2ps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvtudq2ps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvtudq2ps ymm30, ymm29 # AVX512{F,VL}
vcvtudq2ps ymm30{k7}, ymm29 # AVX512{F,VL}
vcvtudq2ps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvtudq2ps ymm30, [rcx]{1to8} # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvtudq2ps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvtudq2ps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vcvtudq2ps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vcvtudq2ps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vdivpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vdivpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vdivpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vdivpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vdivpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vdivpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vdivpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vdivpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vdivpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vdivpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vdivpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vdivpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vdivpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vdivpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vdivps xmm30, xmm29, xmm28 # AVX512{F,VL}
vdivps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vdivps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vdivps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vdivps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vdivps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vdivps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vdivps ymm30, ymm29, ymm28 # AVX512{F,VL}
vdivps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vdivps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vdivps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vdivps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vdivps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vdivps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vdivps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vexpandpd xmm30, XMMWORD PTR [rdx+1024] # AVX512{F,VL}
vexpandpd xmm30, XMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vexpandpd xmm30, XMMWORD PTR [rdx-1032] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vexpandpd ymm30, YMMWORD PTR [rdx+1024] # AVX512{F,VL}
vexpandpd ymm30, YMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vexpandpd ymm30, YMMWORD PTR [rdx-1032] # AVX512{F,VL}
vexpandpd xmm30, xmm29 # AVX512{F,VL}
vexpandpd xmm30{k7}, xmm29 # AVX512{F,VL}
vexpandpd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vexpandpd ymm30, ymm29 # AVX512{F,VL}
vexpandpd ymm30{k7}, ymm29 # AVX512{F,VL}
vexpandpd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vexpandps xmm30, XMMWORD PTR [rdx+512] # AVX512{F,VL}
vexpandps xmm30, XMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vexpandps xmm30, XMMWORD PTR [rdx-516] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vexpandps ymm30, YMMWORD PTR [rdx+512] # AVX512{F,VL}
vexpandps ymm30, YMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vexpandps ymm30, YMMWORD PTR [rdx-516] # AVX512{F,VL}
vexpandps xmm30, xmm29 # AVX512{F,VL}
vexpandps xmm30{k7}, xmm29 # AVX512{F,VL}
vexpandps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vexpandps ymm30, ymm29 # AVX512{F,VL}
vexpandps ymm30{k7}, ymm29 # AVX512{F,VL}
vexpandps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vextractf32x4 xmm30, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 xmm30{k7}, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 xmm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vextractf32x4 xmm30, ymm29, 123 # AVX512{F,VL}
vextracti32x4 xmm30, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 xmm30{k7}, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 xmm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vextracti32x4 xmm30, ymm29, 123 # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmadd132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmadd132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmadd132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmadd132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmadd213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmadd213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfmadd213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfmadd213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfmadd213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfmadd213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfmadd231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfmadd231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfmadd231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfmadd231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfmadd231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfmadd231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfmadd231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfmadd231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# AVX512{F,VL} FMA-family encoding test patterns (Intel syntax, gas testsuite).
# Each 14-line group exercises one mnemonic at one vector length: reg-reg,
# {k7} merge-masking, {k7}{z} zero-masking, plain and SIB memory operands,
# embedded broadcast, and displacement values straddling the EVEX compressed
# disp8*N encoding limit — lines tagged "Disp8" have displacements that fit
# the scaled 8-bit form; the adjacent untagged lines are just past the limit.
# vfmadd231ps, xmm forms
vfmadd231ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmadd231ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmadd231ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmadd231ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmadd231ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmadd231ps, ymm forms
vfmadd231ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmadd231ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmadd231ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmadd231ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmadd231ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmaddsub132pd, xmm forms
vfmaddsub132pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub132pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub132pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmaddsub132pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmaddsub132pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmaddsub132pd, ymm forms
vfmaddsub132pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub132pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub132pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmaddsub132pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub132pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmaddsub132ps, xmm forms
vfmaddsub132ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub132ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub132ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmaddsub132ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub132ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmaddsub132ps, ymm forms
vfmaddsub132ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub132ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub132ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmaddsub132ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmaddsub132ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmaddsub213pd, xmm forms
vfmaddsub213pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub213pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub213pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmaddsub213pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmaddsub213pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmaddsub213pd, ymm forms
vfmaddsub213pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub213pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub213pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmaddsub213pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub213pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmaddsub213ps, xmm forms
vfmaddsub213ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub213ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub213ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmaddsub213ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub213ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmaddsub213ps, ymm forms
vfmaddsub213ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub213ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub213ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmaddsub213ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmaddsub213ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmaddsub231pd, xmm forms
vfmaddsub231pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub231pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub231pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmaddsub231pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmaddsub231pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmaddsub231pd, ymm forms
vfmaddsub231pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub231pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub231pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmaddsub231pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub231pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmaddsub231ps, xmm forms
vfmaddsub231ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub231ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub231ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmaddsub231ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmaddsub231ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmaddsub231ps, ymm forms
vfmaddsub231ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub231ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub231ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmaddsub231ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmaddsub231ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmsub132pd, xmm forms
vfmsub132pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsub132pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub132pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmsub132pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmsub132pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmsub132pd, ymm forms
vfmsub132pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsub132pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub132pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmsub132pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmsub132pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmsub132ps, xmm forms
vfmsub132ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsub132ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub132ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmsub132ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmsub132ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmsub132ps, ymm forms
vfmsub132ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsub132ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub132ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmsub132ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmsub132ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmsub213pd, xmm forms
vfmsub213pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsub213pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub213pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmsub213pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmsub213pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmsub213pd, ymm forms
vfmsub213pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsub213pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub213pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmsub213pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmsub213pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmsub213ps, xmm forms
vfmsub213ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsub213ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub213ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmsub213ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmsub213ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmsub213ps, ymm forms
vfmsub213ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsub213ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub213ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmsub213ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmsub213ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmsub231pd, xmm forms
vfmsub231pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsub231pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub231pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmsub231pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmsub231pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmsub231pd, ymm forms
vfmsub231pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsub231pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub231pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmsub231pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmsub231pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmsub231ps, xmm forms
vfmsub231ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsub231ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub231ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmsub231ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmsub231ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmsub231ps, ymm forms
vfmsub231ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsub231ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub231ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmsub231ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmsub231ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmsubadd132pd, xmm forms
vfmsubadd132pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd132pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd132pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmsubadd132pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmsubadd132pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmsubadd132pd, ymm forms
vfmsubadd132pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd132pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd132pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmsubadd132pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd132pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmsubadd132ps, xmm forms
vfmsubadd132ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd132ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd132ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmsubadd132ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd132ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmsubadd132ps, ymm forms
vfmsubadd132ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd132ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd132ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmsubadd132ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmsubadd132ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmsubadd213pd, xmm forms
vfmsubadd213pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd213pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd213pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmsubadd213pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmsubadd213pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmsubadd213pd, ymm forms
vfmsubadd213pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd213pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd213pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmsubadd213pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd213pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmsubadd213ps, xmm forms
vfmsubadd213ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd213ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd213ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmsubadd213ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd213ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmsubadd213ps, ymm forms
vfmsubadd213ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd213ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd213ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmsubadd213ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmsubadd213ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfmsubadd231pd, xmm forms
vfmsubadd231pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd231pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd231pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfmsubadd231pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfmsubadd231pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfmsubadd231pd, ymm forms
vfmsubadd231pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd231pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd231pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfmsubadd231pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd231pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfmsubadd231ps, xmm forms
vfmsubadd231ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd231ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd231ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfmsubadd231ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfmsubadd231ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
# vfmsubadd231ps, ymm forms
vfmsubadd231ps ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd231ps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd231ps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
vfmsubadd231ps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
vfmsubadd231ps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
# vfnmadd132pd, xmm forms
vfnmadd132pd xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfnmadd132pd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfnmadd132pd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
vfnmadd132pd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
vfnmadd132pd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
# vfnmadd132pd, ymm forms
vfnmadd132pd ymm30, ymm29, ymm28	 # AVX512{F,VL}
vfnmadd132pd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
vfnmadd132pd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
vfnmadd132pd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
vfnmadd132pd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
# vfnmadd132ps, xmm forms
vfnmadd132ps xmm30, xmm29, xmm28	 # AVX512{F,VL}
vfnmadd132ps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
vfnmadd132ps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
vfnmadd132ps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
vfnmadd132ps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmadd132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmadd213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmadd213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmadd213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmadd213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmadd231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmadd231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmadd231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmadd231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmadd231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmadd231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmadd231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmadd231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmsub132pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub132pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmsub132pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub132pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmsub132ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub132ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmsub132ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub132ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmsub213pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub213pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmsub213pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub213pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmsub213ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub213ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmsub213ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub213ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vfnmsub231pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vfnmsub231pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vfnmsub231pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vfnmsub231pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vfnmsub231ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vfnmsub231ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vfnmsub231ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vfnmsub231ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vgatherdpd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherdpd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherdpd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherdpd ymm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherdpd ymm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherdpd ymm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherdps xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherdps xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherdps xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherdps ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vgatherdps ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vgatherdps ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vgatherqpd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherqpd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherqpd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherqpd ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vgatherqpd ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vgatherqpd ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vgatherqps xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vgatherqps xmm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vgatherqps xmm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vgetexppd xmm30, xmm29 # AVX512{F,VL}
vgetexppd xmm30{k7}, xmm29 # AVX512{F,VL}
vgetexppd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexppd xmm30, [rcx]{1to2} # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vgetexppd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vgetexppd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vgetexppd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vgetexppd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vgetexppd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vgetexppd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vgetexppd ymm30, ymm29 # AVX512{F,VL}
vgetexppd ymm30{k7}, ymm29 # AVX512{F,VL}
vgetexppd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexppd ymm30, [rcx]{1to4} # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vgetexppd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vgetexppd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vgetexppd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vgetexppd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vgetexppd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vgetexppd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vgetexpps xmm30, xmm29 # AVX512{F,VL}
vgetexpps xmm30{k7}, xmm29 # AVX512{F,VL}
vgetexpps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexpps xmm30, [rcx]{1to4} # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vgetexpps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vgetexpps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vgetexpps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vgetexpps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vgetexpps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vgetexpps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vgetexpps ymm30, ymm29 # AVX512{F,VL}
vgetexpps ymm30{k7}, ymm29 # AVX512{F,VL}
vgetexpps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vgetexpps ymm30, [rcx]{1to8} # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vgetexpps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vgetexpps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vgetexpps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vgetexpps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vgetexpps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vgetexpps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vgetmantpd xmm30, xmm29, 0xab # AVX512{F,VL}
vgetmantpd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vgetmantpd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vgetmantpd xmm30, xmm29, 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantpd xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vgetmantpd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vgetmantpd xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vgetmantpd xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vgetmantpd xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vgetmantpd ymm30, ymm29, 0xab # AVX512{F,VL}
vgetmantpd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vgetmantpd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vgetmantpd ymm30, ymm29, 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantpd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vgetmantpd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vgetmantpd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vgetmantpd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantpd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm30, xmm29, 0xab # AVX512{F,VL}
vgetmantps xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vgetmantps xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vgetmantps xmm30, xmm29, 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantps xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vgetmantps xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vgetmantps xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vgetmantps xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vgetmantps xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vgetmantps ymm30, ymm29, 0xab # AVX512{F,VL}
vgetmantps ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vgetmantps ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vgetmantps ymm30, ymm29, 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vgetmantps ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vgetmantps ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vgetmantps ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vgetmantps ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vgetmantps ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, xmm28, 0xab # AVX512{F,VL}
vinsertf32x4 ymm30{k7}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinsertf32x4 ymm30{k7}{z}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, xmm28, 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vinsertf32x4 ymm30, ymm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, xmm28, 0xab # AVX512{F,VL}
vinserti32x4 ymm30{k7}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinserti32x4 ymm30{k7}{z}, ymm29, xmm28, 0xab # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, xmm28, 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vinserti32x4 ymm30, ymm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vmaxpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vmaxpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vmaxpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmaxpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vmaxpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vmaxpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vmaxpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vmaxpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vmaxpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vmaxpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vmaxpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vmaxpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
	# --------------------------------------------------------------------
	# AVX512{F,VL} Intel-syntax encoding test vectors (assembler input).
	# NOTE(review): the expected disassembly for these lines lives in a
	# companion .d dump file, so the instruction text must stay unchanged
	# byte-for-byte.
	#
	# Each mnemonic is exercised with a fixed template of operand forms:
	#   - register-register (and merge-masking {k7} / zero-masking {k7}{z})
	#   - plain memory operand and scaled-index addressing
	#   - element broadcast from memory ({1to2}/{1to4}/{1to8})
	#   - displacements straddling the EVEX compressed-displacement
	#     (disp8*N) boundary: lines tagged "Disp8" are the largest +/-
	#     displacement that still encodes as a scaled 8-bit disp; the
	#     line that follows each one is just past the boundary and must
	#     fall back to a 32-bit displacement.
	# N is the memory-access size (16 for XMMWORD, 32 for YMMWORD, 8/4
	# for qword/dword broadcast or scalar elements), which is why the
	# boundary values differ per form: 2032/2048, 4064/4096, 1016/1024,
	# 508/512.
	# --------------------------------------------------------------------
	vmaxpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vmaxpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vmaxpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vmaxpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vmaxps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vmaxps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmaxps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmaxps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vmaxps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vmaxps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vmaxps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vmaxps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vmaxps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmaxps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmaxps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vmaxps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vmaxps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vmaxps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vminpd xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vminpd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vminpd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vminpd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vminpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vminpd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vminpd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vminpd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vminpd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vminpd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vminpd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vminpd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vminpd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vminpd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vminpd ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vminpd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vminpd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vminpd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vminpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vminpd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vminpd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vminpd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vminpd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vminpd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vminpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vminpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vminpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vminpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vminps xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vminps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vminps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vminps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vminps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vminps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vminps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vminps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vminps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vminps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vminps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vminps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vminps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vminps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vminps ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vminps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vminps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vminps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vminps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vminps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vminps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vminps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vminps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vminps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vminps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vminps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vminps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vminps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	# Move instructions: no broadcast forms (full-vector moves only).
	vmovapd xmm30, xmm29	 # AVX512{F,VL}
	vmovapd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovapd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovapd xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovapd xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovapd xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovapd xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovapd xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovapd xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovapd ymm30, ymm29	 # AVX512{F,VL}
	vmovapd ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovapd ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovapd ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovapd ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovapd ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovapd ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovapd ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovapd ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovaps xmm30, xmm29	 # AVX512{F,VL}
	vmovaps xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovaps xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovaps xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovaps xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovaps xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovaps xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovaps xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovaps xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovaps ymm30, ymm29	 # AVX512{F,VL}
	vmovaps ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovaps ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovaps ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovaps ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovaps ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovaps ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovaps ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovaps ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	# vmovddup xmm reads only 64 bits (QWORD), hence disp8*8 boundaries.
	vmovddup xmm30, xmm29	 # AVX512{F,VL}
	vmovddup xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovddup xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovddup xmm30, QWORD PTR [rcx]	 # AVX512{F,VL}
	vmovddup xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovddup xmm30, QWORD PTR [rdx+1016]	 # AVX512{F,VL} Disp8
	vmovddup xmm30, QWORD PTR [rdx+1024]	 # AVX512{F,VL}
	vmovddup xmm30, QWORD PTR [rdx-1024]	 # AVX512{F,VL} Disp8
	vmovddup xmm30, QWORD PTR [rdx-1032]	 # AVX512{F,VL}
	vmovddup ymm30, ymm29	 # AVX512{F,VL}
	vmovddup ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovddup ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovddup ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovddup ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovddup ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovddup ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovddup ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovddup ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovdqa32 xmm30, xmm29	 # AVX512{F,VL}
	vmovdqa32 xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovdqa32 xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovdqa32 xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqa32 xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqa32 xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovdqa32 xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovdqa32 xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovdqa32 xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovdqa32 ymm30, ymm29	 # AVX512{F,VL}
	vmovdqa32 ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovdqa32 ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovdqa32 ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqa32 ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqa32 ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovdqa32 ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovdqa32 ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovdqa32 ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovdqa64 xmm30, xmm29	 # AVX512{F,VL}
	vmovdqa64 xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovdqa64 xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovdqa64 xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqa64 xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqa64 xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovdqa64 xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovdqa64 xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovdqa64 xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovdqa64 ymm30, ymm29	 # AVX512{F,VL}
	vmovdqa64 ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovdqa64 ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovdqa64 ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqa64 ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqa64 ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovdqa64 ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovdqa64 ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovdqa64 ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovdqu32 xmm30, xmm29	 # AVX512{F,VL}
	vmovdqu32 xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovdqu32 xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovdqu32 xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqu32 xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqu32 xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovdqu32 xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovdqu32 xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovdqu32 xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovdqu32 ymm30, ymm29	 # AVX512{F,VL}
	vmovdqu32 ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovdqu32 ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovdqu32 ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqu32 ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqu32 ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovdqu32 ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovdqu32 ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovdqu32 ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovdqu64 xmm30, xmm29	 # AVX512{F,VL}
	vmovdqu64 xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovdqu64 xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovdqu64 xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqu64 xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqu64 xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovdqu64 xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovdqu64 xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovdqu64 xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovdqu64 ymm30, ymm29	 # AVX512{F,VL}
	vmovdqu64 ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovdqu64 ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovdqu64 ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovdqu64 ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovdqu64 ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovdqu64 ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovdqu64 ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovdqu64 ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	# Non-temporal stores/loads: memory-only, no masking variants.
	vmovntdq XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
	vmovntdq XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
	vmovntdq XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
	vmovntdq XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
	vmovntdq XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
	vmovntdq XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
	vmovntdq YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
	vmovntdq YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
	vmovntdq YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
	vmovntdq YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
	vmovntdq YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
	vmovntdq YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
	vmovntdqa xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovntdqa xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovntdqa xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovntdqa xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovntdqa xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovntdqa xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovntdqa ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovntdqa ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovntdqa ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovntdqa ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovntdqa ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovntdqa ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovntpd XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
	vmovntpd XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
	vmovntpd XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
	vmovntpd XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
	vmovntpd XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
	vmovntpd XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
	vmovntpd YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
	vmovntpd YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
	vmovntpd YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
	vmovntpd YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
	vmovntpd YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
	vmovntpd YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
	vmovntps XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
	vmovntps XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
	vmovntps XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
	vmovntps XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
	vmovntps XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
	vmovntps XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
	vmovntps YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
	vmovntps YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
	vmovntps YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
	vmovntps YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
	vmovntps YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
	vmovntps YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
	vmovshdup xmm30, xmm29	 # AVX512{F,VL}
	vmovshdup xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovshdup xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovshdup xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovshdup xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovshdup xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovshdup xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovshdup xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovshdup xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovshdup ymm30, ymm29	 # AVX512{F,VL}
	vmovshdup ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovshdup ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovshdup ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovshdup ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovshdup ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovshdup ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovshdup ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovshdup ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovsldup xmm30, xmm29	 # AVX512{F,VL}
	vmovsldup xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovsldup xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovsldup xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovsldup xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovsldup xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovsldup xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovsldup xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovsldup xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovsldup ymm30, ymm29	 # AVX512{F,VL}
	vmovsldup ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovsldup ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovsldup ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovsldup ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovsldup ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovsldup ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovsldup ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovsldup ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovupd xmm30, xmm29	 # AVX512{F,VL}
	vmovupd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovupd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovupd xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovupd xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovupd xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovupd xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovupd xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovupd xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovupd ymm30, ymm29	 # AVX512{F,VL}
	vmovupd ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovupd ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovupd ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovupd ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovupd ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovupd ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovupd ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovupd ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmovups xmm30, xmm29	 # AVX512{F,VL}
	vmovups xmm30{k7}, xmm29	 # AVX512{F,VL}
	vmovups xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vmovups xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovups xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovups xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmovups xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmovups xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmovups xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmovups ymm30, ymm29	 # AVX512{F,VL}
	vmovups ymm30{k7}, ymm29	 # AVX512{F,VL}
	vmovups ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vmovups ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmovups ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmovups ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmovups ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmovups ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmovups ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vmulpd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vmulpd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmulpd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmulpd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vmulpd xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vmulpd xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vmulpd xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vmulpd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vmulpd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmulpd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmulpd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vmulpd ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vmulpd ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vmulpd ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vmulps xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vmulps xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vmulps xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vmulps xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmulps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmulps xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vmulps xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vmulps xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vmulps xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vmulps xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vmulps xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vmulps xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vmulps xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vmulps xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vmulps ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vmulps ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vmulps ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vmulps ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vmulps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vmulps ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vmulps ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vmulps ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vmulps ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vmulps ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vmulps ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vmulps ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vmulps ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vmulps ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	# Integer ops: two-operand (vpabs*) then three-operand (vpadd*/vpand*).
	vpabsd xmm30, xmm29	 # AVX512{F,VL}
	vpabsd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpabsd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpabsd xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpabsd xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpabsd xmm30, [rcx]{1to4}	 # AVX512{F,VL}
	vpabsd xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpabsd xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpabsd xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpabsd xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpabsd xmm30, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpabsd xmm30, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpabsd xmm30, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpabsd xmm30, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpabsd ymm30, ymm29	 # AVX512{F,VL}
	vpabsd ymm30{k7}, ymm29	 # AVX512{F,VL}
	vpabsd ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpabsd ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpabsd ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpabsd ymm30, [rcx]{1to8}	 # AVX512{F,VL}
	vpabsd ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpabsd ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpabsd ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpabsd ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpabsd ymm30, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpabsd ymm30, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpabsd ymm30, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpabsd ymm30, [rdx-516]{1to8}	 # AVX512{F,VL}
	vpabsq xmm30, xmm29	 # AVX512{F,VL}
	vpabsq xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpabsq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpabsq xmm30, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpabsq xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpabsq xmm30, [rcx]{1to2}	 # AVX512{F,VL}
	vpabsq xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpabsq xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpabsq xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpabsq xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpabsq xmm30, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpabsq xmm30, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vpabsq xmm30, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpabsq xmm30, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vpabsq ymm30, ymm29	 # AVX512{F,VL}
	vpabsq ymm30{k7}, ymm29	 # AVX512{F,VL}
	vpabsq ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpabsq ymm30, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpabsq ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpabsq ymm30, [rcx]{1to4}	 # AVX512{F,VL}
	vpabsq ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpabsq ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpabsq ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpabsq ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpabsq ymm30, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpabsq ymm30, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpabsq ymm30, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpabsq ymm30, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpaddd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpaddd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpaddd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpaddd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpaddd xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpaddd xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpaddd xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpaddd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpaddd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpaddd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpaddd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpaddd ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpaddd ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpaddd ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpaddq xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpaddq xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpaddq xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpaddq xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpaddq xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vpaddq xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpaddq xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpaddq ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpaddq ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpaddq ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpaddq ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpaddq ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpaddq ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpaddq ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vpandd xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpandd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpandd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpandd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandd xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpandd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpandd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpandd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpandd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpandd xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpandd xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpandd xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpandd xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpandd ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpandd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpandd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpandd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandd ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vpandd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpandd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpandd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpandd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpandd ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpandd ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpandd ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpandd ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpandnd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpandnd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpandnd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpandnd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpandnd xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpandnd xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpandnd xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpandnd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpandnd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpandnd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpandnd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpandnd ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpandnd ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpandnd ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpandnq xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpandnq xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpandnq xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpandnq xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpandnq xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vpandnq xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpandnq xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpandnq ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpandnq ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpandnq ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpandnq ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpandnq ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpandnq ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpandnq ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vpandq xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpandq xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpandq xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpandq xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandq xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vpandq xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpandq xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpandq xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpandq xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpandq xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpandq xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vpandq xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpandq xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vpandq ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpandq ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpandq ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpandq ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpandq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpandq ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpandq ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpandq ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpandq ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpandq ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpandq ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpandq ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpandq ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpandq ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpblendmd xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpblendmd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpblendmd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpblendmd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpblendmd xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpblendmd xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpblendmd xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpblendmd ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpblendmd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpblendmd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpblendmd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpblendmd ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpblendmd ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpblendmd ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	# vpbroadcastd: DWORD memory source (disp8*4), register source,
	# and GPR (32-bit) source forms.
	vpbroadcastd xmm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpbroadcastd xmm30{k7}, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpbroadcastd xmm30{k7}{z}, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpbroadcastd xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpbroadcastd xmm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpbroadcastd xmm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpbroadcastd xmm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpbroadcastd xmm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
	vpbroadcastd ymm30, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpbroadcastd ymm30{k7}, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpbroadcastd ymm30{k7}{z}, DWORD PTR [rcx]	 # AVX512{F,VL}
	vpbroadcastd ymm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpbroadcastd ymm30, DWORD PTR [rdx+508]	 # AVX512{F,VL} Disp8
	vpbroadcastd ymm30, DWORD PTR [rdx+512]	 # AVX512{F,VL}
	vpbroadcastd ymm30, DWORD PTR [rdx-512]	 # AVX512{F,VL} Disp8
	vpbroadcastd ymm30, DWORD PTR [rdx-516]	 # AVX512{F,VL}
	vpbroadcastd xmm30, xmm29	 # AVX512{F,VL}
	vpbroadcastd xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpbroadcastd xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpbroadcastd ymm30, xmm29	 # AVX512{F,VL}
	vpbroadcastd ymm30{k7}, xmm29	 # AVX512{F,VL}
	vpbroadcastd ymm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpbroadcastd xmm30, eax	 # AVX512{F,VL}
	vpbroadcastd xmm30{k7}, eax	 # AVX512{F,VL}
	vpbroadcastd xmm30{k7}{z}, eax	 # AVX512{F,VL}
	vpbroadcastd xmm30, ebp	 # AVX512{F,VL}
	vpbroadcastd xmm30, r13d	 # AVX512{F,VL}
	vpbroadcastd ymm30, eax	 # AVX512{F,VL}
	vpbroadcastd ymm30{k7}, eax	 # AVX512{F,VL}
vpbroadcastd ymm30{k7}{z}, eax # AVX512{F,VL}
vpbroadcastd ymm30, ebp # AVX512{F,VL}
vpbroadcastd ymm30, r13d # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq xmm30{k7}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq xmm30{k7}{z}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpbroadcastq xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpbroadcastq xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpbroadcastq xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq ymm30{k7}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq ymm30{k7}{z}, QWORD PTR [rcx] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpbroadcastq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpbroadcastq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpbroadcastq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpbroadcastq xmm30, xmm29 # AVX512{F,VL}
vpbroadcastq xmm30{k7}, xmm29 # AVX512{F,VL}
vpbroadcastq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpbroadcastq ymm30, xmm29 # AVX512{F,VL}
vpbroadcastq ymm30{k7}, xmm29 # AVX512{F,VL}
vpbroadcastq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpbroadcastq xmm30, rax # AVX512{F,VL}
vpbroadcastq xmm30{k7}, rax # AVX512{F,VL}
vpbroadcastq xmm30{k7}{z}, rax # AVX512{F,VL}
vpbroadcastq xmm30, r8 # AVX512{F,VL}
vpbroadcastq ymm30, rax # AVX512{F,VL}
vpbroadcastq ymm30{k7}, rax # AVX512{F,VL}
vpbroadcastq ymm30{k7}{z}, rax # AVX512{F,VL}
vpbroadcastq ymm30, r8 # AVX512{F,VL}
vpcmpd k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpd k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpd k5, xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5, xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpd k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpd k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpd k5, ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpd k5, ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpd k5, ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpcmpeqd k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqd k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqd k5, xmm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpeqd k5, xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vpcmpeqd k5, xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpeqd k5, xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vpcmpeqd k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqd k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqd k5, ymm30, [rcx]{1to8} # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpeqd k5, ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vpcmpeqd k5, ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpeqd k5, ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpcmpeqq k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqq k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqq k5, xmm30, [rcx]{1to2} # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpeqq k5, xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vpcmpeqq k5, xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpeqq k5, xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vpcmpeqq k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqq k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpeqq k5, ymm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpeqq k5, ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vpcmpeqq k5, ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpeqq k5, ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpcmpgtd k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtd k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtd k5, xmm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpgtd k5, xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vpcmpgtd k5, xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpcmpgtd k5, xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vpcmpgtd k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtd k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtd k5, ymm30, [rcx]{1to8} # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpgtd k5, ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vpcmpgtd k5, ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpcmpgtd k5, ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpcmpgtq k5, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtq k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtq k5, xmm30, [rcx]{1to2} # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpcmpgtq k5, xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vpcmpgtq k5, xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpcmpgtq k5, xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vpcmpgtq k5, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtq k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpcmpgtq k5, ymm30, [rcx]{1to4} # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpcmpgtq k5, ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vpcmpgtq k5, ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpcmpgtq k5, ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpcmpq k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpq k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpq k5, xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5, xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpq k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpq k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpq k5, ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpq k5, ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpq k5, ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpud k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpud k5, xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpud k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpud k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpud k5, ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpcmpud k5, ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpcmpud k5, ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, xmm30, xmm29, 0xab # AVX512{F,VL}
vpcmpuq k5, xmm30, xmm29, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5, xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpuq k5{k7}, ymm30, ymm29, 0xab # AVX512{F,VL}
vpcmpuq k5, ymm30, ymm29, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpcmpuq k5, ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpcmpuq k5, ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpblendmq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpblendmq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpblendmq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpblendmq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpblendmq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpblendmq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpblendmq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpblendmq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpblendmq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpblendmq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpblendmq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpblendmq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpblendmq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpblendmq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpcompressd XMMWORD PTR [rcx], xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rcx]{k7}, xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rax+r14*8+0x1234], xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rdx+508], xmm30 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [rdx+512], xmm30 # AVX512{F,VL}
vpcompressd XMMWORD PTR [rdx-512], xmm30 # AVX512{F,VL} Disp8
vpcompressd XMMWORD PTR [rdx-516], xmm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rdx+508], ymm30 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [rdx+512], ymm30 # AVX512{F,VL}
vpcompressd YMMWORD PTR [rdx-512], ymm30 # AVX512{F,VL} Disp8
vpcompressd YMMWORD PTR [rdx-516], ymm30 # AVX512{F,VL}
vpcompressd xmm30, xmm29 # AVX512{F,VL}
vpcompressd xmm30{k7}, xmm29 # AVX512{F,VL}
vpcompressd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpcompressd ymm30, ymm29 # AVX512{F,VL}
vpcompressd ymm30{k7}, ymm29 # AVX512{F,VL}
vpcompressd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpermd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermilpd xmm30, xmm29, 0xab # AVX512{F,VL}
vpermilpd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpermilpd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpermilpd xmm30, xmm29, 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilpd xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpermilpd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpermilpd xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpermilpd xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpermilpd xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpermilpd ymm30, ymm29, 0xab # AVX512{F,VL}
vpermilpd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermilpd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermilpd ymm30, ymm29, 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilpd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermilpd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermilpd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpermilpd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilpd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpermilpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermilpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermilpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermilpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermilpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermilpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermilpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermilpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermilpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermilpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermilpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermilpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermilps xmm30, xmm29, 0xab # AVX512{F,VL}
vpermilps xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpermilps xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpermilps xmm30, xmm29, 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilps xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpermilps xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpermilps xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpermilps xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpermilps xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilps xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpermilps xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpermilps xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpermilps ymm30, ymm29, 0xab # AVX512{F,VL}
vpermilps ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermilps ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermilps ymm30, ymm29, 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermilps ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermilps ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermilps ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermilps ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermilps ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpermilps ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpermilps ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpermilps ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpermilps xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermilps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermilps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermilps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermilps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermilps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermilps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermilps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermilps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermilps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermilps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermilps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermilps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermilps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermilps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermpd ymm30, ymm29, 0xab # AVX512{F,VL}
vpermpd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermpd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermpd ymm30, ymm29, 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermpd ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermpd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermpd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermpd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermpd ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermpd ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpermpd ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermpd ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpermps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermq ymm30, ymm29, 0xab # AVX512{F,VL}
vpermq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpermq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpermq ymm30, ymm29, 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpermq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpermq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpermq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpermq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpermq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpermq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpermq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpermq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpexpandd xmm30, XMMWORD PTR [rdx+512] # AVX512{F,VL}
vpexpandd xmm30, XMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpexpandd xmm30, XMMWORD PTR [rdx-516] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpexpandd ymm30, YMMWORD PTR [rdx+512] # AVX512{F,VL}
vpexpandd ymm30, YMMWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpexpandd ymm30, YMMWORD PTR [rdx-516] # AVX512{F,VL}
vpexpandd xmm30, xmm29 # AVX512{F,VL}
vpexpandd xmm30{k7}, xmm29 # AVX512{F,VL}
vpexpandd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpexpandd ymm30, ymm29 # AVX512{F,VL}
vpexpandd ymm30{k7}, ymm29 # AVX512{F,VL}
vpexpandd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq xmm30{k7}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq xmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpexpandq xmm30, XMMWORD PTR [rdx+1024] # AVX512{F,VL}
vpexpandq xmm30, XMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpexpandq xmm30, XMMWORD PTR [rdx-1032] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq ymm30{k7}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq ymm30{k7}{z}, YMMWORD PTR [rcx] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpexpandq ymm30, YMMWORD PTR [rdx+1024] # AVX512{F,VL}
vpexpandq ymm30, YMMWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpexpandq ymm30, YMMWORD PTR [rdx-1032] # AVX512{F,VL}
vpexpandq xmm30, xmm29 # AVX512{F,VL}
vpexpandq xmm30{k7}, xmm29 # AVX512{F,VL}
vpexpandq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpexpandq ymm30, ymm29 # AVX512{F,VL}
vpexpandq ymm30{k7}, ymm29 # AVX512{F,VL}
vpexpandq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vpgatherdd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherdd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherdd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherdd ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vpgatherdd ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vpgatherdd ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vpgatherdq xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherdq xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherdq xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherdq ymm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherdq ymm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherdq ymm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vpgatherqd xmm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
vpgatherqq xmm30{k1}, [r14+xmm31*8-123] # AVX512{F,VL}
vpgatherqq xmm30{k1}, [r9+xmm31+256] # AVX512{F,VL}
vpgatherqq xmm30{k1}, [rcx+xmm31*4+1024] # AVX512{F,VL}
vpgatherqq ymm30{k1}, [r14+ymm31*8-123] # AVX512{F,VL}
vpgatherqq ymm30{k1}, [r9+ymm31+256] # AVX512{F,VL}
vpgatherqq ymm30{k1}, [rcx+ymm31*4+1024] # AVX512{F,VL}
# VPMAX/VPMIN {S,U}{D,Q} test vectors. Each 14-line sub-block exercises:
# reg-reg, {k7} masking, {k7}{z} zeroing, plain memory, SIB memory, {1toN}
# broadcast, and the Disp8*N compressed-displacement boundary (the "Disp8"
# lines are representable as a compressed 8-bit displacement; the adjacent
# line just past the boundary is not).
vpmaxsd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxsd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxsd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpmaxsd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpmaxsd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpmaxsd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxsd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxsd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpmaxsd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpmaxsd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpmaxsq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxsq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxsq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmaxsq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmaxsq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmaxsq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxsq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxsq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmaxsq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmaxsq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpmaxud xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxud xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxud xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxud xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxud xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpmaxud xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpmaxud xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpmaxud ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxud ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxud ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxud ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxud ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpmaxud ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpmaxud ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpmaxuq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmaxuq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmaxuq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmaxuq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmaxuq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmaxuq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmaxuq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmaxuq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmaxuq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmaxuq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmaxuq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmaxuq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpminsd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminsd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminsd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminsd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminsd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpminsd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpminsd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpminsd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminsd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminsd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminsd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminsd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpminsd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpminsd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpminsq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminsq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminsq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminsq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminsq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpminsq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpminsq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpminsq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminsq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminsq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminsq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminsq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminsq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpminsq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpminsq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpminud xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminud xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminud xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminud xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminud xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminud xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpminud xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpminud xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpminud ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminud ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminud ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminud ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminud ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminud ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpminud ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpminud ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpminuq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpminuq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpminuq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminuq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpminuq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpminuq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpminuq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpminuq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpminuq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpminuq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpminuq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpminuq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpminuq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpminuq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpminuq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpminuq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# VPMOVSX/VPMOVZX widening test vectors. The memory operand width equals the
# number of source elements times the source element size (e.g. vpmovsxbd
# xmm = 4 bytes = DWORD PTR; its ymm form = 8 bytes = QWORD PTR), and the
# Disp8 boundary values scale accordingly.
vpmovsxbd xmm30, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxbd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxbd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxbd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxbd ymm30, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxbd ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxbd ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxbd ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxbq xmm30, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rcx] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rdx+254] # AVX512{F,VL} Disp8
vpmovsxbq xmm30, WORD PTR [rdx+256] # AVX512{F,VL}
vpmovsxbq xmm30, WORD PTR [rdx-256] # AVX512{F,VL} Disp8
vpmovsxbq xmm30, WORD PTR [rdx-258] # AVX512{F,VL}
vpmovsxbq ymm30, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxbq ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxbq ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxbq ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxdq xmm30, xmm29 # AVX512{F,VL}
vpmovsxdq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxdq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxdq xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxdq xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxdq xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxdq ymm30, xmm29 # AVX512{F,VL}
vpmovsxdq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxdq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovsxdq ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovsxdq ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovsxdq ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovsxwd xmm30, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxwd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxwd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxwd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovsxwd ymm30, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovsxwd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovsxwd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovsxwd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovsxwq xmm30, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovsxwq xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovsxwq xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovsxwq xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovsxwq ymm30, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovsxwq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovsxwq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovsxwq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxbd xmm30, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxbd xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxbd xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxbd xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxbd ymm30, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxbd ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxbd ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxbd ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxbq xmm30, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rcx] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rdx+254] # AVX512{F,VL} Disp8
vpmovzxbq xmm30, WORD PTR [rdx+256] # AVX512{F,VL}
vpmovzxbq xmm30, WORD PTR [rdx-256] # AVX512{F,VL} Disp8
vpmovzxbq xmm30, WORD PTR [rdx-258] # AVX512{F,VL}
vpmovzxbq ymm30, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxbq ymm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxbq ymm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxbq ymm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxdq xmm30, xmm29 # AVX512{F,VL}
vpmovzxdq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxdq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxdq xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxdq xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxdq xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxdq ymm30, xmm29 # AVX512{F,VL}
vpmovzxdq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxdq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovzxdq ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovzxdq ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovzxdq ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovzxwd xmm30, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxwd xmm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxwd xmm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxwd xmm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
vpmovzxwd ymm30, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmovzxwd ymm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmovzxwd ymm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmovzxwd ymm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmovzxwq xmm30, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rdx+508] # AVX512{F,VL} Disp8
vpmovzxwq xmm30, DWORD PTR [rdx+512] # AVX512{F,VL}
vpmovzxwq xmm30, DWORD PTR [rdx-512] # AVX512{F,VL} Disp8
vpmovzxwq xmm30, DWORD PTR [rdx-516] # AVX512{F,VL}
vpmovzxwq ymm30, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30{k7}, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30{k7}{z}, xmm29 # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rcx] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rdx+1016] # AVX512{F,VL} Disp8
vpmovzxwq ymm30, QWORD PTR [rdx+1024] # AVX512{F,VL}
vpmovzxwq ymm30, QWORD PTR [rdx-1024] # AVX512{F,VL} Disp8
vpmovzxwq ymm30, QWORD PTR [rdx-1032] # AVX512{F,VL}
# VPMULDQ/VPMULLD/VPMULUDQ test vectors: masking, zeroing, broadcast and
# Disp8*N boundaries. The qword-element forms (vpmuldq/vpmuludq) use {1to2}
# (xmm) / {1to4} (ymm) broadcasts; the dword form (vpmulld) uses {1to4}/{1to8}.
vpmuldq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmuldq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmuldq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuldq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmuldq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmuldq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuldq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmuldq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmuldq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmuldq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuldq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmuldq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmuldq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuldq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpmulld xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmulld xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmulld xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmulld xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmulld xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmulld xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpmulld xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpmulld xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpmulld ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmulld ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmulld ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmulld ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmulld ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmulld ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpmulld ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpmulld ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpmuludq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpmuludq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpmuludq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuludq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpmuludq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpmuludq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpmuludq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpmuludq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpmuludq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpmuludq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpmuludq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpmuludq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpmuludq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpmuludq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# VPORD/VPORQ test vectors: the element-size suffix (d vs q) selects the
# broadcast granularity ({1to4}/{1to8} vs {1to2}/{1to4}) and the Disp8*N scale.
vpord xmm30, xmm29, xmm28 # AVX512{F,VL}
vpord xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpord xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpord xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpord xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpord xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpord xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpord xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpord xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpord xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpord xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpord ymm30, ymm29, ymm28 # AVX512{F,VL}
vpord ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpord ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpord ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpord ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpord ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpord ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpord ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpord ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpord ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpord ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vporq xmm30, xmm29, xmm28 # AVX512{F,VL}
vporq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vporq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vporq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vporq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vporq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vporq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vporq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vporq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vporq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vporq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vporq ymm30, ymm29, ymm28 # AVX512{F,VL}
vporq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vporq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vporq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vporq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vporq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vporq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vporq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vporq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vporq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vporq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# VPSCATTER{D,Q}{D,Q} test vectors: VSIB stores with a {k1} write-mask.
# The first line of each subgroup is deliberately emitted twice by the
# test generator; do not deduplicate, the paired .d dump expects both.
vpscatterdd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterdd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdd [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vpscatterdd [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [r9+xmm31+256]{k1}, ymm30 # AVX512{F,VL}
vpscatterdq [rcx+xmm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vpscatterqd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [r9+ymm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterqd [rcx+ymm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vpscatterqq [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterqq [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vpscatterqq [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vpscatterqq [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
# VPSHUFD test vectors: immediate forms (0xab and 123) plus masked, broadcast
# and Disp8*N memory variants.
vpshufd xmm30, xmm29, 0xab # AVX512{F,VL}
vpshufd xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpshufd xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpshufd xmm30, xmm29, 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpshufd xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpshufd xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpshufd xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpshufd xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpshufd xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpshufd xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpshufd xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpshufd ymm30, ymm29, 0xab # AVX512{F,VL}
vpshufd ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpshufd ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpshufd ymm30, ymm29, 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpshufd ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpshufd ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpshufd ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpshufd ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpshufd ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpshufd ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpshufd ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpslld xmm30, xmm29, xmm28 # AVX512{F,VL}
vpslld xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpslld xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpslld xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpslld xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpslld xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpslld ymm30, ymm29, xmm28 # AVX512{F,VL}
vpslld ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpslld ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpslld ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpslld ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpslld ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsllq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsllq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllq ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsllq ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsllq ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllq ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllq ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllq ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsllvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsllvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsllvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsllvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsllvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsllvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsllvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsllvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsllvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsllvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsllvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsllvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsllvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsllvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsllvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsllvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsllvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsllvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsllvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsllvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsllvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsllvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsllvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpsrad xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrad xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrad xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrad xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrad xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrad xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrad ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsrad ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsrad ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrad ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrad ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrad ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsraq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsraq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsraq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsraq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsraq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsraq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsraq ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsraq ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsraq ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsraq ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsraq ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsraq ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsravd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsravd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsravd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsravd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsravd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsravd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsravd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsravd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsravd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsravd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsravd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsravd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsravd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsravd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsravq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsravq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsravq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsravq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsravq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsravq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsravq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsravq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsravq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsravq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsravq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsravq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsravq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsravq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsravq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpsrld xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrld xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrld xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrld xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrld xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrld xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrld ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsrld ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsrld ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrld ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrld ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrld ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrlq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrlq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlq ymm30, ymm29, xmm28 # AVX512{F,VL}
vpsrlq ymm30{k7}, ymm29, xmm28 # AVX512{F,VL}
vpsrlq ymm30{k7}{z}, ymm29, xmm28 # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlq ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrlvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsrlvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsrlvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsrlvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsrlvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsrlvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsrlvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsrlvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsrlvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsrlvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsrlvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsrlvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsrlvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsrlvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsrlvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsrlvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsrlvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsrlvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpsrld xmm30, xmm29, 0xab # AVX512{F,VL}
vpsrld xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsrld xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsrld xmm30, xmm29, 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrld xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsrld xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsrld xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsrld xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsrld xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpsrld xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrld xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpsrld ymm30, ymm29, 0xab # AVX512{F,VL}
vpsrld ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsrld ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsrld ymm30, ymm29, 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrld ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsrld ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsrld ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsrld ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsrld ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpsrld ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpsrld ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpsrlq xmm30, xmm29, 0xab # AVX512{F,VL}
vpsrlq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vpsrlq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vpsrlq xmm30, xmm29, 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrlq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpsrlq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpsrlq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpsrlq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpsrlq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpsrlq ymm30, ymm29, 0xab # AVX512{F,VL}
vpsrlq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vpsrlq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vpsrlq ymm30, ymm29, 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpsrlq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpsrlq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpsrlq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpsrlq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpsrlq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vpsubd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsubd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsubd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsubd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsubd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpsubd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpsubd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpsubd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsubd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsubd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsubd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsubd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpsubd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpsubd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpsubq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpsubq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpsubq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpsubq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpsubq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpsubq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpsubq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpsubq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpsubq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpsubq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpsubq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpsubq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpsubq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpsubq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpsubq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vptestmd k5, xmm30, xmm29 # AVX512{F,VL}
vptestmd k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmd k5, xmm30, [rcx]{1to4} # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestmd k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestmd k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestmd k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestmd k5, xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vptestmd k5, xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vptestmd k5, xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vptestmd k5, xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vptestmd k5, ymm30, ymm29 # AVX512{F,VL}
vptestmd k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmd k5, ymm30, [rcx]{1to8} # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestmd k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestmd k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestmd k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestmd k5, ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vptestmd k5, ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vptestmd k5, ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vptestmd k5, ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vptestmq k5, xmm30, xmm29 # AVX512{F,VL}
vptestmq k5{k7}, xmm30, xmm29 # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmq k5, xmm30, [rcx]{1to2} # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestmq k5, xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestmq k5, xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestmq k5, xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestmq k5, xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vptestmq k5, xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vptestmq k5, xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vptestmq k5, xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vptestmq k5, ymm30, ymm29 # AVX512{F,VL}
vptestmq k5{k7}, ymm30, ymm29 # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestmq k5, ymm30, [rcx]{1to4} # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestmq k5, ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestmq k5, ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestmq k5, ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestmq k5, ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vptestmq k5, ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vptestmq k5, ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vptestmq k5, ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpckhdq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhdq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpunpckhdq xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckhdq xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpckhdq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhdq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpunpckhdq ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckhdq ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpckhqdq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhqdq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpunpckhqdq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpckhqdq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpckhqdq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhqdq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpunpckhqdq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpckhqdq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpunpckldq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpckldq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpckldq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpunpckldq xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpunpckldq xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpunpckldq xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpunpckldq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpckldq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpckldq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpckldq ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpunpckldq ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpunpckldq ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# vpunpcklqdq — interleave low quadwords of the two sources.
# Generated coverage pattern (repeated for every mnemonic in this file), per
# vector length: reg/reg, {k7} merge-masking, {k7}{z} zero-masking, plain
# memory, SIB+disp32, {1toN} broadcast, then displacement values chosen to sit
# just inside ("Disp8" = EVEX disp8*N compressed displacement) or just outside
# that range (forcing disp32).
vpunpcklqdq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpunpcklqdq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpunpcklqdq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
# 2032 = 127*16 (max disp8*16); 2048 exceeds it; -2048 = -128*16; -2064 is below.
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
# Broadcast element is 8 bytes, so disp8*8 range is +/-1024-ish: 1016 = 127*8.
vpunpcklqdq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpunpcklqdq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpunpcklqdq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
# 256-bit (ymm) forms: memory operand is 32 bytes, so disp8*32 range doubles.
vpunpcklqdq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpunpcklqdq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpunpcklqdq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpunpcklqdq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpunpcklqdq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# vpxord / vpxorq — bitwise XOR; the d/q suffix selects the element size used
# for masking and for the {1toN} broadcast granularity (4-byte vs 8-byte).
vpxord xmm30, xmm29, xmm28 # AVX512{F,VL}
vpxord xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpxord xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxord xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpxord xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
# Dword broadcast: disp8*4 boundary at 508 = 127*4 / -512 = -128*4.
vpxord xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpxord xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpxord xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpxord ymm30, ymm29, ymm28 # AVX512{F,VL}
vpxord ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpxord ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxord ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpxord ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpxord ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpxord ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpxord ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# Qword variant: broadcast boundary moves to 1016/1024 (disp8*8).
vpxorq xmm30, xmm29, xmm28 # AVX512{F,VL}
vpxorq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpxorq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxorq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpxorq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpxorq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpxorq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpxorq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpxorq ymm30, ymm29, ymm28 # AVX512{F,VL}
vpxorq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpxorq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpxorq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpxorq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpxorq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpxorq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpxorq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# vrcp14pd / vrcp14ps — packed reciprocal approximation (per the Intel SDM,
# relative error <= 2^-14); two-operand form, same mask/broadcast/Disp8 grid.
vrcp14pd xmm30, xmm29 # AVX512{F,VL}
vrcp14pd xmm30{k7}, xmm29 # AVX512{F,VL}
vrcp14pd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14pd xmm30, [rcx]{1to2} # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrcp14pd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrcp14pd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrcp14pd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrcp14pd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vrcp14pd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vrcp14pd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vrcp14pd ymm30, ymm29 # AVX512{F,VL}
vrcp14pd ymm30{k7}, ymm29 # AVX512{F,VL}
vrcp14pd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14pd ymm30, [rcx]{1to4} # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrcp14pd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrcp14pd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrcp14pd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrcp14pd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vrcp14pd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vrcp14pd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
# Single-precision form: 4-byte elements, broadcast Disp8 boundary at 508/512.
vrcp14ps xmm30, xmm29 # AVX512{F,VL}
vrcp14ps xmm30{k7}, xmm29 # AVX512{F,VL}
vrcp14ps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14ps xmm30, [rcx]{1to4} # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrcp14ps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrcp14ps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrcp14ps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrcp14ps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vrcp14ps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vrcp14ps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vrcp14ps ymm30, ymm29 # AVX512{F,VL}
vrcp14ps ymm30{k7}, ymm29 # AVX512{F,VL}
vrcp14ps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrcp14ps ymm30, [rcx]{1to8} # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrcp14ps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrcp14ps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrcp14ps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrcp14ps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vrcp14ps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vrcp14ps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
# vrsqrt14pd / vrsqrt14ps — packed reciprocal-square-root approximation
# (2^-14 relative error per the Intel SDM); mirrors the vrcp14 coverage grid.
vrsqrt14pd xmm30, xmm29 # AVX512{F,VL}
vrsqrt14pd xmm30{k7}, xmm29 # AVX512{F,VL}
vrsqrt14pd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14pd xmm30, [rcx]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrsqrt14pd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrsqrt14pd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vrsqrt14pd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vrsqrt14pd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vrsqrt14pd ymm30, ymm29 # AVX512{F,VL}
vrsqrt14pd ymm30{k7}, ymm29 # AVX512{F,VL}
vrsqrt14pd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14pd ymm30, [rcx]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrsqrt14pd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrsqrt14pd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vrsqrt14pd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vrsqrt14pd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm30, xmm29 # AVX512{F,VL}
vrsqrt14ps xmm30{k7}, xmm29 # AVX512{F,VL}
vrsqrt14ps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14ps xmm30, [rcx]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vrsqrt14ps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vrsqrt14ps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vrsqrt14ps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vrsqrt14ps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vrsqrt14ps ymm30, ymm29 # AVX512{F,VL}
vrsqrt14ps ymm30{k7}, ymm29 # AVX512{F,VL}
vrsqrt14ps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vrsqrt14ps ymm30, [rcx]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vrsqrt14ps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vrsqrt14ps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vrsqrt14ps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vrsqrt14ps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
# Scatter stores with VSIB addressing: the index register is a vector
# (xmm31/ymm31) and the {k1} write-mask is architecturally mandatory.
# The index width (d = dword, q = qword) and data width (pd/ps) together fix
# whether an xmm or ymm index/data register pairs with each form.
# NOTE(review): each addressing form appears twice back-to-back; this looks
# intentional in this generated corpus (duplicate-encoding coverage) — confirm
# against the paired .d expectations before deduplicating.
vscatterdpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterdpd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
# 256-bit data still takes an xmm index: 4 dword indices address 4 qwords.
vscatterdpd [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdpd [r14+xmm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdpd [r9+xmm31+256]{k1}, ymm30 # AVX512{F,VL}
vscatterdpd [rcx+xmm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterdps [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vscatterdps [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterqpd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [r14+ymm31*8-123]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [r9+ymm31+256]{k1}, ymm30 # AVX512{F,VL}
vscatterqpd [rcx+ymm31*4+1024]{k1}, ymm30 # AVX512{F,VL}
# qps: qword indices scatter dword data, so data stays xmm even for ymm index.
vscatterqps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r14+xmm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r9+xmm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [rcx+xmm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r14+ymm31*8-123]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [r9+ymm31+256]{k1}, xmm30 # AVX512{F,VL}
vscatterqps [rcx+ymm31*4+1024]{k1}, xmm30 # AVX512{F,VL}
# vshufpd / vshufps — element shuffle selected by a trailing imm8; the corpus
# exercises both a 0xab and a 123 immediate on top of the usual operand grid.
vshufpd xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufpd xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufpd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufpd xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, [rcx]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vshufpd xmm30, xmm29, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vshufpd xmm30, xmm29, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufpd ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufpd ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufpd ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vshufpd ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vshufpd ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
# Single-precision variant: dword elements, broadcast boundary at 508/512.
vshufps xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufps xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufps xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vshufps xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vshufps xmm30, xmm29, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vshufps xmm30, xmm29, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vshufps xmm30, xmm29, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufps ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufps ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vshufps ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vshufps ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vshufps ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vshufps ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
# vsqrtpd / vsqrtps — packed square root, two-operand form; standard
# mask/memory/broadcast/Disp8 coverage grid.
vsqrtpd xmm30, xmm29 # AVX512{F,VL}
vsqrtpd xmm30{k7}, xmm29 # AVX512{F,VL}
vsqrtpd xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtpd xmm30, [rcx]{1to2} # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsqrtpd xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsqrtpd xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsqrtpd xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsqrtpd xmm30, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm30, [rdx+1024]{1to2} # AVX512{F,VL}
vsqrtpd xmm30, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vsqrtpd xmm30, [rdx-1032]{1to2} # AVX512{F,VL}
vsqrtpd ymm30, ymm29 # AVX512{F,VL}
vsqrtpd ymm30{k7}, ymm29 # AVX512{F,VL}
vsqrtpd ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtpd ymm30, [rcx]{1to4} # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsqrtpd ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsqrtpd ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsqrtpd ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsqrtpd ymm30, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm30, [rdx+1024]{1to4} # AVX512{F,VL}
vsqrtpd ymm30, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vsqrtpd ymm30, [rdx-1032]{1to4} # AVX512{F,VL}
vsqrtps xmm30, xmm29 # AVX512{F,VL}
vsqrtps xmm30{k7}, xmm29 # AVX512{F,VL}
vsqrtps xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtps xmm30, [rcx]{1to4} # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsqrtps xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsqrtps xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsqrtps xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsqrtps xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vsqrtps xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vsqrtps xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vsqrtps ymm30, ymm29 # AVX512{F,VL}
vsqrtps ymm30{k7}, ymm29 # AVX512{F,VL}
vsqrtps ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsqrtps ymm30, [rcx]{1to8} # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsqrtps ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsqrtps ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsqrtps ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsqrtps ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vsqrtps ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vsqrtps ymm30, [rdx-516]{1to8} # AVX512{F,VL}
# vsubpd / vsubps — packed FP subtract, three-operand form; standard grid.
vsubpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vsubpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vsubpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsubpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsubpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vsubpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vsubpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vsubpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vsubpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vsubpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsubpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsubpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vsubpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vsubpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vsubps xmm30, xmm29, xmm28 # AVX512{F,VL}
vsubps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vsubps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vsubps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vsubps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vsubps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vsubps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vsubps ymm30, ymm29, ymm28 # AVX512{F,VL}
vsubps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vsubps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vsubps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vsubps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vsubps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vsubps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vsubps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# vunpckhpd / vunpckhps / vunpcklpd / vunpcklps — interleave high/low FP
# elements of the two sources; same coverage grid per element width.
vunpckhpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpckhpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpckhpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpckhpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vunpckhpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vunpckhpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vunpckhpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpckhpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpckhpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpckhpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vunpckhpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vunpckhpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vunpckhps xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpckhps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpckhps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpckhps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vunpckhps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vunpckhps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vunpckhps ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpckhps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpckhps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpckhps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpckhps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vunpckhps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vunpckhps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vunpcklpd xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpcklpd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpcklpd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpcklpd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vunpcklpd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vunpcklpd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vunpcklpd ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpcklpd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpcklpd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpcklpd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vunpcklpd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vunpcklpd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vunpcklps xmm30, xmm29, xmm28 # AVX512{F,VL}
vunpcklps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vunpcklps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vunpcklps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vunpcklps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vunpcklps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vunpcklps ymm30, ymm29, ymm28 # AVX512{F,VL}
vunpcklps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vunpcklps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vunpcklps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vunpcklps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vunpcklps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vunpcklps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
# vpternlogd / vpternlogq — bitwise ternary logic; the imm8 is the 8-entry
# truth table applied across dest/src1/src2. Dest is also a source, so only
# masking behavior differs between the d and q forms.
vpternlogd xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogd xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogd xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vpternlogd xmm30, xmm29, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogd xmm30, xmm29, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogd ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogd ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogd ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, [rcx]{1to8}, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vpternlogd ymm30, ymm29, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vpternlogd ymm30, ymm29, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogq xmm30{k7}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogq xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512{F,VL}
vpternlogq xmm30, xmm29, xmm28, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, [rcx]{1to2}, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vpternlogq xmm30, xmm29, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vpternlogq xmm30, xmm29, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogq ymm30{k7}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogq ymm30{k7}{z}, ymm29, ymm28, 0xab # AVX512{F,VL}
vpternlogq ymm30, ymm29, ymm28, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, [rcx]{1to4}, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vpternlogq ymm30, ymm29, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vpternlogq ymm30, ymm29, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
	# VPMOV{,S,US}Q{B,W,D}: qword-element down-conversions (truncate / signed-saturate /
	# unsigned-saturate). Each mnemonic is exercised in three mask variants — plain,
	# {k7} merge-masked, {k7}{z} zero-masked — with both xmm and ymm sources (AVX512F+VL).
	vpmovqb	xmm30, xmm29	 # AVX512{F,VL}
	vpmovqb	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovqb	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovqb	xmm30, ymm29	 # AVX512{F,VL}
	vpmovqb	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovqb	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovsqb	xmm30, xmm29	 # AVX512{F,VL}
	vpmovsqb	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsqb	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsqb	xmm30, ymm29	 # AVX512{F,VL}
	vpmovsqb	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovsqb	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovusqb	xmm30, xmm29	 # AVX512{F,VL}
	vpmovusqb	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovusqb	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovusqb	xmm30, ymm29	 # AVX512{F,VL}
	vpmovusqb	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovusqb	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovqw	xmm30, xmm29	 # AVX512{F,VL}
	vpmovqw	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovqw	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovqw	xmm30, ymm29	 # AVX512{F,VL}
	vpmovqw	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovqw	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovsqw	xmm30, xmm29	 # AVX512{F,VL}
	vpmovsqw	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsqw	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsqw	xmm30, ymm29	 # AVX512{F,VL}
	vpmovsqw	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovsqw	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovusqw	xmm30, xmm29	 # AVX512{F,VL}
	vpmovusqw	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovusqw	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovusqw	xmm30, ymm29	 # AVX512{F,VL}
	vpmovusqw	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovusqw	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovqd	xmm30, xmm29	 # AVX512{F,VL}
	vpmovqd	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovqd	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovqd	xmm30, ymm29	 # AVX512{F,VL}
	vpmovqd	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovqd	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovsqd	xmm30, xmm29	 # AVX512{F,VL}
	vpmovsqd	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsqd	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsqd	xmm30, ymm29	 # AVX512{F,VL}
	vpmovsqd	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovsqd	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovusqd	xmm30, xmm29	 # AVX512{F,VL}
	vpmovusqd	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovusqd	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovusqd	xmm30, ymm29	 # AVX512{F,VL}
	vpmovusqd	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovusqd	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	# VPMOV{,S,US}D{B,W}: dword-element down-conversions, same three mask variants
	# (plain / {k7} merge / {k7}{z} zero) with xmm and ymm sources.
	vpmovdb	xmm30, xmm29	 # AVX512{F,VL}
	vpmovdb	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovdb	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovdb	xmm30, ymm29	 # AVX512{F,VL}
	vpmovdb	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovdb	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovsdb	xmm30, xmm29	 # AVX512{F,VL}
	vpmovsdb	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsdb	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsdb	xmm30, ymm29	 # AVX512{F,VL}
	vpmovsdb	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovsdb	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovusdb	xmm30, xmm29	 # AVX512{F,VL}
	vpmovusdb	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovusdb	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovusdb	xmm30, ymm29	 # AVX512{F,VL}
	vpmovusdb	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovusdb	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovdw	xmm30, xmm29	 # AVX512{F,VL}
	vpmovdw	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovdw	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovdw	xmm30, ymm29	 # AVX512{F,VL}
	vpmovdw	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovdw	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovsdw	xmm30, xmm29	 # AVX512{F,VL}
	vpmovsdw	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovsdw	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovsdw	xmm30, ymm29	 # AVX512{F,VL}
	vpmovsdw	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovsdw	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	vpmovusdw	xmm30, xmm29	 # AVX512{F,VL}
	vpmovusdw	xmm30{k7}, xmm29	 # AVX512{F,VL}
	vpmovusdw	xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
	vpmovusdw	xmm30, ymm29	 # AVX512{F,VL}
	vpmovusdw	xmm30{k7}, ymm29	 # AVX512{F,VL}
	vpmovusdw	xmm30{k7}{z}, ymm29	 # AVX512{F,VL}
	# VSHUF{F,I}{32X4,64X2} ymm forms with imm8. Each mnemonic covers: register
	# (plain / {k7} / {k7}{z}), plain memory, SIB with index*8 scaling, and
	# {1toN} embedded-broadcast operands.
	# Lines tagged "Disp8" use displacements that fit the EVEX disp8*N compressed
	# encoding (multiples of 32 for full-vector, 4/8 for broadcast element size);
	# the untagged neighbour just past that range forces a disp32 encoding.
	vshuff32x4	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshuff32x4	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshuff32x4	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vshuff32x4	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vshuff32x4	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshuff32x4	ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
	vshuff32x4	ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshuff32x4	ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshuff64x2	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshuff64x2	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vshuff64x2	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vshuff64x2	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshuff64x2	ymm30, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
	vshuff64x2	ymm30, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshuff64x2	ymm30, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshufi32x4	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshufi32x4	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vshufi32x4	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vshufi32x4	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshufi32x4	ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
	vshufi32x4	ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vshufi32x4	ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshufi64x2	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshufi64x2	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vshufi64x2	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vshufi64x2	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshufi64x2	ymm30, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
	vshufi64x2	ymm30, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vshufi64x2	ymm30, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
	# VPERMQ / VPERMPD ymm, ymm, ymm/m256/m64bcst (vector-control forms):
	# register mask variants, memory, SIB, {1to4} qword broadcast, and the
	# disp8*N boundary pairs (+/-4096 full-vector, +/-1024 broadcast).
	vpermq	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpermq	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpermq	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpermq	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpermq	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermq	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpermq	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermq	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpermpd	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpermpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpermpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpermpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpermpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	# VPERMT2{D,Q,PS,PD} two-source permutes, xmm (128-bit) and ymm (256-bit)
	# forms. Broadcast factor follows element width: {1to4}/{1to8} for dword
	# (d/ps), {1to2}/{1to4} for qword (q/pd). Disp8-tagged lines sit at the
	# disp8*N limits (+/-2048 for 16-byte, +/-4096 for 32-byte operands).
	vpermt2d	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2d	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2d	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpermt2d	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpermt2d	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2d	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpermt2d	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2d	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2d	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2d	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpermt2d	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpermt2d	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2d	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpermt2d	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2d	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2q	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2q	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpermt2q	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpermt2q	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2q	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vpermt2q	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2q	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2q	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2q	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpermt2q	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpermt2q	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2q	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpermt2q	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2q	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2ps	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2ps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpermt2ps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpermt2ps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2ps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vpermt2ps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2ps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2ps	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2ps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpermt2ps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpermt2ps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2ps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vpermt2ps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vpermt2ps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2pd	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2pd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vpermt2pd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vpermt2pd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2pd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vpermt2pd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vpermt2pd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2pd	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2pd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vpermt2pd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vpermt2pd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2pd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vpermt2pd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vpermt2pd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	# VALIGNQ xmm and ymm forms with imm8: register mask variants, memory, SIB,
	# and {1to2}/{1to4} qword broadcast, plus disp8*N boundary pairs.
	valignq	xmm30, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	valignq	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	valignq	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	valignq	xmm30, xmm29, xmm28, 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, [rcx]{1to2}, 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	valignq	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	valignq	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	valignq	xmm30, xmm29, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
	valignq	xmm30, xmm29, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	valignq	xmm30, xmm29, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	valignq	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	valignq	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	valignq	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	valignq	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	valignq	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignq	ymm30, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
	valignq	ymm30, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	valignq	ymm30, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
	# VSCALEF{PD,PS} xmm and ymm forms: register mask variants, memory, SIB,
	# element-width broadcast ({1to2}/{1to4} qword, {1to4}/{1to8} dword), and
	# disp8*N boundary pairs for both full-vector and broadcast operands.
	vscalefpd	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vscalefpd	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vscalefpd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vscalefpd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vscalefpd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vscalefpd	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vscalefpd	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vscalefpd	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vscalefpd	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vscalefpd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, [rcx]{1to4}	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vscalefpd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vscalefpd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{F,VL} Disp8
	vscalefpd	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{F,VL}
	vscalefpd	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{F,VL} Disp8
	vscalefpd	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vscalefps	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vscalefps	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vscalefps	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vscalefps	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vscalefps	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vscalefps	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vscalefps	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vscalefps	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vscalefps	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vscalefps	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vscalefps	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vscalefps	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vscalefps	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vscalefps	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	# VFIXUPIMM{PD,PS} xmm and ymm forms with imm8: register mask variants,
	# memory, SIB, element-width broadcast, and disp8*N boundary pairs.
	vfixupimmpd	xmm30, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	vfixupimmpd	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	vfixupimmpd	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, xmm28, 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, [rcx]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	xmm30, xmm29, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd	xmm30, xmm29, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	xmm30, xmm29, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vfixupimmpd	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vfixupimmpd	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	ymm30, ymm29, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmpd	ymm30, ymm29, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmpd	ymm30, ymm29, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	vfixupimmps	xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	vfixupimmps	xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, xmm28, 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vfixupimmps	xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vfixupimmps	xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps	xmm30, xmm29, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps	xmm30, xmm29, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps	xmm30, xmm29, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vfixupimmps	ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vfixupimmps	ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, ymm28, 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, [rcx]{1to8}, 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vfixupimmps	ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vfixupimmps	ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps	ymm30, ymm29, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
	vfixupimmps	ymm30, ymm29, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vfixupimmps	ymm30, ymm29, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
	# VPSLLD / VPSLLQ / VPSRAD / VPSRAQ immediate-shift forms (EVEX.vvvv encodes
	# the destination; source may be reg, memory, or broadcast). Each mnemonic
	# covers xmm and ymm widths with mask variants and disp8*N boundary pairs.
	vpslld	xmm30, xmm29, 0xab	 # AVX512{F,VL}
	vpslld	xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
	vpslld	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
	vpslld	xmm30, xmm29, 123	 # AVX512{F,VL}
	vpslld	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpslld	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpslld	xmm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vpslld	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vpslld	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vpslld	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vpslld	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vpslld	xmm30, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpslld	xmm30, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
	vpslld	xmm30, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpslld	xmm30, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
	vpslld	ymm30, ymm29, 0xab	 # AVX512{F,VL}
	vpslld	ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
	vpslld	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
	vpslld	ymm30, ymm29, 123	 # AVX512{F,VL}
	vpslld	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpslld	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpslld	ymm30, [rcx]{1to8}, 123	 # AVX512{F,VL}
	vpslld	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vpslld	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vpslld	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vpslld	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vpslld	ymm30, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpslld	ymm30, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
	vpslld	ymm30, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpslld	ymm30, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
	vpsllq	xmm30, xmm29, 0xab	 # AVX512{F,VL}
	vpsllq	xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
	vpsllq	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
	vpsllq	xmm30, xmm29, 123	 # AVX512{F,VL}
	vpsllq	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpsllq	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpsllq	xmm30, [rcx]{1to2}, 123	 # AVX512{F,VL}
	vpsllq	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vpsllq	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vpsllq	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vpsllq	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vpsllq	xmm30, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsllq	xmm30, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpsllq	xmm30, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsllq	xmm30, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpsllq	ymm30, ymm29, 0xab	 # AVX512{F,VL}
	vpsllq	ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
	vpsllq	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
	vpsllq	ymm30, ymm29, 123	 # AVX512{F,VL}
	vpsllq	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpsllq	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpsllq	ymm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vpsllq	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vpsllq	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vpsllq	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vpsllq	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vpsllq	ymm30, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsllq	ymm30, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpsllq	ymm30, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsllq	ymm30, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
	vpsrad	xmm30, xmm29, 0xab	 # AVX512{F,VL}
	vpsrad	xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
	vpsrad	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
	vpsrad	xmm30, xmm29, 123	 # AVX512{F,VL}
	vpsrad	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpsrad	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpsrad	xmm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vpsrad	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vpsrad	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vpsrad	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vpsrad	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vpsrad	xmm30, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsrad	xmm30, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
	vpsrad	xmm30, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsrad	xmm30, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
	vpsrad	ymm30, ymm29, 0xab	 # AVX512{F,VL}
	vpsrad	ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
	vpsrad	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
	vpsrad	ymm30, ymm29, 123	 # AVX512{F,VL}
	vpsrad	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpsrad	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpsrad	ymm30, [rcx]{1to8}, 123	 # AVX512{F,VL}
	vpsrad	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vpsrad	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vpsrad	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vpsrad	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vpsrad	ymm30, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpsrad	ymm30, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
	vpsrad	ymm30, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vpsrad	ymm30, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
	vpsraq	xmm30, xmm29, 0xab	 # AVX512{F,VL}
	vpsraq	xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
	vpsraq	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
	vpsraq	xmm30, xmm29, 123	 # AVX512{F,VL}
	vpsraq	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpsraq	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpsraq	xmm30, [rcx]{1to2}, 123	 # AVX512{F,VL}
	vpsraq	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vpsraq	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vpsraq	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vpsraq	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vpsraq	xmm30, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsraq	xmm30, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
	vpsraq	xmm30, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
	vpsraq	xmm30, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
	vpsraq	ymm30, ymm29, 0xab	 # AVX512{F,VL}
	vpsraq	ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
	vpsraq	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
	vpsraq	ymm30, ymm29, 123	 # AVX512{F,VL}
	vpsraq	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vpsraq	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vpsraq	ymm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vpsraq	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vpsraq	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vpsraq	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vpsraq	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vpsraq	ymm30, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsraq	ymm30, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
	vpsraq	ymm30, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
	vpsraq	ymm30, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
	# VPROLVD / VPROLD / VPROLVQ rotate-left forms: variable-count (vprolv*)
	# and immediate-count (vprol*) variants, each with mask, memory, SIB,
	# broadcast, and disp8*N boundary-pair operands.
	vprolvd	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vprolvd	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vprolvd	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, [rcx]{1to4}	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vprolvd	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vprolvd	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, [rdx+508]{1to4}	 # AVX512{F,VL} Disp8
	vprolvd	xmm30, xmm29, [rdx+512]{1to4}	 # AVX512{F,VL}
	vprolvd	xmm30, xmm29, [rdx-512]{1to4}	 # AVX512{F,VL} Disp8
	vprolvd	xmm30, xmm29, [rdx-516]{1to4}	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, ymm28	 # AVX512{F,VL}
	vprolvd	ymm30{k7}, ymm29, ymm28	 # AVX512{F,VL}
	vprolvd	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, [rcx]{1to8}	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{F,VL} Disp8
	vprolvd	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{F,VL} Disp8
	vprolvd	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, [rdx+508]{1to8}	 # AVX512{F,VL} Disp8
	vprolvd	ymm30, ymm29, [rdx+512]{1to8}	 # AVX512{F,VL}
	vprolvd	ymm30, ymm29, [rdx-512]{1to8}	 # AVX512{F,VL} Disp8
	vprolvd	ymm30, ymm29, [rdx-516]{1to8}	 # AVX512{F,VL}
	vprold	xmm30, xmm29, 0xab	 # AVX512{F,VL}
	vprold	xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
	vprold	xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
	vprold	xmm30, xmm29, 123	 # AVX512{F,VL}
	vprold	xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vprold	xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vprold	xmm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
	vprold	xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
	vprold	xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
	vprold	xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
	vprold	xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
	vprold	xmm30, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
	vprold	xmm30, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
	vprold	xmm30, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
	vprold	xmm30, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
	vprold	ymm30, ymm29, 0xab	 # AVX512{F,VL}
	vprold	ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
	vprold	ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
	vprold	ymm30, ymm29, 123	 # AVX512{F,VL}
	vprold	ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
	vprold	ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
	vprold	ymm30, [rcx]{1to8}, 123	 # AVX512{F,VL}
	vprold	ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
	vprold	ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
	vprold	ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
	vprold	ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
	vprold	ymm30, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
	vprold	ymm30, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
	vprold	ymm30, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
	vprold	ymm30, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, xmm28	 # AVX512{F,VL}
	vprolvq	xmm30{k7}, xmm29, xmm28	 # AVX512{F,VL}
	vprolvq	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, [rcx]{1to2}	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{F,VL} Disp8
	vprolvq	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{F,VL} Disp8
	vprolvq	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{F,VL} Disp8
	vprolvq	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{F,VL}
	vprolvq	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{F,VL} Disp8
	vprolvq	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{F,VL}
vprolvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vprolvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprolvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprolvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprolvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprolvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vprolvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vprolvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vprolq xmm30, xmm29, 0xab # AVX512{F,VL}
vprolq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprolq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprolq xmm30, xmm29, 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprolq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprolq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprolq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprolq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprolq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vprolq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprolq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vprolq ymm30, ymm29, 0xab # AVX512{F,VL}
vprolq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprolq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprolq ymm30, ymm29, 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprolq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprolq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprolq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprolq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprolq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vprolq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprolq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
vprorvd xmm30, xmm29, xmm28 # AVX512{F,VL}
vprorvd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vprorvd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvd xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vprorvd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vprorvd xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vprorvd xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vprorvd xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vprorvd ymm30, ymm29, ymm28 # AVX512{F,VL}
vprorvd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprorvd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvd ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprorvd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprorvd ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vprorvd ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vprorvd ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vprord xmm30, xmm29, 0xab # AVX512{F,VL}
vprord xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprord xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprord xmm30, xmm29, 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprord xmm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprord xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprord xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprord xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprord xmm30, [rdx+508]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm30, [rdx+512]{1to4}, 123 # AVX512{F,VL}
vprord xmm30, [rdx-512]{1to4}, 123 # AVX512{F,VL} Disp8
vprord xmm30, [rdx-516]{1to4}, 123 # AVX512{F,VL}
vprord ymm30, ymm29, 0xab # AVX512{F,VL}
vprord ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprord ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprord ymm30, ymm29, 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprord ymm30, [rcx]{1to8}, 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprord ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprord ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprord ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprord ymm30, [rdx+508]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm30, [rdx+512]{1to8}, 123 # AVX512{F,VL}
vprord ymm30, [rdx-512]{1to8}, 123 # AVX512{F,VL} Disp8
vprord ymm30, [rdx-516]{1to8}, 123 # AVX512{F,VL}
vprorvq xmm30, xmm29, xmm28 # AVX512{F,VL}
vprorvq xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vprorvq xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvq xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vprorvq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vprorvq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vprorvq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vprorvq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vprorvq ymm30, ymm29, ymm28 # AVX512{F,VL}
vprorvq ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vprorvq ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vprorvq ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vprorvq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vprorvq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vprorvq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vprorvq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vprorq xmm30, xmm29, 0xab # AVX512{F,VL}
vprorq xmm30{k7}, xmm29, 0xab # AVX512{F,VL}
vprorq xmm30{k7}{z}, xmm29, 0xab # AVX512{F,VL}
vprorq xmm30, xmm29, 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprorq xmm30, [rcx]{1to2}, 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{F,VL} Disp8
vprorq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{F,VL}
vprorq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{F,VL} Disp8
vprorq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{F,VL}
vprorq xmm30, [rdx+1016]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm30, [rdx+1024]{1to2}, 123 # AVX512{F,VL}
vprorq xmm30, [rdx-1024]{1to2}, 123 # AVX512{F,VL} Disp8
vprorq xmm30, [rdx-1032]{1to2}, 123 # AVX512{F,VL}
vprorq ymm30, ymm29, 0xab # AVX512{F,VL}
vprorq ymm30{k7}, ymm29, 0xab # AVX512{F,VL}
vprorq ymm30{k7}{z}, ymm29, 0xab # AVX512{F,VL}
vprorq ymm30, ymm29, 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rcx], 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{F,VL}
vprorq ymm30, [rcx]{1to4}, 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{F,VL} Disp8
vprorq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{F,VL}
vprorq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{F,VL} Disp8
vprorq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{F,VL}
vprorq ymm30, [rdx+1016]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm30, [rdx+1024]{1to4}, 123 # AVX512{F,VL}
vprorq ymm30, [rdx-1024]{1to4}, 123 # AVX512{F,VL} Disp8
vprorq ymm30, [rdx-1032]{1to4}, 123 # AVX512{F,VL}
# ---------------------------------------------------------------------------
# vrndscalepd / vrndscaleps: round packed FP elements to a scaled number of
# fraction bits (imm8 selects rounding mode + precision).  AVX-512{F,VL}
# encoding tests, Intel syntax: register, {k7}/{z} masking, memory, and
# broadcast forms; displacement pairs straddle the disp8*N boundary
# (N = 16/32 for full xmm/ymm operands, N = element size for broadcasts).
# --- vrndscalepd: packed double (broadcast N = 8) --------------------------
vrndscalepd xmm30, xmm29, 0xab	 # AVX512{F,VL}
vrndscalepd xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
vrndscalepd xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
vrndscalepd xmm30, xmm29, 123	 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vrndscalepd xmm30, [rcx]{1to2}, 123	 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
vrndscalepd xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
vrndscalepd xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
vrndscalepd xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
vrndscalepd xmm30, [rdx+1016]{1to2}, 123	 # AVX512{F,VL} Disp8
vrndscalepd xmm30, [rdx+1024]{1to2}, 123	 # AVX512{F,VL}
vrndscalepd xmm30, [rdx-1024]{1to2}, 123	 # AVX512{F,VL} Disp8
vrndscalepd xmm30, [rdx-1032]{1to2}, 123	 # AVX512{F,VL}
vrndscalepd ymm30, ymm29, 0xab	 # AVX512{F,VL}
vrndscalepd ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
vrndscalepd ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
vrndscalepd ymm30, ymm29, 123	 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vrndscalepd ymm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
vrndscalepd ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
vrndscalepd ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
vrndscalepd ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
vrndscalepd ymm30, [rdx+1016]{1to4}, 123	 # AVX512{F,VL} Disp8
vrndscalepd ymm30, [rdx+1024]{1to4}, 123	 # AVX512{F,VL}
vrndscalepd ymm30, [rdx-1024]{1to4}, 123	 # AVX512{F,VL} Disp8
vrndscalepd ymm30, [rdx-1032]{1to4}, 123	 # AVX512{F,VL}
# --- vrndscaleps: packed single (broadcast N = 4) --------------------------
vrndscaleps xmm30, xmm29, 0xab	 # AVX512{F,VL}
vrndscaleps xmm30{k7}, xmm29, 0xab	 # AVX512{F,VL}
vrndscaleps xmm30{k7}{z}, xmm29, 0xab	 # AVX512{F,VL}
vrndscaleps xmm30, xmm29, 123	 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vrndscaleps xmm30, [rcx]{1to4}, 123	 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{F,VL} Disp8
vrndscaleps xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{F,VL}
vrndscaleps xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{F,VL} Disp8
vrndscaleps xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{F,VL}
vrndscaleps xmm30, [rdx+508]{1to4}, 123	 # AVX512{F,VL} Disp8
vrndscaleps xmm30, [rdx+512]{1to4}, 123	 # AVX512{F,VL}
vrndscaleps xmm30, [rdx-512]{1to4}, 123	 # AVX512{F,VL} Disp8
vrndscaleps xmm30, [rdx-516]{1to4}, 123	 # AVX512{F,VL}
vrndscaleps ymm30, ymm29, 0xab	 # AVX512{F,VL}
vrndscaleps ymm30{k7}, ymm29, 0xab	 # AVX512{F,VL}
vrndscaleps ymm30{k7}{z}, ymm29, 0xab	 # AVX512{F,VL}
vrndscaleps ymm30, ymm29, 123	 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rcx], 123	 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{F,VL}
vrndscaleps ymm30, [rcx]{1to8}, 123	 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rdx+4064], 123	 # AVX512{F,VL} Disp8
vrndscaleps ymm30, YMMWORD PTR [rdx+4096], 123	 # AVX512{F,VL}
vrndscaleps ymm30, YMMWORD PTR [rdx-4096], 123	 # AVX512{F,VL} Disp8
vrndscaleps ymm30, YMMWORD PTR [rdx-4128], 123	 # AVX512{F,VL}
vrndscaleps ymm30, [rdx+508]{1to8}, 123	 # AVX512{F,VL} Disp8
vrndscaleps ymm30, [rdx+512]{1to8}, 123	 # AVX512{F,VL}
vrndscaleps ymm30, [rdx-512]{1to8}, 123	 # AVX512{F,VL} Disp8
vrndscaleps ymm30, [rdx-516]{1to8}, 123	 # AVX512{F,VL}
# ---------------------------------------------------------------------------
# Store-form AVX-512{F,VL} encoding tests: compress/convert/extract to memory
# and the masked vector-store family.  Stores accept {k7} merge-masking on
# the memory destination; disp8*N here scales by the full memory-operand
# size (8/16/32 bytes depending on the form).
# --- vpcompressq: compress qwords to memory or register --------------------
vpcompressq XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpcompressq XMMWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpcompressq XMMWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpcompressq YMMWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpcompressq YMMWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vpcompressq xmm30, xmm29	 # AVX512{F,VL}
vpcompressq xmm30{k7}, xmm29	 # AVX512{F,VL}
vpcompressq xmm30{k7}{z}, xmm29	 # AVX512{F,VL}
vpcompressq ymm30, ymm29	 # AVX512{F,VL}
vpcompressq ymm30{k7}, ymm29	 # AVX512{F,VL}
vpcompressq ymm30{k7}{z}, ymm29	 # AVX512{F,VL}
# --- vcvtps2ph: convert packed single to half, store to memory -------------
vcvtps2ph QWORD PTR [rcx], xmm30, 0xab	 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rcx]{k7}, xmm30, 0xab	 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rcx], xmm30, 123	 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rax+r14*8+0x1234], xmm30, 123	 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rdx+1016], xmm30, 123	 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [rdx+1024], xmm30, 123	 # AVX512{F,VL}
vcvtps2ph QWORD PTR [rdx-1024], xmm30, 123	 # AVX512{F,VL} Disp8
vcvtps2ph QWORD PTR [rdx-1032], xmm30, 123	 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rcx], ymm30, 0xab	 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rcx]{k7}, ymm30, 0xab	 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rcx], ymm30, 123	 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rax+r14*8+0x1234], ymm30, 123	 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rdx+2032], ymm30, 123	 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [rdx+2048], ymm30, 123	 # AVX512{F,VL}
vcvtps2ph XMMWORD PTR [rdx-2048], ymm30, 123	 # AVX512{F,VL} Disp8
vcvtps2ph XMMWORD PTR [rdx-2064], ymm30, 123	 # AVX512{F,VL}
# --- vextractf32x4 / vextracti32x4: extract 128-bit lane to memory ---------
vextractf32x4 XMMWORD PTR [rcx], ymm29, 0xab	 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rcx]{k7}, ymm29, 0xab	 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rcx], ymm29, 123	 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123	 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rdx+2032], ymm29, 123	 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [rdx+2048], ymm29, 123	 # AVX512{F,VL}
vextractf32x4 XMMWORD PTR [rdx-2048], ymm29, 123	 # AVX512{F,VL} Disp8
vextractf32x4 XMMWORD PTR [rdx-2064], ymm29, 123	 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rcx], ymm29, 0xab	 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rcx]{k7}, ymm29, 0xab	 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rcx], ymm29, 123	 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rax+r14*8+0x1234], ymm29, 123	 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rdx+2032], ymm29, 123	 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [rdx+2048], ymm29, 123	 # AVX512{F,VL}
vextracti32x4 XMMWORD PTR [rdx-2048], ymm29, 123	 # AVX512{F,VL} Disp8
vextracti32x4 XMMWORD PTR [rdx-2064], ymm29, 123	 # AVX512{F,VL}
# --- masked vector stores: aligned (a) and unaligned (u) variants ----------
vmovapd XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovapd XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovapd XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovapd XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovapd XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovapd XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovapd YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovapd YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovapd YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovapd YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovapd YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovapd YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovaps XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovaps XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovaps XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovaps XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovaps XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovaps XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovaps YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovaps YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovaps YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovaps YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovaps YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovaps YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovdqa32 XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovdqa32 XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovdqa32 YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovdqa32 YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovdqa64 XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovdqa64 XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovdqa64 YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovdqa64 YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovdqu32 XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovdqu32 XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovdqu32 YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovdqu32 YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovdqu64 XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovdqu64 XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovdqu64 YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovdqu64 YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovupd XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovupd XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovupd XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovupd XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovupd XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovupd XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovupd YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovupd YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovupd YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovupd YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovupd YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovupd YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
vmovups XMMWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vmovups XMMWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vmovups XMMWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vmovups XMMWORD PTR [rdx+2032], xmm30	 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [rdx+2048], xmm30	 # AVX512{F,VL}
vmovups XMMWORD PTR [rdx-2048], xmm30	 # AVX512{F,VL} Disp8
vmovups XMMWORD PTR [rdx-2064], xmm30	 # AVX512{F,VL}
vmovups YMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vmovups YMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vmovups YMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vmovups YMMWORD PTR [rdx+4064], ymm30	 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [rdx+4096], ymm30	 # AVX512{F,VL}
vmovups YMMWORD PTR [rdx-4096], ymm30	 # AVX512{F,VL} Disp8
vmovups YMMWORD PTR [rdx-4128], ymm30	 # AVX512{F,VL}
# ---------------------------------------------------------------------------
# vpmov* down-converting stores (AVX-512{F,VL}, Intel syntax): truncate
# (plain), signed-saturate (s), or unsigned-saturate (us) each element and
# store the narrowed result.  The memory operand is a fraction of the source
# vector width (e.g. qword->byte from xmm is 2 bytes), so disp8*N scales by
# that narrowed size — hence the small Disp8 boundary values below.
# --- q->b: WORD from xmm (2 B), DWORD from ymm (4 B) -----------------------
vpmovqb WORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovqb WORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovqb WORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovqb WORD PTR [rdx+254], xmm30	 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [rdx+256], xmm30	 # AVX512{F,VL}
vpmovqb WORD PTR [rdx-256], xmm30	 # AVX512{F,VL} Disp8
vpmovqb WORD PTR [rdx-258], xmm30	 # AVX512{F,VL}
vpmovqb DWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovqb DWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovqb DWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovqb DWORD PTR [rdx+508], ymm30	 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [rdx+512], ymm30	 # AVX512{F,VL}
vpmovqb DWORD PTR [rdx-512], ymm30	 # AVX512{F,VL} Disp8
vpmovqb DWORD PTR [rdx-516], ymm30	 # AVX512{F,VL}
vpmovsqb WORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovsqb WORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovsqb WORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovsqb WORD PTR [rdx+254], xmm30	 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [rdx+256], xmm30	 # AVX512{F,VL}
vpmovsqb WORD PTR [rdx-256], xmm30	 # AVX512{F,VL} Disp8
vpmovsqb WORD PTR [rdx-258], xmm30	 # AVX512{F,VL}
vpmovsqb DWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovsqb DWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovsqb DWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovsqb DWORD PTR [rdx+508], ymm30	 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [rdx+512], ymm30	 # AVX512{F,VL}
vpmovsqb DWORD PTR [rdx-512], ymm30	 # AVX512{F,VL} Disp8
vpmovsqb DWORD PTR [rdx-516], ymm30	 # AVX512{F,VL}
vpmovusqb WORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovusqb WORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovusqb WORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovusqb WORD PTR [rdx+254], xmm30	 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [rdx+256], xmm30	 # AVX512{F,VL}
vpmovusqb WORD PTR [rdx-256], xmm30	 # AVX512{F,VL} Disp8
vpmovusqb WORD PTR [rdx-258], xmm30	 # AVX512{F,VL}
vpmovusqb DWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovusqb DWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovusqb DWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovusqb DWORD PTR [rdx+508], ymm30	 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [rdx+512], ymm30	 # AVX512{F,VL}
vpmovusqb DWORD PTR [rdx-512], ymm30	 # AVX512{F,VL} Disp8
vpmovusqb DWORD PTR [rdx-516], ymm30	 # AVX512{F,VL}
# --- q->w: DWORD from xmm (4 B), QWORD from ymm (8 B) ----------------------
vpmovqw DWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovqw DWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovqw DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovqw DWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vpmovqw DWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vpmovqw DWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vpmovqw QWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovqw QWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovqw QWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovqw QWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpmovqw QWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpmovqw QWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vpmovsqw DWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovsqw DWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovsqw DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovsqw DWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vpmovsqw DWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vpmovsqw DWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vpmovsqw QWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovsqw QWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovsqw QWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovsqw QWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpmovsqw QWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpmovsqw QWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vpmovusqw DWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovusqw DWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovusqw DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovusqw DWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vpmovusqw DWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vpmovusqw DWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vpmovusqw QWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovusqw QWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovusqw QWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovusqw QWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpmovusqw QWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpmovusqw QWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
# --- q->d: QWORD from xmm (8 B), XMMWORD from ymm (16 B) -------------------
vpmovqd QWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovqd QWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovqd QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovqd QWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpmovqd QWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpmovqd QWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rdx+2032], ymm30	 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [rdx+2048], ymm30	 # AVX512{F,VL}
vpmovqd XMMWORD PTR [rdx-2048], ymm30	 # AVX512{F,VL} Disp8
vpmovqd XMMWORD PTR [rdx-2064], ymm30	 # AVX512{F,VL}
vpmovsqd QWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovsqd QWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovsqd QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovsqd QWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpmovsqd QWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpmovsqd QWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rdx+2032], ymm30	 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [rdx+2048], ymm30	 # AVX512{F,VL}
vpmovsqd XMMWORD PTR [rdx-2048], ymm30	 # AVX512{F,VL} Disp8
vpmovsqd XMMWORD PTR [rdx-2064], ymm30	 # AVX512{F,VL}
vpmovusqd QWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovusqd QWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovusqd QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovusqd QWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpmovusqd QWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpmovusqd QWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rdx+2032], ymm30	 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [rdx+2048], ymm30	 # AVX512{F,VL}
vpmovusqd XMMWORD PTR [rdx-2048], ymm30	 # AVX512{F,VL} Disp8
vpmovusqd XMMWORD PTR [rdx-2064], ymm30	 # AVX512{F,VL}
# --- d->b: DWORD from xmm (4 B), QWORD from ymm (8 B) ----------------------
vpmovdb DWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovdb DWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovdb DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovdb DWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vpmovdb DWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vpmovdb DWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vpmovdb QWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovdb QWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovdb QWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovdb QWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpmovdb QWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpmovdb QWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vpmovsdb DWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovsdb DWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovsdb DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovsdb DWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vpmovsdb DWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vpmovsdb DWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vpmovsdb QWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovsdb QWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovsdb QWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovsdb QWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpmovsdb QWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpmovsdb QWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
vpmovusdb DWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovusdb DWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovusdb DWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovusdb DWORD PTR [rdx+508], xmm30	 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [rdx+512], xmm30	 # AVX512{F,VL}
vpmovusdb DWORD PTR [rdx-512], xmm30	 # AVX512{F,VL} Disp8
vpmovusdb DWORD PTR [rdx-516], xmm30	 # AVX512{F,VL}
vpmovusdb QWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovusdb QWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovusdb QWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovusdb QWORD PTR [rdx+1016], ymm30	 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [rdx+1024], ymm30	 # AVX512{F,VL}
vpmovusdb QWORD PTR [rdx-1024], ymm30	 # AVX512{F,VL} Disp8
vpmovusdb QWORD PTR [rdx-1032], ymm30	 # AVX512{F,VL}
# --- d->w: QWORD from xmm (8 B), XMMWORD from ymm (16 B) -------------------
# NOTE(review): the vpmovusdw group continues past this chunk; keep order.
vpmovdw QWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovdw QWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovdw QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovdw QWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpmovdw QWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpmovdw QWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rdx+2032], ymm30	 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [rdx+2048], ymm30	 # AVX512{F,VL}
vpmovdw XMMWORD PTR [rdx-2048], ymm30	 # AVX512{F,VL} Disp8
vpmovdw XMMWORD PTR [rdx-2064], ymm30	 # AVX512{F,VL}
vpmovsdw QWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovsdw QWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovsdw QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovsdw QWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpmovsdw QWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpmovsdw QWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rcx], ymm30	 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rcx]{k7}, ymm30	 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rax+r14*8+0x1234], ymm30	 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rdx+2032], ymm30	 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [rdx+2048], ymm30	 # AVX512{F,VL}
vpmovsdw XMMWORD PTR [rdx-2048], ymm30	 # AVX512{F,VL} Disp8
vpmovsdw XMMWORD PTR [rdx-2064], ymm30	 # AVX512{F,VL}
vpmovusdw QWORD PTR [rcx], xmm30	 # AVX512{F,VL}
vpmovusdw QWORD PTR [rcx]{k7}, xmm30	 # AVX512{F,VL}
vpmovusdw QWORD PTR [rax+r14*8+0x1234], xmm30	 # AVX512{F,VL}
vpmovusdw QWORD PTR [rdx+1016], xmm30	 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [rdx+1024], xmm30	 # AVX512{F,VL}
vpmovusdw QWORD PTR [rdx-1024], xmm30	 # AVX512{F,VL} Disp8
vpmovusdw QWORD PTR [rdx-1032], xmm30	 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rcx], ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rcx]{k7}, ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rax+r14*8+0x1234], ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rdx+2032], ymm30 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [rdx+2048], ymm30 # AVX512{F,VL}
vpmovusdw XMMWORD PTR [rdx-2048], ymm30 # AVX512{F,VL} Disp8
vpmovusdw XMMWORD PTR [rdx-2064], ymm30 # AVX512{F,VL}
vcvttpd2udq xmm30, xmm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttpd2udq xmm30, [rcx]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvttpd2udq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx+1024]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx-1032]{1to2} # AVX512{F,VL}
vcvttpd2udq xmm30, ymm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}, ymm29 # AVX512{F,VL}
vcvttpd2udq xmm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttpd2udq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvttpd2udq xmm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx+1024]{1to4} # AVX512{F,VL}
vcvttpd2udq xmm30, QWORD BCST [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vcvttpd2udq xmm30, QWORD BCST [rdx-1032]{1to4} # AVX512{F,VL}
vcvttps2udq xmm30, xmm29 # AVX512{F,VL}
vcvttps2udq xmm30{k7}, xmm29 # AVX512{F,VL}
vcvttps2udq xmm30{k7}{z}, xmm29 # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rcx] # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttps2udq xmm30, [rcx]{1to4} # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vcvttps2udq xmm30, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vcvttps2udq xmm30, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vcvttps2udq xmm30, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vcvttps2udq xmm30, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm30, [rdx+512]{1to4} # AVX512{F,VL}
vcvttps2udq xmm30, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vcvttps2udq xmm30, [rdx-516]{1to4} # AVX512{F,VL}
vcvttps2udq ymm30, ymm29 # AVX512{F,VL}
vcvttps2udq ymm30{k7}, ymm29 # AVX512{F,VL}
vcvttps2udq ymm30{k7}{z}, ymm29 # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rcx] # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vcvttps2udq ymm30, [rcx]{1to8} # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vcvttps2udq ymm30, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vcvttps2udq ymm30, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vcvttps2udq ymm30, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vcvttps2udq ymm30, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm30, [rdx+512]{1to8} # AVX512{F,VL}
vcvttps2udq ymm30, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vcvttps2udq ymm30, [rdx-516]{1to8} # AVX512{F,VL}
vpermi2d xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2d xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2d xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2d xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2d xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermi2d xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2d xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermi2d ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2d ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2d ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2d ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2d ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermi2d ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2d ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermi2q xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2q xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2q xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2q xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2q xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermi2q xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2q xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermi2q ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2q ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2q ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2q ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2q ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermi2q ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2q ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vpermi2ps xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2ps xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2ps xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2ps xmm30, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vpermi2ps xmm30, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vpermi2ps xmm30, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vpermi2ps ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2ps ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2ps ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, [rcx]{1to8} # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2ps ymm30, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vpermi2ps ymm30, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vpermi2ps ymm30, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vpermi2pd xmm30, xmm29, xmm28 # AVX512{F,VL}
vpermi2pd xmm30{k7}, xmm29, xmm28 # AVX512{F,VL}
vpermi2pd xmm30{k7}{z}, xmm29, xmm28 # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, [rcx]{1to2} # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vpermi2pd xmm30, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vpermi2pd xmm30, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vpermi2pd xmm30, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vpermi2pd ymm30, ymm29, ymm28 # AVX512{F,VL}
vpermi2pd ymm30{k7}, ymm29, ymm28 # AVX512{F,VL}
vpermi2pd ymm30{k7}{z}, ymm29, ymm28 # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, [rcx]{1to4} # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vpermi2pd ymm30, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vpermi2pd ymm30, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vpermi2pd ymm30, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
vptestnmd k5, xmm29, xmm28 # AVX512{F,VL}
vptestnmd k5{k7}, xmm29, xmm28 # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmd k5, xmm29, [rcx]{1to4} # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestnmd k5, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestnmd k5, xmm29, [rdx+508]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, [rdx+512]{1to4} # AVX512{F,VL}
vptestnmd k5, xmm29, [rdx-512]{1to4} # AVX512{F,VL} Disp8
vptestnmd k5, xmm29, [rdx-516]{1to4} # AVX512{F,VL}
vptestnmd k5, ymm29, ymm28 # AVX512{F,VL}
vptestnmd k5{k7}, ymm29, ymm28 # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmd k5, ymm29, [rcx]{1to8} # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestnmd k5, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestnmd k5, ymm29, [rdx+508]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, [rdx+512]{1to8} # AVX512{F,VL}
vptestnmd k5, ymm29, [rdx-512]{1to8} # AVX512{F,VL} Disp8
vptestnmd k5, ymm29, [rdx-516]{1to8} # AVX512{F,VL}
vptestnmq k5, xmm29, xmm28 # AVX512{F,VL}
vptestnmq k5{k7}, xmm29, xmm28 # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmq k5, xmm29, [rcx]{1to2} # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rdx+2032] # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, XMMWORD PTR [rdx+2048] # AVX512{F,VL}
vptestnmq k5, xmm29, XMMWORD PTR [rdx-2048] # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, XMMWORD PTR [rdx-2064] # AVX512{F,VL}
vptestnmq k5, xmm29, [rdx+1016]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, [rdx+1024]{1to2} # AVX512{F,VL}
vptestnmq k5, xmm29, [rdx-1024]{1to2} # AVX512{F,VL} Disp8
vptestnmq k5, xmm29, [rdx-1032]{1to2} # AVX512{F,VL}
vptestnmq k5, ymm29, ymm28 # AVX512{F,VL}
vptestnmq k5{k7}, ymm29, ymm28 # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rcx] # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{F,VL}
vptestnmq k5, ymm29, [rcx]{1to4} # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rdx+4064] # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, YMMWORD PTR [rdx+4096] # AVX512{F,VL}
vptestnmq k5, ymm29, YMMWORD PTR [rdx-4096] # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, YMMWORD PTR [rdx-4128] # AVX512{F,VL}
vptestnmq k5, ymm29, [rdx+1016]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, [rdx+1024]{1to4} # AVX512{F,VL}
vptestnmq k5, ymm29, [rdx-1024]{1to4} # AVX512{F,VL} Disp8
vptestnmq k5, ymm29, [rdx-1032]{1to4} # AVX512{F,VL}
# ---- next file: gas/testsuite/gas/i386/x86-64-avx512vnni_vl.s (repo: stsp/binutils-ia16, 6,004 bytes) ----
# Check 64bit AVX512{VNNI,VL} instructions
# Assembler test fixture: each line exercises one encoding form of the
# VNNI dot-product instructions under AVX512VL vector lengths (xmm/ymm):
# register-register, merge-masked {%kN}, zero-masked {z}, SIB memory
# (base+index*scale+disp), a Disp8-compressible displacement (lines
# tagged "Disp8"), and an embedded broadcast {1toN}.  The trailing
# comment on every line names the CPU feature set being tested.
.allow_index_reg
.text
_start:
# AT&T-syntax forms (operand order: src2, src1, dst).
	vpdpwssd	%xmm20, %xmm22, %xmm26	 # AVX512{VNNI,VL}
	vpdpwssd	%xmm20, %xmm22, %xmm26{%k3}	 # AVX512{VNNI,VL}
	vpdpwssd	%xmm20, %xmm22, %xmm26{%k3}{z}	 # AVX512{VNNI,VL}
	vpdpwssd	0x123(%rax,%r14,8), %xmm22, %xmm26	 # AVX512{VNNI,VL}
	vpdpwssd	2032(%rdx), %xmm22, %xmm26	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	508(%rdx){1to4}, %xmm22, %xmm26	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	%ymm18, %ymm20, %ymm20	 # AVX512{VNNI,VL}
	vpdpwssd	%ymm18, %ymm20, %ymm20{%k5}	 # AVX512{VNNI,VL}
	vpdpwssd	%ymm18, %ymm20, %ymm20{%k5}{z}	 # AVX512{VNNI,VL}
	vpdpwssd	0x123(%rax,%r14,8), %ymm20, %ymm20	 # AVX512{VNNI,VL}
	vpdpwssd	4064(%rdx), %ymm20, %ymm20	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	508(%rdx){1to8}, %ymm20, %ymm20	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	%xmm23, %xmm19, %xmm22	 # AVX512{VNNI,VL}
	vpdpwssds	%xmm23, %xmm19, %xmm22{%k7}	 # AVX512{VNNI,VL}
	vpdpwssds	%xmm23, %xmm19, %xmm22{%k7}{z}	 # AVX512{VNNI,VL}
	vpdpwssds	0x123(%rax,%r14,8), %xmm19, %xmm22	 # AVX512{VNNI,VL}
	vpdpwssds	2032(%rdx), %xmm19, %xmm22	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	508(%rdx){1to4}, %xmm19, %xmm22	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	%ymm28, %ymm23, %ymm23	 # AVX512{VNNI,VL}
	vpdpwssds	%ymm28, %ymm23, %ymm23{%k3}	 # AVX512{VNNI,VL}
	vpdpwssds	%ymm28, %ymm23, %ymm23{%k3}{z}	 # AVX512{VNNI,VL}
	vpdpwssds	0x123(%rax,%r14,8), %ymm23, %ymm23	 # AVX512{VNNI,VL}
	vpdpwssds	4064(%rdx), %ymm23, %ymm23	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	508(%rdx){1to8}, %ymm23, %ymm23	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	%xmm28, %xmm29, %xmm18	 # AVX512{VNNI,VL}
	vpdpbusd	%xmm28, %xmm29, %xmm18{%k3}	 # AVX512{VNNI,VL}
	vpdpbusd	%xmm28, %xmm29, %xmm18{%k3}{z}	 # AVX512{VNNI,VL}
	vpdpbusd	0x123(%rax,%r14,8), %xmm29, %xmm18	 # AVX512{VNNI,VL}
	vpdpbusd	2032(%rdx), %xmm29, %xmm18	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	508(%rdx){1to4}, %xmm29, %xmm18	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	%ymm17, %ymm18, %ymm20	 # AVX512{VNNI,VL}
	vpdpbusd	%ymm17, %ymm18, %ymm20{%k2}	 # AVX512{VNNI,VL}
	vpdpbusd	%ymm17, %ymm18, %ymm20{%k2}{z}	 # AVX512{VNNI,VL}
	vpdpbusd	0x123(%rax,%r14,8), %ymm18, %ymm20	 # AVX512{VNNI,VL}
	vpdpbusd	4064(%rdx), %ymm18, %ymm20	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	508(%rdx){1to8}, %ymm18, %ymm20	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	%xmm27, %xmm26, %xmm24	 # AVX512{VNNI,VL}
	vpdpbusds	%xmm27, %xmm26, %xmm24{%k4}	 # AVX512{VNNI,VL}
	vpdpbusds	%xmm27, %xmm26, %xmm24{%k4}{z}	 # AVX512{VNNI,VL}
	vpdpbusds	0x123(%rax,%r14,8), %xmm26, %xmm24	 # AVX512{VNNI,VL}
	vpdpbusds	2032(%rdx), %xmm26, %xmm24	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	508(%rdx){1to4}, %xmm26, %xmm24	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	%ymm25, %ymm29, %ymm30	 # AVX512{VNNI,VL}
	vpdpbusds	%ymm25, %ymm29, %ymm30{%k1}	 # AVX512{VNNI,VL}
	vpdpbusds	%ymm25, %ymm29, %ymm30{%k1}{z}	 # AVX512{VNNI,VL}
	vpdpbusds	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VNNI,VL}
	vpdpbusds	4064(%rdx), %ymm29, %ymm30	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	508(%rdx){1to8}, %ymm29, %ymm30	 # AVX512{VNNI,VL} Disp8
# The same encoding patterns repeated in Intel syntax
# (operand order: dst, src1, src2; masks written {kN}/{z}).
	.intel_syntax noprefix
	vpdpwssd	xmm21, xmm20, xmm23	 # AVX512{VNNI,VL}
	vpdpwssd	xmm21{k6}, xmm20, xmm23	 # AVX512{VNNI,VL}
	vpdpwssd	xmm21{k6}{z}, xmm20, xmm23	 # AVX512{VNNI,VL}
	vpdpwssd	xmm21, xmm20, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpwssd	xmm21, xmm20, XMMWORD PTR [rdx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	xmm21, xmm20, [rdx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	ymm25, ymm27, ymm17	 # AVX512{VNNI,VL}
	vpdpwssd	ymm25{k6}, ymm27, ymm17	 # AVX512{VNNI,VL}
	vpdpwssd	ymm25{k6}{z}, ymm27, ymm17	 # AVX512{VNNI,VL}
	vpdpwssd	ymm25, ymm27, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpwssd	ymm25, ymm27, YMMWORD PTR [rdx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	ymm25, ymm27, [rdx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	xmm30, xmm25, xmm21	 # AVX512{VNNI,VL}
	vpdpwssds	xmm30{k6}, xmm25, xmm21	 # AVX512{VNNI,VL}
	vpdpwssds	xmm30{k6}{z}, xmm25, xmm21	 # AVX512{VNNI,VL}
	vpdpwssds	xmm30, xmm25, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpwssds	xmm30, xmm25, XMMWORD PTR [rdx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	xmm30, xmm25, [rdx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	ymm28, ymm27, ymm27	 # AVX512{VNNI,VL}
	vpdpwssds	ymm28{k7}, ymm27, ymm27	 # AVX512{VNNI,VL}
	vpdpwssds	ymm28{k7}{z}, ymm27, ymm27	 # AVX512{VNNI,VL}
	vpdpwssds	ymm28, ymm27, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpwssds	ymm28, ymm27, YMMWORD PTR [rdx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	ymm28, ymm27, [rdx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	xmm26, xmm18, xmm19	 # AVX512{VNNI,VL}
	vpdpbusd	xmm26{k6}, xmm18, xmm19	 # AVX512{VNNI,VL}
	vpdpbusd	xmm26{k6}{z}, xmm18, xmm19	 # AVX512{VNNI,VL}
	vpdpbusd	xmm26, xmm18, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpbusd	xmm26, xmm18, XMMWORD PTR [rdx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	xmm26, xmm18, [rdx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	ymm21, ymm17, ymm27	 # AVX512{VNNI,VL}
	vpdpbusd	ymm21{k2}, ymm17, ymm27	 # AVX512{VNNI,VL}
	vpdpbusd	ymm21{k2}{z}, ymm17, ymm27	 # AVX512{VNNI,VL}
	vpdpbusd	ymm21, ymm17, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpbusd	ymm21, ymm17, YMMWORD PTR [rdx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	ymm21, ymm17, [rdx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	xmm28, xmm26, xmm24	 # AVX512{VNNI,VL}
	vpdpbusds	xmm28{k1}, xmm26, xmm24	 # AVX512{VNNI,VL}
	vpdpbusds	xmm28{k1}{z}, xmm26, xmm24	 # AVX512{VNNI,VL}
	vpdpbusds	xmm28, xmm26, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpbusds	xmm28, xmm26, XMMWORD PTR [rdx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	xmm28, xmm26, [rdx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	ymm23, ymm18, ymm27	 # AVX512{VNNI,VL}
	vpdpbusds	ymm23{k6}, ymm18, ymm27	 # AVX512{VNNI,VL}
	vpdpbusds	ymm23{k6}{z}, ymm18, ymm27	 # AVX512{VNNI,VL}
	vpdpbusds	ymm23, ymm18, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VNNI,VL}
	vpdpbusds	ymm23, ymm18, YMMWORD PTR [rdx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	ymm23, ymm18, [rdx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
# ---- next file: gas/testsuite/gas/i386/x86-64-avx512vbmi_vl.s (repo: stsp/binutils-ia16, 10,356 bytes) ----
# Check 64bit AVX512{VBMI,VL} instructions
# Assembler test fixture for the VBMI byte-permute / multishift
# instructions at AVX512VL vector lengths (xmm/ymm).  For each mnemonic
# the file walks the encoding forms: register-register, merge-masked
# {%kN}, zero-masked {z}, plain and SIB memory operands, displacements
# on both sides of the Disp8*N compression boundary (lines tagged
# "Disp8" compress; their neighbours do not), and — for the quadword
# element form vpmultishiftqb — embedded broadcasts {1to2}/{1to4}.
.allow_index_reg
.text
_start:
# AT&T-syntax forms (operand order: src2, src1, dst).
	vpermb	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermb	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI,VL}
	vpermb	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpermb	(%rcx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermb	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermb	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpermb	2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermb	-2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpermb	-2064(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermb	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermb	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI,VL}
	vpermb	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpermb	(%rcx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermb	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermb	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpermb	4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermb	-4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpermb	-4128(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermi2b	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermi2b	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI,VL}
	vpermi2b	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpermi2b	(%rcx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermi2b	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermi2b	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpermi2b	2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermi2b	-2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpermi2b	-2064(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermi2b	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermi2b	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI,VL}
	vpermi2b	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpermi2b	(%rcx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermi2b	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermi2b	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpermi2b	4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermi2b	-4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpermi2b	-4128(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermt2b	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermt2b	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI,VL}
	vpermt2b	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpermt2b	(%rcx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermt2b	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermt2b	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpermt2b	2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermt2b	-2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpermt2b	-2064(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpermt2b	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermt2b	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI,VL}
	vpermt2b	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpermt2b	(%rcx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermt2b	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermt2b	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpermt2b	4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpermt2b	-4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpermt2b	-4128(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	%xmm28, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	%xmm28, %xmm29, %xmm30{%k7}	 # AVX512{VBMI,VL}
	vpmultishiftqb	%xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpmultishiftqb	(%rcx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	(%rcx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	2032(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	-2048(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	-2064(%rdx), %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	1016(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	-1024(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	-1032(%rdx){1to2}, %xmm29, %xmm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	%ymm28, %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	%ymm28, %ymm29, %ymm30{%k7}	 # AVX512{VBMI,VL}
	vpmultishiftqb	%ymm28, %ymm29, %ymm30{%k7}{z}	 # AVX512{VBMI,VL}
	vpmultishiftqb	(%rcx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	0x123(%rax,%r14,8), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	(%rcx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	4064(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	-4096(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	-4128(%rdx), %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	1016(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI,VL}
	vpmultishiftqb	-1024(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	-1032(%rdx){1to4}, %ymm29, %ymm30	 # AVX512{VBMI,VL}
# The same encoding patterns repeated in Intel syntax
# (operand order: dst, src1, src2; masks written {kN}/{z}).
	.intel_syntax noprefix
	vpermb	xmm30, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermb	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpermb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpermb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI,VL} Disp8
	vpermb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{VBMI,VL}
	vpermb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{VBMI,VL} Disp8
	vpermb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{VBMI,VL}
	vpermb	ymm30, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermb	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpermb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpermb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI,VL} Disp8
	vpermb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{VBMI,VL}
	vpermb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{VBMI,VL} Disp8
	vpermb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{VBMI,VL}
	vpermi2b	xmm30, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermi2b	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermi2b	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermi2b	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpermi2b	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpermi2b	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI,VL} Disp8
	vpermi2b	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{VBMI,VL}
	vpermi2b	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{VBMI,VL} Disp8
	vpermi2b	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{VBMI,VL}
	vpermi2b	ymm30, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermi2b	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermi2b	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermi2b	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpermi2b	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpermi2b	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI,VL} Disp8
	vpermi2b	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{VBMI,VL}
	vpermi2b	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{VBMI,VL} Disp8
	vpermi2b	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{VBMI,VL}
	vpermt2b	xmm30, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermt2b	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermt2b	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpermt2b	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpermt2b	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpermt2b	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI,VL} Disp8
	vpermt2b	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{VBMI,VL}
	vpermt2b	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{VBMI,VL} Disp8
	vpermt2b	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{VBMI,VL}
	vpermt2b	ymm30, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermt2b	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermt2b	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpermt2b	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpermt2b	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpermt2b	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI,VL} Disp8
	vpermt2b	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{VBMI,VL}
	vpermt2b	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{VBMI,VL} Disp8
	vpermt2b	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30{k7}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30{k7}{z}, xmm29, xmm28	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, [rcx]{1to2}	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, [rdx+1016]{1to2}	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	xmm30, xmm29, [rdx+1024]{1to2}	 # AVX512{VBMI,VL}
	vpmultishiftqb	xmm30, xmm29, [rdx-1024]{1to2}	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	xmm30, xmm29, [rdx-1032]{1to2}	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30{k7}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30{k7}{z}, ymm29, ymm28	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, [rcx]{1to4}	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, [rdx+1016]{1to4}	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	ymm30, ymm29, [rdx+1024]{1to4}	 # AVX512{VBMI,VL}
	vpmultishiftqb	ymm30, ymm29, [rdx-1024]{1to4}	 # AVX512{VBMI,VL} Disp8
	vpmultishiftqb	ymm30, ymm29, [rdx-1032]{1to4}	 # AVX512{VBMI,VL}
# ---- next file: gas/testsuite/gas/i386/x86-64-avx512dq-rcig.s (repo: stsp/binutils-ia16, 2,228 bytes) ----
# Check 64bit AVX512DQ-RCIG instructions
# Assembler test fixture: AVX512DQ instructions whose rounding-control
# bits are ignored but which accept the {sae} (suppress-all-exceptions)
# modifier.  Each mnemonic is assembled with both an 0xab and a decimal
# 123 immediate where an immediate exists; vcvtt* forms take only {sae}.
.allow_index_reg
.text
_start:
# AT&T-syntax forms: {sae} is written after the immediate,
# before the source operands.
	vrangepd	$0xab, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$0xab, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangesd	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducepd	$0xab, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreducepd	$123, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$0xab, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$123, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreducesd	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vcvttpd2qq	{sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvttpd2uqq	{sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvttps2qq	{sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvttps2uqq	{sae}, %ymm29, %zmm30	 # AVX512DQ
# Intel-syntax forms: {sae} follows the last source operand,
# before the immediate.
	.intel_syntax noprefix
	vrangepd	zmm30, zmm29, zmm28, {sae}, 0xab	 # AVX512DQ
	vrangepd	zmm30, zmm29, zmm28, {sae}, 123	 # AVX512DQ
	vrangeps	zmm30, zmm29, zmm28, {sae}, 0xab	 # AVX512DQ
	vrangeps	zmm30, zmm29, zmm28, {sae}, 123	 # AVX512DQ
	vrangesd	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vrangesd	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vrangess	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vrangess	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vreducepd	zmm30, zmm29, {sae}, 0xab	 # AVX512DQ
	vreducepd	zmm30, zmm29, {sae}, 123	 # AVX512DQ
	vreduceps	zmm30, zmm29, {sae}, 0xab	 # AVX512DQ
	vreduceps	zmm30, zmm29, {sae}, 123	 # AVX512DQ
	vreducesd	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vreducesd	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vreducess	xmm30, xmm29, xmm28, {sae}, 0xab	 # AVX512DQ
	vreducess	xmm30, xmm29, xmm28, {sae}, 123	 # AVX512DQ
	vcvttpd2qq	zmm30, zmm29, {sae}	 # AVX512DQ
	vcvttpd2uqq	zmm30, zmm29, {sae}	 # AVX512DQ
	vcvttps2qq	zmm30, ymm29, {sae}	 # AVX512DQ
	vcvttps2uqq	zmm30, ymm29, {sae}	 # AVX512DQ
|
stsp/binutils-ia16
| 4,894
|
gas/testsuite/gas/i386/reloc64.s
|
# Tagging macros for the relocation test lines below.
#
# bad: the wrapped line is assembled only when _bad_ is defined, i.e.
# the line is normally suppressed; presumably the _bad_ run checks that
# the assembler rejects these forms — confirm against the test driver.
.macro bad args:vararg
.ifdef _bad_
\args
.endif
.endm
# ill: the wrapped line is never assembled (the expansion is commented
# out entirely, regardless of any symbol definitions).
.macro ill args:vararg
# This is used to mark entries that aren't handled consistently,
# and thus shouldn't currently be checked for.
# \args
.endm
.text
_start:
# Plain symbol references: absolute immediates at each operand width,
# memory operands, PC-relative ("xtrn - ." and %rip/%eip) forms, and
# branch targets.
movabs $xtrn, %rax
add $xtrn, %rax
mov $xtrn, %eax
mov $xtrn, %ax
mov $xtrn, %al
mov xtrn(%rbx), %eax
mov xtrn(%ebx), %eax
movabs $(xtrn - .), %rax
add $(xtrn - .), %rax
ill mov $(xtrn - .), %eax
mov $(xtrn - .), %ax
mov $(xtrn - .), %al
mov xtrn(%rip), %eax
mov xtrn(%eip), %eax
call xtrn
jrcxz xtrn
# @got forms.
movabs $xtrn@got, %rax
add $xtrn@got, %rax
bad mov $xtrn@got, %eax
bad mov $xtrn@got, %ax
bad mov $xtrn@got, %al
mov xtrn@got(%rbx), %eax
bad mov xtrn@got(%ebx), %eax
bad call xtrn@got
# @gotoff forms.
movabs $xtrn@gotoff, %rax
bad add $xtrn@gotoff, %rax
bad mov $xtrn@gotoff, %eax
bad mov $xtrn@gotoff, %ax
bad mov $xtrn@gotoff, %al
bad mov xtrn@gotoff(%rbx), %eax
bad mov xtrn@gotoff(%ebx), %eax
bad call xtrn@gotoff
# @gotpcrel forms.
bad movabs $xtrn@gotpcrel, %rax
add $xtrn@gotpcrel, %rax
bad mov $xtrn@gotpcrel, %eax
bad mov $xtrn@gotpcrel, %ax
bad mov $xtrn@gotpcrel, %al
mov xtrn@gotpcrel(%rbx), %eax
bad mov xtrn@gotpcrel(%ebx), %eax
call xtrn@gotpcrel
# _GLOBAL_OFFSET_TABLE_ references (absolute and PC-relative).
ill movabs $_GLOBAL_OFFSET_TABLE_, %rax
add $_GLOBAL_OFFSET_TABLE_, %rax
ill add $_GLOBAL_OFFSET_TABLE_, %eax
ill add $_GLOBAL_OFFSET_TABLE_, %ax
ill add $_GLOBAL_OFFSET_TABLE_, %al
lea _GLOBAL_OFFSET_TABLE_(%rip), %rax
lea _GLOBAL_OFFSET_TABLE_(%eip), %rax
ill movabs $(_GLOBAL_OFFSET_TABLE_ - .), %rax
add $(_GLOBAL_OFFSET_TABLE_ - .), %rax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %eax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %ax
ill add $(_GLOBAL_OFFSET_TABLE_ - .), %al
# @plt forms.
bad movabs $xtrn@plt, %rax
add $xtrn@plt, %rax
bad mov $xtrn@plt, %eax
bad mov $xtrn@plt, %ax
bad mov $xtrn@plt, %al
mov xtrn@plt(%rbx), %eax
bad mov xtrn@plt(%ebx), %eax
call xtrn@plt
bad jrcxz xtrn@plt
# TLS @tlsgd forms.
bad movabs $xtrn@tlsgd, %rax
add $xtrn@tlsgd, %rax
bad mov $xtrn@tlsgd, %eax
bad mov $xtrn@tlsgd, %ax
bad mov $xtrn@tlsgd, %al
mov xtrn@tlsgd(%rbx), %eax
bad mov xtrn@tlsgd(%ebx), %eax
call xtrn@tlsgd
# TLS @gottpoff forms.
bad movabs $xtrn@gottpoff, %rax
add $xtrn@gottpoff, %rax
bad mov $xtrn@gottpoff, %eax
bad mov $xtrn@gottpoff, %ax
bad mov $xtrn@gottpoff, %al
mov xtrn@gottpoff(%rbx), %eax
bad mov xtrn@gottpoff(%ebx), %eax
call xtrn@gottpoff
# TLS @tlsld forms.
bad movabs $xtrn@tlsld, %rax
add $xtrn@tlsld, %rax
bad mov $xtrn@tlsld, %eax
bad mov $xtrn@tlsld, %ax
bad mov $xtrn@tlsld, %al
mov xtrn@tlsld(%rbx), %eax
bad mov xtrn@tlsld(%ebx), %eax
call xtrn@tlsld
# TLS @dtpoff forms.
movabs $xtrn@dtpoff, %rax
add $xtrn@dtpoff, %rax
bad mov $xtrn@dtpoff, %eax
bad mov $xtrn@dtpoff, %ax
bad mov $xtrn@dtpoff, %al
mov xtrn@dtpoff(%rbx), %eax
bad mov xtrn@dtpoff(%ebx), %eax
bad call xtrn@dtpoff
# TLS @tpoff forms.
movabs $xtrn@tpoff, %rax
add $xtrn@tpoff, %rax
bad mov $xtrn@tpoff, %eax
bad mov $xtrn@tpoff, %ax
bad mov $xtrn@tpoff, %al
mov xtrn@tpoff(%rbx), %eax
bad mov xtrn@tpoff(%ebx), %eax
bad call xtrn@tpoff
# Data relocations: the same modifiers at each directive width
# (.quad / .long / .slong / .word / .byte).
.data
.quad xtrn
.quad xtrn - .
.quad xtrn@got
.quad xtrn@gotoff
.quad xtrn@gotpcrel
ill .quad _GLOBAL_OFFSET_TABLE_
ill .quad _GLOBAL_OFFSET_TABLE_ - .
bad .quad xtrn@plt
bad .quad xtrn@tlsgd
bad .quad xtrn@gottpoff
bad .quad xtrn@tlsld
.quad xtrn@dtpoff
.quad xtrn@tpoff
.long xtrn
.long xtrn - .
.long xtrn@got
bad .long xtrn@gotoff
.long xtrn@gotpcrel
.long _GLOBAL_OFFSET_TABLE_
.long _GLOBAL_OFFSET_TABLE_ - .
.long xtrn@plt
.long xtrn@tlsgd
.long xtrn@gottpoff
.long xtrn@tlsld
.long xtrn@dtpoff
.long xtrn@tpoff
.slong xtrn
.slong xtrn - .
.slong xtrn@got
bad .slong xtrn@gotoff
.slong xtrn@gotpcrel
.slong _GLOBAL_OFFSET_TABLE_
.slong _GLOBAL_OFFSET_TABLE_ - .
.slong xtrn@plt
.slong xtrn@tlsgd
.slong xtrn@gottpoff
.slong xtrn@tlsld
.slong xtrn@dtpoff
.slong xtrn@tpoff
.word xtrn
.word xtrn - .
bad .word xtrn@got
bad .word xtrn@gotoff
bad .word xtrn@gotpcrel
ill .word _GLOBAL_OFFSET_TABLE_
ill .word _GLOBAL_OFFSET_TABLE_ - .
bad .word xtrn@plt
bad .word xtrn@tlsgd
bad .word xtrn@gottpoff
bad .word xtrn@tlsld
bad .word xtrn@dtpoff
bad .word xtrn@tpoff
.byte xtrn
.byte xtrn - .
bad .byte xtrn@got
bad .byte xtrn@gotoff
bad .byte xtrn@gotpcrel
ill .byte _GLOBAL_OFFSET_TABLE_
ill .byte _GLOBAL_OFFSET_TABLE_ - .
bad .byte xtrn@plt
bad .byte xtrn@tlsgd
bad .byte xtrn@gottpoff
bad .byte xtrn@tlsld
bad .byte xtrn@dtpoff
bad .byte xtrn@tpoff
# Corner cases: whitespace between the modifier and the memory operand,
# and arithmetic on a @got expression.
.text
mov xtrn@tpoff (%rbx), %eax
.data
.long xtrn@got - 4
.long xtrn@got + 4
# @gotplt forms.
.text
movabs $xtrn@gotplt, %rax
bad add $xtrn@gotplt, %rax
bad mov $xtrn@gotplt, %eax
bad mov $xtrn@gotplt, %ax
bad mov $xtrn@gotplt, %al
bad mov xtrn@gotplt(%rbx), %eax
bad mov xtrn@gotplt(%ebx), %eax
bad call xtrn@gotplt
.data
.quad xtrn@gotplt
bad .long xtrn@gotplt
bad .word xtrn@gotplt
bad .byte xtrn@gotplt
# Index-only addressing and VSIB (gather) memory operands, with and
# without an address-size override.
.text
mov xtrn(,%rbx), %eax
mov xtrn(,%ebx), %eax
vgatherdps %xmm2, xtrn(,%xmm1), %xmm0
addr32 vgatherdps %xmm2, xtrn(,%xmm1), %xmm0
bad .long xtrn@plt - .
|
stsp/binutils-ia16
| 5,772
|
gas/testsuite/gas/i386/disassem.s
|
# Raw byte sequences emitted with .byte rather than mnemonics.
# NOTE(review): this looks like input for a disassembler test — the
# encodings (0xC5/0xC4 VEX prefixes, 0x62 EVEX prefixes) are assembled
# verbatim and presumably matched against an expected objdump listing;
# confirm against the accompanying .d file before changing any byte.
.text
.byte 0xFF, 0xEF
.byte 0xFF, 0xD8
.fill 0x5, 0x1, 0x90
.byte 0xC5, 0xEC, 0x4A, 0x9B
.byte 0xC5, 0xEC, 0x4A, 0x6F
.byte 0xC5, 0xEC, 0x4A, 0x3F
.byte 0xC5, 0xED, 0x4A, 0x9B
.byte 0xC5, 0xED, 0x4A, 0x6F
.byte 0xC5, 0xED, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x3F
.byte 0xC5, 0xEC, 0x41, 0x9B
.byte 0xC5, 0xEC, 0x41, 0x6F
.byte 0xC5, 0xEC, 0x41, 0x3F
.byte 0xC5, 0xED, 0x41, 0x9B
.byte 0xC5, 0xED, 0x41, 0x6F
.byte 0xC5, 0xED, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x3F
.byte 0xC5, 0xEC, 0x42, 0x9B
.byte 0xC5, 0xEC, 0x42, 0x6F
.byte 0xC5, 0xEC, 0x42, 0x3F
.byte 0xC5, 0xED, 0x42, 0x9B
.byte 0xC5, 0xED, 0x42, 0x6F
.byte 0xC5, 0xED, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x3F
.byte 0xC5, 0xEC, 0x4B, 0x9B
.byte 0xC5, 0xEC, 0x4B, 0x6F
.byte 0xC5, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xED, 0x4B, 0x9B
.byte 0xC5, 0xED, 0x4B, 0x6F
.byte 0xC5, 0xED, 0x4B, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xF8, 0x44, 0x9B
.byte 0xC5, 0xF8, 0x44, 0x6F
.byte 0xC5, 0xF8, 0x44, 0x3F
.byte 0xC5, 0xF9, 0x44, 0x9B
.byte 0xC5, 0xF9, 0x44, 0x6F
.byte 0xC5, 0xF9, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x3F
.byte 0xC5, 0xEC, 0x45, 0x9B
.byte 0xC5, 0xEC, 0x45, 0x6F
.byte 0xC5, 0xEC, 0x45, 0x3F
.byte 0xC5, 0xED, 0x45, 0x9B
.byte 0xC5, 0xED, 0x45, 0x6F
.byte 0xC5, 0xED, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x3F
.byte 0xC5, 0xF8, 0x98, 0x9B
.byte 0xC5, 0xF8, 0x98, 0x6F
.byte 0xC5, 0xF8, 0x98, 0x3F
.byte 0xC5, 0xF9, 0x98, 0x9B
.byte 0xC5, 0xF9, 0x98, 0x6F
.byte 0xC5, 0xF9, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x3F
.byte 0xC5, 0xEC, 0x46, 0x9B
.byte 0xC5, 0xEC, 0x46, 0x6F
.byte 0xC5, 0xEC, 0x46, 0x3F
.byte 0xC5, 0xED, 0x46, 0x9B
.byte 0xC5, 0xED, 0x46, 0x6F
.byte 0xC5, 0xED, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x3F
.byte 0xC5, 0xEC, 0x47, 0x9B
.byte 0xC5, 0xEC, 0x47, 0x6F
.byte 0xC5, 0xEC, 0x47, 0x3F
.byte 0xC5, 0xED, 0x47, 0x9B
.byte 0xC5, 0xED, 0x47, 0x6F
.byte 0xC5, 0xED, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x3F
.byte 0xC5, 0xF8, 0x99, 0x9B
.byte 0xC5, 0xF8, 0x99, 0x6F
.byte 0xC5, 0xF8, 0x99, 0x3F
.byte 0xC5, 0xF9, 0x99, 0x9B
.byte 0xC5, 0xF9, 0x99, 0x6F
.byte 0xC5, 0xF9, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x3F
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x04, 0x01
.byte 0xC5, 0xF8, 0x92, 0x9B
.byte 0xC5, 0xF8, 0x92, 0x6F
.byte 0xC5, 0xF8, 0x92, 0x3F
.byte 0xC5, 0xF9, 0x92, 0x9B
.byte 0xC5, 0xF9, 0x92, 0x6F
.byte 0xC5, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xFB, 0x92, 0x9B
.byte 0xC5, 0xFB, 0x92, 0x6F
.byte 0xC5, 0xFB, 0x92, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xF8, 0x93, 0x9B
.byte 0xC5, 0xF8, 0x93, 0x6F
.byte 0xC5, 0xF8, 0x93, 0x3F
.byte 0xC5, 0xF9, 0x93, 0x9B
.byte 0xC5, 0xF9, 0x93, 0x6F
.byte 0xC5, 0xF9, 0x93, 0x3F
.byte 0xC5, 0xFB, 0x93, 0x9B
.byte 0xC5, 0xFB, 0x93, 0x6F
.byte 0xC5, 0xFB, 0x93, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x3F
# NOTE(review): the sequences below are odd lengths / interleaved with
# stray single bytes (0x1, a bare 0x62 prefix, a two-byte 0x62 0xf3) —
# they look like deliberately truncated or malformed encodings meant to
# exercise the disassembler's error/resync paths; confirm with the
# expected-output file.
.byte 0xc4, 0xe2, 0x1, 0x1c, 0x41, 0x37
.byte 0x62, 0xf2, 0xad, 0x08, 0x1c, 0x01
.byte 0x1
.byte 0x62, 0xf3, 0x7d, 0x28, 0x1b, 0xc8, 0x25
.byte 0x62, 0xf3
.byte 0x62, 0xf3, 0x75, 0x08, 0x23, 0xc2, 0x25
.byte 0x62
.byte 0x62, 0xf2, 0x7d, 0x28, 0x5b, 0x41, 0x37
|
stsp/binutils-ia16
| 3,346
|
gas/testsuite/gas/i386/noreg64.s
|
# Prefix-selection macros: one source file serves several assembler
# runs distinguished by predefined symbols.
#
# pfx: emit the instruction with a data16 prefix when DATA16 is
# defined, with a rex64 prefix when REX64 is defined, and unmodified
# otherwise.  (DATA16 takes priority if both were defined.)
.macro pfx insn:vararg
.ifdef DATA16
data16 \insn
.else
.ifdef REX64
rex64 \insn
.else
\insn
.endif
.endif
.endm
# pfx16: like pfx, but the line is omitted entirely under REX64
# (for forms that have no REX.W variant).
.macro pfx16 insn:vararg
.ifndef REX64
pfx \insn
.endif
.endm
# pfx64: like pfx, but the line is omitted entirely under DATA16
# (for forms that have no 16-bit operand-size variant).
.macro pfx64 insn:vararg
.ifndef DATA16
pfx \insn
.endif
.endm
.text
noreg:
# Every instruction below uses only memory and/or immediate operands
# (no register operand), so the operand size is determined by the
# default plus whatever prefix the pfx/pfx16/pfx64 wrappers inject
# (data16 or rex64) rather than by a register width.
pfx adc $1, (%rax)
pfx adc $0x89, (%rax)
pfx adc $0x1234, (%rax)
pfx adc $0x12345678, (%rax)
pfx add $1, (%rax)
pfx add $0x89, (%rax)
pfx add $0x1234, (%rax)
pfx add $0x12345678, (%rax)
pfx and $1, (%rax)
pfx and $0x89, (%rax)
pfx and $0x1234, (%rax)
pfx and $0x12345678, (%rax)
pfx bt $1, (%rax)
pfx btc $1, (%rax)
pfx btr $1, (%rax)
pfx bts $1, (%rax)
pfx call *(%rax)
pfx cmp $1, (%rax)
pfx cmp $0x89, (%rax)
pfx cmp $0x1234, (%rax)
pfx cmp $0x12345678, (%rax)
pfx cmps
pfx cmps %es:(%rdi), (%rsi)
pfx crc32 (%rax), %eax
pfx16 crc32 (%rax), %rax
pfx dec (%rax)
pfx div (%rax)
# x87 forms with a bare memory operand.
pfx fadd (%rax)
pfx fcom (%rax)
pfx fcomp (%rax)
pfx fdiv (%rax)
pfx fdivr (%rax)
pfx fiadd (%rax)
pfx ficom (%rax)
pfx ficomp (%rax)
pfx fidiv (%rax)
pfx fidivr (%rax)
pfx fild (%rax)
pfx fimul (%rax)
pfx fist (%rax)
pfx fistp (%rax)
pfx fisttp (%rax)
pfx fisub (%rax)
pfx fisubr (%rax)
pfx fld (%rax)
pfx fmul (%rax)
pfx fst (%rax)
pfx fstp (%rax)
pfx fsub (%rax)
pfx fsubr (%rax)
pfx idiv (%rax)
pfx imul (%rax)
pfx in $0
pfx in %dx
pfx inc (%rax)
pfx ins
pfx ins %dx, %es:(%rdi)
pfx iret
pfx jmp *(%rax)
pfx lcall *(%rax)
pfx lgdt (%rax)
pfx lidt (%rax)
pfx ljmp *(%rax)
pfx lldt (%rax)
pfx lmsw (%rax)
pfx lods
pfx lods (%rsi)
pfx lret
pfx lret $4
pfx ltr (%rax)
pfx mov $0x12, (%rax)
pfx mov $0x1234, (%rax)
pfx mov $0x12345678, (%rax)
pfx mov %es, (%rax)
pfx mov (%rax), %es
pfx movs
pfx movs (%rsi), %es:(%rdi)
pfx64 movsx (%rax), %ax
pfx movsx (%rax), %eax
pfx16 movsx (%rax), %rax
pfx64 movzx (%rax), %ax
pfx movzx (%rax), %eax
pfx16 movzx (%rax), %rax
pfx mul (%rax)
pfx neg (%rax)
pfx nop (%rax)
pfx not (%rax)
pfx or $1, (%rax)
pfx or $0x89, (%rax)
pfx or $0x1234, (%rax)
pfx or $0x12345678, (%rax)
pfx out $0
pfx out %dx
pfx outs
pfx outs (%rsi), %dx
pfx pop (%rax)
pfx pop %fs
pfx64 ptwrite (%rax)
pfx push (%rax)
pfx push %fs
# Rotate/shift group: $1 (short form), $2 (imm8 form), %cl, implicit.
pfx rcl $1, (%rax)
pfx rcl $2, (%rax)
pfx rcl %cl, (%rax)
pfx rcl (%rax)
pfx rcr $1, (%rax)
pfx rcr $2, (%rax)
pfx rcr %cl, (%rax)
pfx rcr (%rax)
pfx rol $1, (%rax)
pfx rol $2, (%rax)
pfx rol %cl, (%rax)
pfx rol (%rax)
pfx ror $1, (%rax)
pfx ror $2, (%rax)
pfx ror %cl, (%rax)
pfx ror (%rax)
pfx sbb $1, (%rax)
pfx sbb $0x89, (%rax)
pfx sbb $0x1234, (%rax)
pfx sbb $0x12345678, (%rax)
pfx scas
pfx scas %es:(%rdi)
pfx sal $1, (%rax)
pfx sal $2, (%rax)
pfx sal %cl, (%rax)
pfx sal (%rax)
pfx sar $1, (%rax)
pfx sar $2, (%rax)
pfx sar %cl, (%rax)
pfx sar (%rax)
pfx shl $1, (%rax)
pfx shl $2, (%rax)
pfx shl %cl, (%rax)
pfx shl (%rax)
pfx shr $1, (%rax)
pfx shr $2, (%rax)
pfx shr %cl, (%rax)
pfx shr (%rax)
pfx stos
pfx stos %es:(%rdi)
pfx sub $1, (%rax)
pfx sub $0x89, (%rax)
pfx sub $0x1234, (%rax)
pfx sub $0x12345678, (%rax)
pfx sysexit
pfx sysret
pfx test $0x89, (%rax)
pfx test $0x1234, (%rax)
pfx test $0x12345678, (%rax)
pfx xor $1, (%rax)
pfx xor $0x89, (%rax)
pfx xor $0x1234, (%rax)
pfx xor $0x12345678, (%rax)
|
stsp/binutils-ia16
| 85,354
|
gas/testsuite/gas/i386/x86-64-avx512_fp16_pseudo_ops.s
|
# Check 64bit VCM.*{PH,SH} instructions
.allow_index_reg
.text
_start:
vcmpeq_oqph %zmm29, %zmm30, %k5
vcmpeq_oqph %zmm29, %zmm30, %k5{%k7}
vcmpeq_oqph {sae}, %zmm29, %zmm30, %k5
vcmpeq_oqph (%rcx), %zmm30, %k5
vcmpeq_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpeq_oqph (%rcx){1to32}, %zmm30, %k5
vcmpeq_oqph 8128(%rdx), %zmm30, %k5
vcmpeq_oqph 8192(%rdx), %zmm30, %k5
vcmpeq_oqph -8192(%rdx), %zmm30, %k5
vcmpeq_oqph -8256(%rdx), %zmm30, %k5
vcmpeq_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpeq_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpeqph %zmm29, %zmm30, %k5
vcmpeqph %zmm29, %zmm30, %k5{%k7}
vcmpeqph {sae}, %zmm29, %zmm30, %k5
vcmpeqph (%rcx), %zmm30, %k5
vcmpeqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpeqph (%rcx){1to32}, %zmm30, %k5
vcmpeqph 8128(%rdx), %zmm30, %k5
vcmpeqph 8192(%rdx), %zmm30, %k5
vcmpeqph -8192(%rdx), %zmm30, %k5
vcmpeqph -8256(%rdx), %zmm30, %k5
vcmpeqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpeqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpeqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpeqph -1032(%rdx){1to32}, %zmm30, %k5
vcmplt_osph %zmm29, %zmm30, %k5
vcmplt_osph %zmm29, %zmm30, %k5{%k7}
vcmplt_osph {sae}, %zmm29, %zmm30, %k5
vcmplt_osph (%rcx), %zmm30, %k5
vcmplt_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmplt_osph (%rcx){1to32}, %zmm30, %k5
vcmplt_osph 8128(%rdx), %zmm30, %k5
vcmplt_osph 8192(%rdx), %zmm30, %k5
vcmplt_osph -8192(%rdx), %zmm30, %k5
vcmplt_osph -8256(%rdx), %zmm30, %k5
vcmplt_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmplt_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmplt_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmplt_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmpltph %zmm29, %zmm30, %k5
vcmpltph %zmm29, %zmm30, %k5{%k7}
vcmpltph {sae}, %zmm29, %zmm30, %k5
vcmpltph (%rcx), %zmm30, %k5
vcmpltph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpltph (%rcx){1to32}, %zmm30, %k5
vcmpltph 8128(%rdx), %zmm30, %k5
vcmpltph 8192(%rdx), %zmm30, %k5
vcmpltph -8192(%rdx), %zmm30, %k5
vcmpltph -8256(%rdx), %zmm30, %k5
vcmpltph 1016(%rdx){1to32}, %zmm30, %k5
vcmpltph 1024(%rdx){1to32}, %zmm30, %k5
vcmpltph -1024(%rdx){1to32}, %zmm30, %k5
vcmpltph -1032(%rdx){1to32}, %zmm30, %k5
vcmple_osph %zmm29, %zmm30, %k5
vcmple_osph %zmm29, %zmm30, %k5{%k7}
vcmple_osph {sae}, %zmm29, %zmm30, %k5
vcmple_osph (%rcx), %zmm30, %k5
vcmple_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmple_osph (%rcx){1to32}, %zmm30, %k5
vcmple_osph 8128(%rdx), %zmm30, %k5
vcmple_osph 8192(%rdx), %zmm30, %k5
vcmple_osph -8192(%rdx), %zmm30, %k5
vcmple_osph -8256(%rdx), %zmm30, %k5
vcmple_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmple_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmple_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmple_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmpleph %zmm29, %zmm30, %k5
vcmpleph %zmm29, %zmm30, %k5{%k7}
vcmpleph {sae}, %zmm29, %zmm30, %k5
vcmpleph (%rcx), %zmm30, %k5
vcmpleph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpleph (%rcx){1to32}, %zmm30, %k5
vcmpleph 8128(%rdx), %zmm30, %k5
vcmpleph 8192(%rdx), %zmm30, %k5
vcmpleph -8192(%rdx), %zmm30, %k5
vcmpleph -8256(%rdx), %zmm30, %k5
vcmpleph 1016(%rdx){1to32}, %zmm30, %k5
vcmpleph 1024(%rdx){1to32}, %zmm30, %k5
vcmpleph -1024(%rdx){1to32}, %zmm30, %k5
vcmpleph -1032(%rdx){1to32}, %zmm30, %k5
vcmpunord_qph %zmm29, %zmm30, %k5
vcmpunord_qph %zmm29, %zmm30, %k5{%k7}
vcmpunord_qph {sae}, %zmm29, %zmm30, %k5
vcmpunord_qph (%rcx), %zmm30, %k5
vcmpunord_qph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpunord_qph (%rcx){1to32}, %zmm30, %k5
vcmpunord_qph 8128(%rdx), %zmm30, %k5
vcmpunord_qph 8192(%rdx), %zmm30, %k5
vcmpunord_qph -8192(%rdx), %zmm30, %k5
vcmpunord_qph -8256(%rdx), %zmm30, %k5
vcmpunord_qph 1016(%rdx){1to32}, %zmm30, %k5
vcmpunord_qph 1024(%rdx){1to32}, %zmm30, %k5
vcmpunord_qph -1024(%rdx){1to32}, %zmm30, %k5
vcmpunord_qph -1032(%rdx){1to32}, %zmm30, %k5
vcmpunordph %zmm29, %zmm30, %k5
vcmpunordph %zmm29, %zmm30, %k5{%k7}
vcmpunordph {sae}, %zmm29, %zmm30, %k5
vcmpunordph (%rcx), %zmm30, %k5
vcmpunordph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpunordph (%rcx){1to32}, %zmm30, %k5
vcmpunordph 8128(%rdx), %zmm30, %k5
vcmpunordph 8192(%rdx), %zmm30, %k5
vcmpunordph -8192(%rdx), %zmm30, %k5
vcmpunordph -8256(%rdx), %zmm30, %k5
vcmpunordph 1016(%rdx){1to32}, %zmm30, %k5
vcmpunordph 1024(%rdx){1to32}, %zmm30, %k5
vcmpunordph -1024(%rdx){1to32}, %zmm30, %k5
vcmpunordph -1032(%rdx){1to32}, %zmm30, %k5
vcmpneq_uqph %zmm29, %zmm30, %k5
vcmpneq_uqph %zmm29, %zmm30, %k5{%k7}
vcmpneq_uqph {sae}, %zmm29, %zmm30, %k5
vcmpneq_uqph (%rcx), %zmm30, %k5
vcmpneq_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpneq_uqph (%rcx){1to32}, %zmm30, %k5
vcmpneq_uqph 8128(%rdx), %zmm30, %k5
vcmpneq_uqph 8192(%rdx), %zmm30, %k5
vcmpneq_uqph -8192(%rdx), %zmm30, %k5
vcmpneq_uqph -8256(%rdx), %zmm30, %k5
vcmpneq_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpneq_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpneqph %zmm29, %zmm30, %k5
vcmpneqph %zmm29, %zmm30, %k5{%k7}
vcmpneqph {sae}, %zmm29, %zmm30, %k5
vcmpneqph (%rcx), %zmm30, %k5
vcmpneqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpneqph (%rcx){1to32}, %zmm30, %k5
vcmpneqph 8128(%rdx), %zmm30, %k5
vcmpneqph 8192(%rdx), %zmm30, %k5
vcmpneqph -8192(%rdx), %zmm30, %k5
vcmpneqph -8256(%rdx), %zmm30, %k5
vcmpneqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpneqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpneqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpneqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnlt_usph %zmm29, %zmm30, %k5
vcmpnlt_usph %zmm29, %zmm30, %k5{%k7}
vcmpnlt_usph {sae}, %zmm29, %zmm30, %k5
vcmpnlt_usph (%rcx), %zmm30, %k5
vcmpnlt_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnlt_usph (%rcx){1to32}, %zmm30, %k5
vcmpnlt_usph 8128(%rdx), %zmm30, %k5
vcmpnlt_usph 8192(%rdx), %zmm30, %k5
vcmpnlt_usph -8192(%rdx), %zmm30, %k5
vcmpnlt_usph -8256(%rdx), %zmm30, %k5
vcmpnlt_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnlt_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnlt_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnlt_usph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnltph %zmm29, %zmm30, %k5
vcmpnltph %zmm29, %zmm30, %k5{%k7}
vcmpnltph {sae}, %zmm29, %zmm30, %k5
vcmpnltph (%rcx), %zmm30, %k5
vcmpnltph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnltph (%rcx){1to32}, %zmm30, %k5
vcmpnltph 8128(%rdx), %zmm30, %k5
vcmpnltph 8192(%rdx), %zmm30, %k5
vcmpnltph -8192(%rdx), %zmm30, %k5
vcmpnltph -8256(%rdx), %zmm30, %k5
vcmpnltph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnltph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnltph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnltph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnle_usph %zmm29, %zmm30, %k5
vcmpnle_usph %zmm29, %zmm30, %k5{%k7}
vcmpnle_usph {sae}, %zmm29, %zmm30, %k5
vcmpnle_usph (%rcx), %zmm30, %k5
vcmpnle_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnle_usph (%rcx){1to32}, %zmm30, %k5
vcmpnle_usph 8128(%rdx), %zmm30, %k5
vcmpnle_usph 8192(%rdx), %zmm30, %k5
vcmpnle_usph -8192(%rdx), %zmm30, %k5
vcmpnle_usph -8256(%rdx), %zmm30, %k5
vcmpnle_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnle_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnle_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnle_usph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnleph %zmm29, %zmm30, %k5
vcmpnleph %zmm29, %zmm30, %k5{%k7}
vcmpnleph {sae}, %zmm29, %zmm30, %k5
vcmpnleph (%rcx), %zmm30, %k5
vcmpnleph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnleph (%rcx){1to32}, %zmm30, %k5
vcmpnleph 8128(%rdx), %zmm30, %k5
vcmpnleph 8192(%rdx), %zmm30, %k5
vcmpnleph -8192(%rdx), %zmm30, %k5
vcmpnleph -8256(%rdx), %zmm30, %k5
vcmpnleph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnleph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnleph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnleph -1032(%rdx){1to32}, %zmm30, %k5
vcmpord_qph %zmm29, %zmm30, %k5
vcmpord_qph %zmm29, %zmm30, %k5{%k7}
vcmpord_qph {sae}, %zmm29, %zmm30, %k5
vcmpord_qph (%rcx), %zmm30, %k5
vcmpord_qph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpord_qph (%rcx){1to32}, %zmm30, %k5
vcmpord_qph 8128(%rdx), %zmm30, %k5
vcmpord_qph 8192(%rdx), %zmm30, %k5
vcmpord_qph -8192(%rdx), %zmm30, %k5
vcmpord_qph -8256(%rdx), %zmm30, %k5
vcmpord_qph 1016(%rdx){1to32}, %zmm30, %k5
vcmpord_qph 1024(%rdx){1to32}, %zmm30, %k5
vcmpord_qph -1024(%rdx){1to32}, %zmm30, %k5
vcmpord_qph -1032(%rdx){1to32}, %zmm30, %k5
vcmpordph %zmm29, %zmm30, %k5
vcmpordph %zmm29, %zmm30, %k5{%k7}
vcmpordph {sae}, %zmm29, %zmm30, %k5
vcmpordph (%rcx), %zmm30, %k5
vcmpordph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpordph (%rcx){1to32}, %zmm30, %k5
vcmpordph 8128(%rdx), %zmm30, %k5
vcmpordph 8192(%rdx), %zmm30, %k5
vcmpordph -8192(%rdx), %zmm30, %k5
vcmpordph -8256(%rdx), %zmm30, %k5
vcmpordph 1016(%rdx){1to32}, %zmm30, %k5
vcmpordph 1024(%rdx){1to32}, %zmm30, %k5
vcmpordph -1024(%rdx){1to32}, %zmm30, %k5
vcmpordph -1032(%rdx){1to32}, %zmm30, %k5
vcmpeq_uqph %zmm29, %zmm30, %k5
vcmpeq_uqph %zmm29, %zmm30, %k5{%k7}
vcmpeq_uqph {sae}, %zmm29, %zmm30, %k5
vcmpeq_uqph (%rcx), %zmm30, %k5
vcmpeq_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpeq_uqph (%rcx){1to32}, %zmm30, %k5
vcmpeq_uqph 8128(%rdx), %zmm30, %k5
vcmpeq_uqph 8192(%rdx), %zmm30, %k5
vcmpeq_uqph -8192(%rdx), %zmm30, %k5
vcmpeq_uqph -8256(%rdx), %zmm30, %k5
vcmpeq_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpeq_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnge_usph %zmm29, %zmm30, %k5
vcmpnge_usph %zmm29, %zmm30, %k5{%k7}
vcmpnge_usph {sae}, %zmm29, %zmm30, %k5
vcmpnge_usph (%rcx), %zmm30, %k5
vcmpnge_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnge_usph (%rcx){1to32}, %zmm30, %k5
vcmpnge_usph 8128(%rdx), %zmm30, %k5
vcmpnge_usph 8192(%rdx), %zmm30, %k5
vcmpnge_usph -8192(%rdx), %zmm30, %k5
vcmpnge_usph -8256(%rdx), %zmm30, %k5
vcmpnge_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnge_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnge_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnge_usph -1032(%rdx){1to32}, %zmm30, %k5
vcmpngeph %zmm29, %zmm30, %k5
vcmpngeph %zmm29, %zmm30, %k5{%k7}
vcmpngeph {sae}, %zmm29, %zmm30, %k5
vcmpngeph (%rcx), %zmm30, %k5
vcmpngeph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpngeph (%rcx){1to32}, %zmm30, %k5
vcmpngeph 8128(%rdx), %zmm30, %k5
vcmpngeph 8192(%rdx), %zmm30, %k5
vcmpngeph -8192(%rdx), %zmm30, %k5
vcmpngeph -8256(%rdx), %zmm30, %k5
vcmpngeph 1016(%rdx){1to32}, %zmm30, %k5
vcmpngeph 1024(%rdx){1to32}, %zmm30, %k5
vcmpngeph -1024(%rdx){1to32}, %zmm30, %k5
vcmpngeph -1032(%rdx){1to32}, %zmm30, %k5
vcmpngt_usph %zmm29, %zmm30, %k5
vcmpngt_usph %zmm29, %zmm30, %k5{%k7}
vcmpngt_usph {sae}, %zmm29, %zmm30, %k5
vcmpngt_usph (%rcx), %zmm30, %k5
vcmpngt_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpngt_usph (%rcx){1to32}, %zmm30, %k5
vcmpngt_usph 8128(%rdx), %zmm30, %k5
vcmpngt_usph 8192(%rdx), %zmm30, %k5
vcmpngt_usph -8192(%rdx), %zmm30, %k5
vcmpngt_usph -8256(%rdx), %zmm30, %k5
vcmpngt_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmpngt_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmpngt_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmpngt_usph -1032(%rdx){1to32}, %zmm30, %k5
vcmpngtph %zmm29, %zmm30, %k5
vcmpngtph %zmm29, %zmm30, %k5{%k7}
vcmpngtph {sae}, %zmm29, %zmm30, %k5
vcmpngtph (%rcx), %zmm30, %k5
vcmpngtph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpngtph (%rcx){1to32}, %zmm30, %k5
vcmpngtph 8128(%rdx), %zmm30, %k5
vcmpngtph 8192(%rdx), %zmm30, %k5
vcmpngtph -8192(%rdx), %zmm30, %k5
vcmpngtph -8256(%rdx), %zmm30, %k5
vcmpngtph 1016(%rdx){1to32}, %zmm30, %k5
vcmpngtph 1024(%rdx){1to32}, %zmm30, %k5
vcmpngtph -1024(%rdx){1to32}, %zmm30, %k5
vcmpngtph -1032(%rdx){1to32}, %zmm30, %k5
vcmpfalse_oqph %zmm29, %zmm30, %k5
vcmpfalse_oqph %zmm29, %zmm30, %k5{%k7}
vcmpfalse_oqph {sae}, %zmm29, %zmm30, %k5
vcmpfalse_oqph (%rcx), %zmm30, %k5
vcmpfalse_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpfalse_oqph (%rcx){1to32}, %zmm30, %k5
vcmpfalse_oqph 8128(%rdx), %zmm30, %k5
vcmpfalse_oqph 8192(%rdx), %zmm30, %k5
vcmpfalse_oqph -8192(%rdx), %zmm30, %k5
vcmpfalse_oqph -8256(%rdx), %zmm30, %k5
vcmpfalse_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpfalse_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpfalse_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpfalse_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpfalseph %zmm29, %zmm30, %k5
vcmpfalseph %zmm29, %zmm30, %k5{%k7}
vcmpfalseph {sae}, %zmm29, %zmm30, %k5
vcmpfalseph (%rcx), %zmm30, %k5
vcmpfalseph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpfalseph (%rcx){1to32}, %zmm30, %k5
vcmpfalseph 8128(%rdx), %zmm30, %k5
vcmpfalseph 8192(%rdx), %zmm30, %k5
vcmpfalseph -8192(%rdx), %zmm30, %k5
vcmpfalseph -8256(%rdx), %zmm30, %k5
vcmpfalseph 1016(%rdx){1to32}, %zmm30, %k5
vcmpfalseph 1024(%rdx){1to32}, %zmm30, %k5
vcmpfalseph -1024(%rdx){1to32}, %zmm30, %k5
vcmpfalseph -1032(%rdx){1to32}, %zmm30, %k5
vcmpneq_oqph %zmm29, %zmm30, %k5
vcmpneq_oqph %zmm29, %zmm30, %k5{%k7}
vcmpneq_oqph {sae}, %zmm29, %zmm30, %k5
vcmpneq_oqph (%rcx), %zmm30, %k5
vcmpneq_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpneq_oqph (%rcx){1to32}, %zmm30, %k5
vcmpneq_oqph 8128(%rdx), %zmm30, %k5
vcmpneq_oqph 8192(%rdx), %zmm30, %k5
vcmpneq_oqph -8192(%rdx), %zmm30, %k5
vcmpneq_oqph -8256(%rdx), %zmm30, %k5
vcmpneq_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpneq_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpge_osph %zmm29, %zmm30, %k5
vcmpge_osph %zmm29, %zmm30, %k5{%k7}
vcmpge_osph {sae}, %zmm29, %zmm30, %k5
vcmpge_osph (%rcx), %zmm30, %k5
vcmpge_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpge_osph (%rcx){1to32}, %zmm30, %k5
vcmpge_osph 8128(%rdx), %zmm30, %k5
vcmpge_osph 8192(%rdx), %zmm30, %k5
vcmpge_osph -8192(%rdx), %zmm30, %k5
vcmpge_osph -8256(%rdx), %zmm30, %k5
vcmpge_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmpge_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmpge_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmpge_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmpgeph %zmm29, %zmm30, %k5
vcmpgeph %zmm29, %zmm30, %k5{%k7}
vcmpgeph {sae}, %zmm29, %zmm30, %k5
vcmpgeph (%rcx), %zmm30, %k5
vcmpgeph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpgeph (%rcx){1to32}, %zmm30, %k5
vcmpgeph 8128(%rdx), %zmm30, %k5
vcmpgeph 8192(%rdx), %zmm30, %k5
vcmpgeph -8192(%rdx), %zmm30, %k5
vcmpgeph -8256(%rdx), %zmm30, %k5
vcmpgeph 1016(%rdx){1to32}, %zmm30, %k5
vcmpgeph 1024(%rdx){1to32}, %zmm30, %k5
vcmpgeph -1024(%rdx){1to32}, %zmm30, %k5
vcmpgeph -1032(%rdx){1to32}, %zmm30, %k5
vcmpgt_osph %zmm29, %zmm30, %k5
vcmpgt_osph %zmm29, %zmm30, %k5{%k7}
vcmpgt_osph {sae}, %zmm29, %zmm30, %k5
vcmpgt_osph (%rcx), %zmm30, %k5
vcmpgt_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpgt_osph (%rcx){1to32}, %zmm30, %k5
vcmpgt_osph 8128(%rdx), %zmm30, %k5
vcmpgt_osph 8192(%rdx), %zmm30, %k5
vcmpgt_osph -8192(%rdx), %zmm30, %k5
vcmpgt_osph -8256(%rdx), %zmm30, %k5
vcmpgt_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmpgt_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmpgt_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmpgt_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmpgtph %zmm29, %zmm30, %k5
vcmpgtph %zmm29, %zmm30, %k5{%k7}
vcmpgtph {sae}, %zmm29, %zmm30, %k5
vcmpgtph (%rcx), %zmm30, %k5
vcmpgtph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpgtph (%rcx){1to32}, %zmm30, %k5
vcmpgtph 8128(%rdx), %zmm30, %k5
vcmpgtph 8192(%rdx), %zmm30, %k5
vcmpgtph -8192(%rdx), %zmm30, %k5
vcmpgtph -8256(%rdx), %zmm30, %k5
vcmpgtph 1016(%rdx){1to32}, %zmm30, %k5
vcmpgtph 1024(%rdx){1to32}, %zmm30, %k5
vcmpgtph -1024(%rdx){1to32}, %zmm30, %k5
vcmpgtph -1032(%rdx){1to32}, %zmm30, %k5
vcmptrue_uqph %zmm29, %zmm30, %k5
vcmptrue_uqph %zmm29, %zmm30, %k5{%k7}
vcmptrue_uqph {sae}, %zmm29, %zmm30, %k5
vcmptrue_uqph (%rcx), %zmm30, %k5
vcmptrue_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmptrue_uqph (%rcx){1to32}, %zmm30, %k5
vcmptrue_uqph 8128(%rdx), %zmm30, %k5
vcmptrue_uqph 8192(%rdx), %zmm30, %k5
vcmptrue_uqph -8192(%rdx), %zmm30, %k5
vcmptrue_uqph -8256(%rdx), %zmm30, %k5
vcmptrue_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmptrue_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmptrue_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmptrue_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmptrueph %zmm29, %zmm30, %k5
vcmptrueph %zmm29, %zmm30, %k5{%k7}
vcmptrueph {sae}, %zmm29, %zmm30, %k5
vcmptrueph (%rcx), %zmm30, %k5
vcmptrueph 0x123(%rax,%r14,8), %zmm30, %k5
vcmptrueph (%rcx){1to32}, %zmm30, %k5
vcmptrueph 8128(%rdx), %zmm30, %k5
vcmptrueph 8192(%rdx), %zmm30, %k5
vcmptrueph -8192(%rdx), %zmm30, %k5
vcmptrueph -8256(%rdx), %zmm30, %k5
vcmptrueph 1016(%rdx){1to32}, %zmm30, %k5
vcmptrueph 1024(%rdx){1to32}, %zmm30, %k5
vcmptrueph -1024(%rdx){1to32}, %zmm30, %k5
vcmptrueph -1032(%rdx){1to32}, %zmm30, %k5
vcmpeq_osph %zmm29, %zmm30, %k5
vcmpeq_osph %zmm29, %zmm30, %k5{%k7}
vcmpeq_osph {sae}, %zmm29, %zmm30, %k5
vcmpeq_osph (%rcx), %zmm30, %k5
vcmpeq_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpeq_osph (%rcx){1to32}, %zmm30, %k5
vcmpeq_osph 8128(%rdx), %zmm30, %k5
vcmpeq_osph 8192(%rdx), %zmm30, %k5
vcmpeq_osph -8192(%rdx), %zmm30, %k5
vcmpeq_osph -8256(%rdx), %zmm30, %k5
vcmpeq_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmpeq_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmplt_oqph %zmm29, %zmm30, %k5
vcmplt_oqph %zmm29, %zmm30, %k5{%k7}
vcmplt_oqph {sae}, %zmm29, %zmm30, %k5
vcmplt_oqph (%rcx), %zmm30, %k5
vcmplt_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmplt_oqph (%rcx){1to32}, %zmm30, %k5
vcmplt_oqph 8128(%rdx), %zmm30, %k5
vcmplt_oqph 8192(%rdx), %zmm30, %k5
vcmplt_oqph -8192(%rdx), %zmm30, %k5
vcmplt_oqph -8256(%rdx), %zmm30, %k5
vcmplt_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmplt_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmplt_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmplt_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmple_oqph %zmm29, %zmm30, %k5
vcmple_oqph %zmm29, %zmm30, %k5{%k7}
vcmple_oqph {sae}, %zmm29, %zmm30, %k5
vcmple_oqph (%rcx), %zmm30, %k5
vcmple_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmple_oqph (%rcx){1to32}, %zmm30, %k5
vcmple_oqph 8128(%rdx), %zmm30, %k5
vcmple_oqph 8192(%rdx), %zmm30, %k5
vcmple_oqph -8192(%rdx), %zmm30, %k5
vcmple_oqph -8256(%rdx), %zmm30, %k5
vcmple_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmple_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmple_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmple_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpunord_sph %zmm29, %zmm30, %k5
vcmpunord_sph %zmm29, %zmm30, %k5{%k7}
vcmpunord_sph {sae}, %zmm29, %zmm30, %k5
vcmpunord_sph (%rcx), %zmm30, %k5
vcmpunord_sph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpunord_sph (%rcx){1to32}, %zmm30, %k5
vcmpunord_sph 8128(%rdx), %zmm30, %k5
vcmpunord_sph 8192(%rdx), %zmm30, %k5
vcmpunord_sph -8192(%rdx), %zmm30, %k5
vcmpunord_sph -8256(%rdx), %zmm30, %k5
vcmpunord_sph 1016(%rdx){1to32}, %zmm30, %k5
vcmpunord_sph 1024(%rdx){1to32}, %zmm30, %k5
vcmpunord_sph -1024(%rdx){1to32}, %zmm30, %k5
vcmpunord_sph -1032(%rdx){1to32}, %zmm30, %k5
vcmpneq_usph %zmm29, %zmm30, %k5
vcmpneq_usph %zmm29, %zmm30, %k5{%k7}
vcmpneq_usph {sae}, %zmm29, %zmm30, %k5
vcmpneq_usph (%rcx), %zmm30, %k5
vcmpneq_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpneq_usph (%rcx){1to32}, %zmm30, %k5
vcmpneq_usph 8128(%rdx), %zmm30, %k5
vcmpneq_usph 8192(%rdx), %zmm30, %k5
vcmpneq_usph -8192(%rdx), %zmm30, %k5
vcmpneq_usph -8256(%rdx), %zmm30, %k5
vcmpneq_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmpneq_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_usph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnlt_uqph %zmm29, %zmm30, %k5
vcmpnlt_uqph %zmm29, %zmm30, %k5{%k7}
vcmpnlt_uqph {sae}, %zmm29, %zmm30, %k5
vcmpnlt_uqph (%rcx), %zmm30, %k5
vcmpnlt_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnlt_uqph (%rcx){1to32}, %zmm30, %k5
vcmpnlt_uqph 8128(%rdx), %zmm30, %k5
vcmpnlt_uqph 8192(%rdx), %zmm30, %k5
vcmpnlt_uqph -8192(%rdx), %zmm30, %k5
vcmpnlt_uqph -8256(%rdx), %zmm30, %k5
vcmpnlt_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnlt_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnlt_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnlt_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnle_uqph %zmm29, %zmm30, %k5
vcmpnle_uqph %zmm29, %zmm30, %k5{%k7}
vcmpnle_uqph {sae}, %zmm29, %zmm30, %k5
vcmpnle_uqph (%rcx), %zmm30, %k5
vcmpnle_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnle_uqph (%rcx){1to32}, %zmm30, %k5
vcmpnle_uqph 8128(%rdx), %zmm30, %k5
vcmpnle_uqph 8192(%rdx), %zmm30, %k5
vcmpnle_uqph -8192(%rdx), %zmm30, %k5
vcmpnle_uqph -8256(%rdx), %zmm30, %k5
vcmpnle_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnle_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnle_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnle_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpord_sph %zmm29, %zmm30, %k5
vcmpord_sph %zmm29, %zmm30, %k5{%k7}
vcmpord_sph {sae}, %zmm29, %zmm30, %k5
vcmpord_sph (%rcx), %zmm30, %k5
vcmpord_sph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpord_sph (%rcx){1to32}, %zmm30, %k5
vcmpord_sph 8128(%rdx), %zmm30, %k5
vcmpord_sph 8192(%rdx), %zmm30, %k5
vcmpord_sph -8192(%rdx), %zmm30, %k5
vcmpord_sph -8256(%rdx), %zmm30, %k5
vcmpord_sph 1016(%rdx){1to32}, %zmm30, %k5
vcmpord_sph 1024(%rdx){1to32}, %zmm30, %k5
vcmpord_sph -1024(%rdx){1to32}, %zmm30, %k5
vcmpord_sph -1032(%rdx){1to32}, %zmm30, %k5
vcmpeq_usph %zmm29, %zmm30, %k5
vcmpeq_usph %zmm29, %zmm30, %k5{%k7}
vcmpeq_usph {sae}, %zmm29, %zmm30, %k5
vcmpeq_usph (%rcx), %zmm30, %k5
vcmpeq_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpeq_usph (%rcx){1to32}, %zmm30, %k5
vcmpeq_usph 8128(%rdx), %zmm30, %k5
vcmpeq_usph 8192(%rdx), %zmm30, %k5
vcmpeq_usph -8192(%rdx), %zmm30, %k5
vcmpeq_usph -8256(%rdx), %zmm30, %k5
vcmpeq_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmpeq_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmpeq_usph -1032(%rdx){1to32}, %zmm30, %k5
vcmpnge_uqph %zmm29, %zmm30, %k5
vcmpnge_uqph %zmm29, %zmm30, %k5{%k7}
vcmpnge_uqph {sae}, %zmm29, %zmm30, %k5
vcmpnge_uqph (%rcx), %zmm30, %k5
vcmpnge_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpnge_uqph (%rcx){1to32}, %zmm30, %k5
vcmpnge_uqph 8128(%rdx), %zmm30, %k5
vcmpnge_uqph 8192(%rdx), %zmm30, %k5
vcmpnge_uqph -8192(%rdx), %zmm30, %k5
vcmpnge_uqph -8256(%rdx), %zmm30, %k5
vcmpnge_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpnge_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpnge_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpnge_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpngt_uqph %zmm29, %zmm30, %k5
vcmpngt_uqph %zmm29, %zmm30, %k5{%k7}
vcmpngt_uqph {sae}, %zmm29, %zmm30, %k5
vcmpngt_uqph (%rcx), %zmm30, %k5
vcmpngt_uqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpngt_uqph (%rcx){1to32}, %zmm30, %k5
vcmpngt_uqph 8128(%rdx), %zmm30, %k5
vcmpngt_uqph 8192(%rdx), %zmm30, %k5
vcmpngt_uqph -8192(%rdx), %zmm30, %k5
vcmpngt_uqph -8256(%rdx), %zmm30, %k5
vcmpngt_uqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpngt_uqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpngt_uqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpngt_uqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpfalse_osph %zmm29, %zmm30, %k5
vcmpfalse_osph %zmm29, %zmm30, %k5{%k7}
vcmpfalse_osph {sae}, %zmm29, %zmm30, %k5
vcmpfalse_osph (%rcx), %zmm30, %k5
vcmpfalse_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpfalse_osph (%rcx){1to32}, %zmm30, %k5
vcmpfalse_osph 8128(%rdx), %zmm30, %k5
vcmpfalse_osph 8192(%rdx), %zmm30, %k5
vcmpfalse_osph -8192(%rdx), %zmm30, %k5
vcmpfalse_osph -8256(%rdx), %zmm30, %k5
vcmpfalse_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmpfalse_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmpfalse_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmpfalse_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmpneq_osph %zmm29, %zmm30, %k5
vcmpneq_osph %zmm29, %zmm30, %k5{%k7}
vcmpneq_osph {sae}, %zmm29, %zmm30, %k5
vcmpneq_osph (%rcx), %zmm30, %k5
vcmpneq_osph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpneq_osph (%rcx){1to32}, %zmm30, %k5
vcmpneq_osph 8128(%rdx), %zmm30, %k5
vcmpneq_osph 8192(%rdx), %zmm30, %k5
vcmpneq_osph -8192(%rdx), %zmm30, %k5
vcmpneq_osph -8256(%rdx), %zmm30, %k5
vcmpneq_osph 1016(%rdx){1to32}, %zmm30, %k5
vcmpneq_osph 1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_osph -1024(%rdx){1to32}, %zmm30, %k5
vcmpneq_osph -1032(%rdx){1to32}, %zmm30, %k5
vcmpge_oqph %zmm29, %zmm30, %k5
vcmpge_oqph %zmm29, %zmm30, %k5{%k7}
vcmpge_oqph {sae}, %zmm29, %zmm30, %k5
vcmpge_oqph (%rcx), %zmm30, %k5
vcmpge_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpge_oqph (%rcx){1to32}, %zmm30, %k5
vcmpge_oqph 8128(%rdx), %zmm30, %k5
vcmpge_oqph 8192(%rdx), %zmm30, %k5
vcmpge_oqph -8192(%rdx), %zmm30, %k5
vcmpge_oqph -8256(%rdx), %zmm30, %k5
vcmpge_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpge_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpge_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpge_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmpgt_oqph %zmm29, %zmm30, %k5
vcmpgt_oqph %zmm29, %zmm30, %k5{%k7}
vcmpgt_oqph {sae}, %zmm29, %zmm30, %k5
vcmpgt_oqph (%rcx), %zmm30, %k5
vcmpgt_oqph 0x123(%rax,%r14,8), %zmm30, %k5
vcmpgt_oqph (%rcx){1to32}, %zmm30, %k5
vcmpgt_oqph 8128(%rdx), %zmm30, %k5
vcmpgt_oqph 8192(%rdx), %zmm30, %k5
vcmpgt_oqph -8192(%rdx), %zmm30, %k5
vcmpgt_oqph -8256(%rdx), %zmm30, %k5
vcmpgt_oqph 1016(%rdx){1to32}, %zmm30, %k5
vcmpgt_oqph 1024(%rdx){1to32}, %zmm30, %k5
vcmpgt_oqph -1024(%rdx){1to32}, %zmm30, %k5
vcmpgt_oqph -1032(%rdx){1to32}, %zmm30, %k5
vcmptrue_usph %zmm29, %zmm30, %k5
vcmptrue_usph %zmm29, %zmm30, %k5{%k7}
vcmptrue_usph {sae}, %zmm29, %zmm30, %k5
vcmptrue_usph (%rcx), %zmm30, %k5
vcmptrue_usph 0x123(%rax,%r14,8), %zmm30, %k5
vcmptrue_usph (%rcx){1to32}, %zmm30, %k5
vcmptrue_usph 8128(%rdx), %zmm30, %k5
vcmptrue_usph 8192(%rdx), %zmm30, %k5
vcmptrue_usph -8192(%rdx), %zmm30, %k5
vcmptrue_usph -8256(%rdx), %zmm30, %k5
vcmptrue_usph 1016(%rdx){1to32}, %zmm30, %k5
vcmptrue_usph 1024(%rdx){1to32}, %zmm30, %k5
vcmptrue_usph -1024(%rdx){1to32}, %zmm30, %k5
vcmptrue_usph -1032(%rdx){1to32}, %zmm30, %k5
# --- Scalar FP16 compares (vcmp*sh), AT&T syntax ---
# Each pseudo-op group below repeats the same operand matrix: reg-reg,
# {sae} with register operands, plain base, SIB with displacement, and
# +/-1016/1024/1032 displacements (values chosen to straddle the
# compressed-disp8 encoding boundary for 16-bit elements -- NOTE(review):
# confirm against the paired expected-output .d file).  All forms carry
# the %k7 writemask on the %k5 destination.
vcmpeq_oqsh %xmm28, %xmm29, %k5{%k7}
vcmpeq_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpeq_oqsh (%rcx), %xmm29, %k5{%k7}
vcmpeq_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpeq_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpeq_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpeqsh %xmm28, %xmm29, %k5{%k7}
vcmpeqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpeqsh (%rcx), %xmm29, %k5{%k7}
vcmpeqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpeqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpeqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpeqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpeqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmplt_ossh %xmm28, %xmm29, %k5{%k7}
vcmplt_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmplt_ossh (%rcx), %xmm29, %k5{%k7}
vcmplt_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmplt_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmplt_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmplt_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmplt_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmpltsh %xmm28, %xmm29, %k5{%k7}
vcmpltsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpltsh (%rcx), %xmm29, %k5{%k7}
vcmpltsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpltsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpltsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpltsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpltsh -1032(%rdx), %xmm29, %k5{%k7}
vcmple_ossh %xmm28, %xmm29, %k5{%k7}
vcmple_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmple_ossh (%rcx), %xmm29, %k5{%k7}
vcmple_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmple_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmple_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmple_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmple_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmplesh %xmm28, %xmm29, %k5{%k7}
vcmplesh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmplesh (%rcx), %xmm29, %k5{%k7}
vcmplesh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmplesh 1016(%rdx), %xmm29, %k5{%k7}
vcmplesh 1024(%rdx), %xmm29, %k5{%k7}
vcmplesh -1024(%rdx), %xmm29, %k5{%k7}
vcmplesh -1032(%rdx), %xmm29, %k5{%k7}
vcmpunord_qsh %xmm28, %xmm29, %k5{%k7}
vcmpunord_qsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpunord_qsh (%rcx), %xmm29, %k5{%k7}
vcmpunord_qsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpunord_qsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpunord_qsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpunord_qsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpunord_qsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpunordsh %xmm28, %xmm29, %k5{%k7}
vcmpunordsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpunordsh (%rcx), %xmm29, %k5{%k7}
vcmpunordsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpunordsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpunordsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpunordsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpunordsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpneq_uqsh %xmm28, %xmm29, %k5{%k7}
vcmpneq_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpneq_uqsh (%rcx), %xmm29, %k5{%k7}
vcmpneq_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpneq_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpneq_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpneqsh %xmm28, %xmm29, %k5{%k7}
vcmpneqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpneqsh (%rcx), %xmm29, %k5{%k7}
vcmpneqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpneqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpneqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpneqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpneqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnlt_ussh %xmm28, %xmm29, %k5{%k7}
vcmpnlt_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnlt_ussh (%rcx), %xmm29, %k5{%k7}
vcmpnlt_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnlt_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnlt_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnlt_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnlt_ussh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnltsh %xmm28, %xmm29, %k5{%k7}
vcmpnltsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnltsh (%rcx), %xmm29, %k5{%k7}
vcmpnltsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnltsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnltsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnltsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnltsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnle_ussh %xmm28, %xmm29, %k5{%k7}
vcmpnle_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnle_ussh (%rcx), %xmm29, %k5{%k7}
vcmpnle_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnle_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnle_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnle_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnle_ussh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnlesh %xmm28, %xmm29, %k5{%k7}
vcmpnlesh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnlesh (%rcx), %xmm29, %k5{%k7}
vcmpnlesh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnlesh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnlesh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnlesh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnlesh -1032(%rdx), %xmm29, %k5{%k7}
vcmpord_qsh %xmm28, %xmm29, %k5{%k7}
vcmpord_qsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpord_qsh (%rcx), %xmm29, %k5{%k7}
vcmpord_qsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpord_qsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpord_qsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpord_qsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpord_qsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpordsh %xmm28, %xmm29, %k5{%k7}
vcmpordsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpordsh (%rcx), %xmm29, %k5{%k7}
vcmpordsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpordsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpordsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpordsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpordsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpeq_uqsh %xmm28, %xmm29, %k5{%k7}
vcmpeq_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpeq_uqsh (%rcx), %xmm29, %k5{%k7}
vcmpeq_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpeq_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpeq_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnge_ussh %xmm28, %xmm29, %k5{%k7}
vcmpnge_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnge_ussh (%rcx), %xmm29, %k5{%k7}
vcmpnge_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnge_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnge_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnge_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnge_ussh -1032(%rdx), %xmm29, %k5{%k7}
vcmpngesh %xmm28, %xmm29, %k5{%k7}
vcmpngesh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpngesh (%rcx), %xmm29, %k5{%k7}
vcmpngesh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpngesh 1016(%rdx), %xmm29, %k5{%k7}
vcmpngesh 1024(%rdx), %xmm29, %k5{%k7}
vcmpngesh -1024(%rdx), %xmm29, %k5{%k7}
vcmpngesh -1032(%rdx), %xmm29, %k5{%k7}
vcmpngt_ussh %xmm28, %xmm29, %k5{%k7}
vcmpngt_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpngt_ussh (%rcx), %xmm29, %k5{%k7}
vcmpngt_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpngt_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmpngt_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmpngt_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmpngt_ussh -1032(%rdx), %xmm29, %k5{%k7}
vcmpngtsh %xmm28, %xmm29, %k5{%k7}
vcmpngtsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpngtsh (%rcx), %xmm29, %k5{%k7}
vcmpngtsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpngtsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpngtsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpngtsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpngtsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpfalse_oqsh %xmm28, %xmm29, %k5{%k7}
vcmpfalse_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpfalse_oqsh (%rcx), %xmm29, %k5{%k7}
vcmpfalse_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpfalse_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpfalse_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpfalse_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpfalse_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpfalsesh %xmm28, %xmm29, %k5{%k7}
vcmpfalsesh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpfalsesh (%rcx), %xmm29, %k5{%k7}
vcmpfalsesh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpfalsesh 1016(%rdx), %xmm29, %k5{%k7}
vcmpfalsesh 1024(%rdx), %xmm29, %k5{%k7}
vcmpfalsesh -1024(%rdx), %xmm29, %k5{%k7}
vcmpfalsesh -1032(%rdx), %xmm29, %k5{%k7}
vcmpneq_oqsh %xmm28, %xmm29, %k5{%k7}
vcmpneq_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpneq_oqsh (%rcx), %xmm29, %k5{%k7}
vcmpneq_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpneq_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpneq_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpge_ossh %xmm28, %xmm29, %k5{%k7}
vcmpge_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpge_ossh (%rcx), %xmm29, %k5{%k7}
vcmpge_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpge_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmpge_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmpge_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmpge_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmpgesh %xmm28, %xmm29, %k5{%k7}
vcmpgesh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpgesh (%rcx), %xmm29, %k5{%k7}
vcmpgesh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpgesh 1016(%rdx), %xmm29, %k5{%k7}
vcmpgesh 1024(%rdx), %xmm29, %k5{%k7}
vcmpgesh -1024(%rdx), %xmm29, %k5{%k7}
vcmpgesh -1032(%rdx), %xmm29, %k5{%k7}
vcmpgt_ossh %xmm28, %xmm29, %k5{%k7}
vcmpgt_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpgt_ossh (%rcx), %xmm29, %k5{%k7}
vcmpgt_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpgt_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmpgt_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmpgt_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmpgt_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmpgtsh %xmm28, %xmm29, %k5{%k7}
vcmpgtsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpgtsh (%rcx), %xmm29, %k5{%k7}
vcmpgtsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpgtsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpgtsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpgtsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpgtsh -1032(%rdx), %xmm29, %k5{%k7}
vcmptrue_uqsh %xmm28, %xmm29, %k5{%k7}
vcmptrue_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmptrue_uqsh (%rcx), %xmm29, %k5{%k7}
vcmptrue_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmptrue_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmptrue_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmptrue_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmptrue_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmptruesh %xmm28, %xmm29, %k5{%k7}
vcmptruesh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmptruesh (%rcx), %xmm29, %k5{%k7}
vcmptruesh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmptruesh 1016(%rdx), %xmm29, %k5{%k7}
vcmptruesh 1024(%rdx), %xmm29, %k5{%k7}
vcmptruesh -1024(%rdx), %xmm29, %k5{%k7}
vcmptruesh -1032(%rdx), %xmm29, %k5{%k7}
vcmpeq_ossh %xmm28, %xmm29, %k5{%k7}
vcmpeq_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpeq_ossh (%rcx), %xmm29, %k5{%k7}
vcmpeq_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpeq_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmpeq_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmplt_oqsh %xmm28, %xmm29, %k5{%k7}
vcmplt_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmplt_oqsh (%rcx), %xmm29, %k5{%k7}
vcmplt_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmplt_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmplt_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmplt_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmplt_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmple_oqsh %xmm28, %xmm29, %k5{%k7}
vcmple_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmple_oqsh (%rcx), %xmm29, %k5{%k7}
vcmple_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmple_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmple_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmple_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmple_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpunord_ssh %xmm28, %xmm29, %k5{%k7}
vcmpunord_ssh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpunord_ssh (%rcx), %xmm29, %k5{%k7}
vcmpunord_ssh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpunord_ssh 1016(%rdx), %xmm29, %k5{%k7}
vcmpunord_ssh 1024(%rdx), %xmm29, %k5{%k7}
vcmpunord_ssh -1024(%rdx), %xmm29, %k5{%k7}
vcmpunord_ssh -1032(%rdx), %xmm29, %k5{%k7}
vcmpneq_ussh %xmm28, %xmm29, %k5{%k7}
vcmpneq_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpneq_ussh (%rcx), %xmm29, %k5{%k7}
vcmpneq_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpneq_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmpneq_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_ussh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnlt_uqsh %xmm28, %xmm29, %k5{%k7}
vcmpnlt_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnlt_uqsh (%rcx), %xmm29, %k5{%k7}
vcmpnlt_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnlt_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnlt_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnlt_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnlt_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnle_uqsh %xmm28, %xmm29, %k5{%k7}
vcmpnle_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnle_uqsh (%rcx), %xmm29, %k5{%k7}
vcmpnle_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnle_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnle_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnle_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnle_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpord_ssh %xmm28, %xmm29, %k5{%k7}
vcmpord_ssh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpord_ssh (%rcx), %xmm29, %k5{%k7}
vcmpord_ssh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpord_ssh 1016(%rdx), %xmm29, %k5{%k7}
vcmpord_ssh 1024(%rdx), %xmm29, %k5{%k7}
vcmpord_ssh -1024(%rdx), %xmm29, %k5{%k7}
vcmpord_ssh -1032(%rdx), %xmm29, %k5{%k7}
vcmpeq_ussh %xmm28, %xmm29, %k5{%k7}
vcmpeq_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpeq_ussh (%rcx), %xmm29, %k5{%k7}
vcmpeq_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpeq_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmpeq_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmpeq_ussh -1032(%rdx), %xmm29, %k5{%k7}
vcmpnge_uqsh %xmm28, %xmm29, %k5{%k7}
vcmpnge_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpnge_uqsh (%rcx), %xmm29, %k5{%k7}
vcmpnge_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpnge_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpnge_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpnge_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpnge_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpngt_uqsh %xmm28, %xmm29, %k5{%k7}
vcmpngt_uqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpngt_uqsh (%rcx), %xmm29, %k5{%k7}
vcmpngt_uqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpngt_uqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpngt_uqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpngt_uqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpngt_uqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpfalse_ossh %xmm28, %xmm29, %k5{%k7}
vcmpfalse_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpfalse_ossh (%rcx), %xmm29, %k5{%k7}
vcmpfalse_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpfalse_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmpfalse_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmpfalse_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmpfalse_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmpneq_ossh %xmm28, %xmm29, %k5{%k7}
vcmpneq_ossh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpneq_ossh (%rcx), %xmm29, %k5{%k7}
vcmpneq_ossh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpneq_ossh 1016(%rdx), %xmm29, %k5{%k7}
vcmpneq_ossh 1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_ossh -1024(%rdx), %xmm29, %k5{%k7}
vcmpneq_ossh -1032(%rdx), %xmm29, %k5{%k7}
vcmpge_oqsh %xmm28, %xmm29, %k5{%k7}
vcmpge_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpge_oqsh (%rcx), %xmm29, %k5{%k7}
vcmpge_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpge_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpge_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpge_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpge_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmpgt_oqsh %xmm28, %xmm29, %k5{%k7}
vcmpgt_oqsh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmpgt_oqsh (%rcx), %xmm29, %k5{%k7}
vcmpgt_oqsh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmpgt_oqsh 1016(%rdx), %xmm29, %k5{%k7}
vcmpgt_oqsh 1024(%rdx), %xmm29, %k5{%k7}
vcmpgt_oqsh -1024(%rdx), %xmm29, %k5{%k7}
vcmpgt_oqsh -1032(%rdx), %xmm29, %k5{%k7}
vcmptrue_ussh %xmm28, %xmm29, %k5{%k7}
vcmptrue_ussh {sae}, %xmm28, %xmm29, %k5{%k7}
vcmptrue_ussh (%rcx), %xmm29, %k5{%k7}
vcmptrue_ussh 0x123(%rax,%r14,8), %xmm29, %k5{%k7}
vcmptrue_ussh 1016(%rdx), %xmm29, %k5{%k7}
vcmptrue_ussh 1024(%rdx), %xmm29, %k5{%k7}
vcmptrue_ussh -1024(%rdx), %xmm29, %k5{%k7}
vcmptrue_ussh -1032(%rdx), %xmm29, %k5{%k7}
# --- Intel-syntax (noprefix) repetition of the vcmp* tests ---
# Same instruction/operand matrix as the AT&T section above, re-expressed
# in Intel syntax: destination first, explicit ZMMWORD PTR size on full
# memory operands, {1to32} broadcast suffix on the memory operand, and
# {sae} written as a trailing operand.
.intel_syntax noprefix
vcmpeq_oqph k5, zmm30, zmm29
vcmpeq_oqph k5{k7}, zmm30, zmm29
vcmpeq_oqph k5, zmm30, zmm29, {sae}
vcmpeq_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpeq_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpeq_oqph k5, zmm30, [rcx]{1to32}
vcmpeq_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpeq_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpeq_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpeq_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpeq_oqph k5, zmm30, [rdx+1016]{1to32}
vcmpeq_oqph k5, zmm30, [rdx+1024]{1to32}
vcmpeq_oqph k5, zmm30, [rdx-1024]{1to32}
vcmpeq_oqph k5, zmm30, [rdx-1032]{1to32}
vcmpeqph k5, zmm30, zmm29
vcmpeqph k5{k7}, zmm30, zmm29
vcmpeqph k5, zmm30, zmm29, {sae}
vcmpeqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpeqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpeqph k5, zmm30, [rcx]{1to32}
vcmpeqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpeqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpeqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpeqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpeqph k5, zmm30, [rdx+1016]{1to32}
vcmpeqph k5, zmm30, [rdx+1024]{1to32}
vcmpeqph k5, zmm30, [rdx-1024]{1to32}
vcmpeqph k5, zmm30, [rdx-1032]{1to32}
vcmplt_osph k5, zmm30, zmm29
vcmplt_osph k5{k7}, zmm30, zmm29
vcmplt_osph k5, zmm30, zmm29, {sae}
vcmplt_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmplt_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmplt_osph k5, zmm30, [rcx]{1to32}
vcmplt_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmplt_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmplt_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmplt_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmplt_osph k5, zmm30, [rdx+1016]{1to32}
vcmplt_osph k5, zmm30, [rdx+1024]{1to32}
vcmplt_osph k5, zmm30, [rdx-1024]{1to32}
vcmplt_osph k5, zmm30, [rdx-1032]{1to32}
vcmpltph k5, zmm30, zmm29
vcmpltph k5{k7}, zmm30, zmm29
vcmpltph k5, zmm30, zmm29, {sae}
vcmpltph k5, zmm30, ZMMWORD PTR [rcx]
vcmpltph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpltph k5, zmm30, [rcx]{1to32}
vcmpltph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpltph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpltph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpltph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpltph k5, zmm30, [rdx+1016]{1to32}
vcmpltph k5, zmm30, [rdx+1024]{1to32}
vcmpltph k5, zmm30, [rdx-1024]{1to32}
vcmpltph k5, zmm30, [rdx-1032]{1to32}
vcmple_osph k5, zmm30, zmm29
vcmple_osph k5{k7}, zmm30, zmm29
vcmple_osph k5, zmm30, zmm29, {sae}
vcmple_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmple_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmple_osph k5, zmm30, [rcx]{1to32}
vcmple_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmple_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmple_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmple_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmple_osph k5, zmm30, [rdx+1016]{1to32}
vcmple_osph k5, zmm30, [rdx+1024]{1to32}
vcmple_osph k5, zmm30, [rdx-1024]{1to32}
vcmple_osph k5, zmm30, [rdx-1032]{1to32}
vcmpleph k5, zmm30, zmm29
vcmpleph k5{k7}, zmm30, zmm29
vcmpleph k5, zmm30, zmm29, {sae}
vcmpleph k5, zmm30, ZMMWORD PTR [rcx]
vcmpleph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpleph k5, zmm30, [rcx]{1to32}
vcmpleph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpleph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpleph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpleph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpleph k5, zmm30, [rdx+1016]{1to32}
vcmpleph k5, zmm30, [rdx+1024]{1to32}
vcmpleph k5, zmm30, [rdx-1024]{1to32}
vcmpleph k5, zmm30, [rdx-1032]{1to32}
vcmpunord_qph k5, zmm30, zmm29
vcmpunord_qph k5{k7}, zmm30, zmm29
vcmpunord_qph k5, zmm30, zmm29, {sae}
vcmpunord_qph k5, zmm30, ZMMWORD PTR [rcx]
vcmpunord_qph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpunord_qph k5, zmm30, [rcx]{1to32}
vcmpunord_qph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpunord_qph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpunord_qph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpunord_qph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpunord_qph k5, zmm30, [rdx+1016]{1to32}
vcmpunord_qph k5, zmm30, [rdx+1024]{1to32}
vcmpunord_qph k5, zmm30, [rdx-1024]{1to32}
vcmpunord_qph k5, zmm30, [rdx-1032]{1to32}
vcmpunordph k5, zmm30, zmm29
vcmpunordph k5{k7}, zmm30, zmm29
vcmpunordph k5, zmm30, zmm29, {sae}
vcmpunordph k5, zmm30, ZMMWORD PTR [rcx]
vcmpunordph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpunordph k5, zmm30, [rcx]{1to32}
vcmpunordph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpunordph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpunordph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpunordph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpunordph k5, zmm30, [rdx+1016]{1to32}
vcmpunordph k5, zmm30, [rdx+1024]{1to32}
vcmpunordph k5, zmm30, [rdx-1024]{1to32}
vcmpunordph k5, zmm30, [rdx-1032]{1to32}
vcmpneq_uqph k5, zmm30, zmm29
vcmpneq_uqph k5{k7}, zmm30, zmm29
vcmpneq_uqph k5, zmm30, zmm29, {sae}
vcmpneq_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpneq_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpneq_uqph k5, zmm30, [rcx]{1to32}
vcmpneq_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpneq_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpneq_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpneq_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpneq_uqph k5, zmm30, [rdx+1016]{1to32}
vcmpneq_uqph k5, zmm30, [rdx+1024]{1to32}
vcmpneq_uqph k5, zmm30, [rdx-1024]{1to32}
vcmpneq_uqph k5, zmm30, [rdx-1032]{1to32}
vcmpneqph k5, zmm30, zmm29
vcmpneqph k5{k7}, zmm30, zmm29
vcmpneqph k5, zmm30, zmm29, {sae}
vcmpneqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpneqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpneqph k5, zmm30, [rcx]{1to32}
vcmpneqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpneqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpneqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpneqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpneqph k5, zmm30, [rdx+1016]{1to32}
vcmpneqph k5, zmm30, [rdx+1024]{1to32}
vcmpneqph k5, zmm30, [rdx-1024]{1to32}
vcmpneqph k5, zmm30, [rdx-1032]{1to32}
vcmpnlt_usph k5, zmm30, zmm29
vcmpnlt_usph k5{k7}, zmm30, zmm29
vcmpnlt_usph k5, zmm30, zmm29, {sae}
vcmpnlt_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnlt_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnlt_usph k5, zmm30, [rcx]{1to32}
vcmpnlt_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnlt_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnlt_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnlt_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnlt_usph k5, zmm30, [rdx+1016]{1to32}
vcmpnlt_usph k5, zmm30, [rdx+1024]{1to32}
vcmpnlt_usph k5, zmm30, [rdx-1024]{1to32}
vcmpnlt_usph k5, zmm30, [rdx-1032]{1to32}
vcmpnltph k5, zmm30, zmm29
vcmpnltph k5{k7}, zmm30, zmm29
vcmpnltph k5, zmm30, zmm29, {sae}
vcmpnltph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnltph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnltph k5, zmm30, [rcx]{1to32}
vcmpnltph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnltph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnltph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnltph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnltph k5, zmm30, [rdx+1016]{1to32}
vcmpnltph k5, zmm30, [rdx+1024]{1to32}
vcmpnltph k5, zmm30, [rdx-1024]{1to32}
vcmpnltph k5, zmm30, [rdx-1032]{1to32}
# ---------------------------------------------------------------------------
# VCMPPH pseudo-op coverage, packed ZMM forms (AVX512-FP16 + AVX512BW k-regs).
# Each mnemonic below is a pseudo-op for VCMPPH with a fixed imm8 predicate
# (e.g. vcmpnle_usph == vcmpph ..., 0x16).  Every group exercises the same
# 14 operand shapes, in this order:
#   1. reg-reg                          k5, zmm30, zmm29
#   2. reg-reg with opmask              k5{k7}, zmm30, zmm29
#   3. reg-reg with {sae}
#   4. full-width memory operand        ZMMWORD PTR [rcx]
#   5. SIB addressing with extended regs and 32-bit disp
#   6. 16-bit-element broadcast         [rcx]{1to32}
#   7. disp8*N upper bound              [rdx+8128]  (127 * 64, fits disp8)
#   8. just past disp8*N                [rdx+8192]  (forces disp32)
#   9. disp8*N lower bound              [rdx-8192]  (-128 * 64)
#  10. just past lower bound            [rdx-8256]  (forces disp32)
#  11. broadcast disp8*N upper bound    [rdx+1016]  (127 * 2)
#  12. broadcast just past upper bound  [rdx+1024]  (forces disp32)
#  13. broadcast disp8*N lower bound    [rdx-1024]  (-128 * 2)
#  14. broadcast just past lower bound  [rdx-1032]  (forces disp32)
# The exact text of each line is matched by the companion .d expected-dump
# file, so instruction lines must not be reworded or reordered.
# ---------------------------------------------------------------------------
vcmpnle_usph k5, zmm30, zmm29
vcmpnle_usph k5{k7}, zmm30, zmm29
vcmpnle_usph k5, zmm30, zmm29, {sae}
vcmpnle_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnle_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnle_usph k5, zmm30, [rcx]{1to32}
vcmpnle_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnle_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnle_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnle_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnle_usph k5, zmm30, [rdx+1016]{1to32}
vcmpnle_usph k5, zmm30, [rdx+1024]{1to32}
vcmpnle_usph k5, zmm30, [rdx-1024]{1to32}
vcmpnle_usph k5, zmm30, [rdx-1032]{1to32}
vcmpnleph k5, zmm30, zmm29
vcmpnleph k5{k7}, zmm30, zmm29
vcmpnleph k5, zmm30, zmm29, {sae}
vcmpnleph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnleph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnleph k5, zmm30, [rcx]{1to32}
vcmpnleph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnleph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnleph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnleph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnleph k5, zmm30, [rdx+1016]{1to32}
vcmpnleph k5, zmm30, [rdx+1024]{1to32}
vcmpnleph k5, zmm30, [rdx-1024]{1to32}
vcmpnleph k5, zmm30, [rdx-1032]{1to32}
vcmpord_qph k5, zmm30, zmm29
vcmpord_qph k5{k7}, zmm30, zmm29
vcmpord_qph k5, zmm30, zmm29, {sae}
vcmpord_qph k5, zmm30, ZMMWORD PTR [rcx]
vcmpord_qph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpord_qph k5, zmm30, [rcx]{1to32}
vcmpord_qph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpord_qph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpord_qph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpord_qph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpord_qph k5, zmm30, [rdx+1016]{1to32}
vcmpord_qph k5, zmm30, [rdx+1024]{1to32}
vcmpord_qph k5, zmm30, [rdx-1024]{1to32}
vcmpord_qph k5, zmm30, [rdx-1032]{1to32}
vcmpordph k5, zmm30, zmm29
vcmpordph k5{k7}, zmm30, zmm29
vcmpordph k5, zmm30, zmm29, {sae}
vcmpordph k5, zmm30, ZMMWORD PTR [rcx]
vcmpordph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpordph k5, zmm30, [rcx]{1to32}
vcmpordph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpordph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpordph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpordph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpordph k5, zmm30, [rdx+1016]{1to32}
vcmpordph k5, zmm30, [rdx+1024]{1to32}
vcmpordph k5, zmm30, [rdx-1024]{1to32}
vcmpordph k5, zmm30, [rdx-1032]{1to32}
vcmpeq_uqph k5, zmm30, zmm29
vcmpeq_uqph k5{k7}, zmm30, zmm29
vcmpeq_uqph k5, zmm30, zmm29, {sae}
vcmpeq_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpeq_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpeq_uqph k5, zmm30, [rcx]{1to32}
vcmpeq_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpeq_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpeq_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpeq_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpeq_uqph k5, zmm30, [rdx+1016]{1to32}
vcmpeq_uqph k5, zmm30, [rdx+1024]{1to32}
vcmpeq_uqph k5, zmm30, [rdx-1024]{1to32}
vcmpeq_uqph k5, zmm30, [rdx-1032]{1to32}
vcmpnge_usph k5, zmm30, zmm29
vcmpnge_usph k5{k7}, zmm30, zmm29
vcmpnge_usph k5, zmm30, zmm29, {sae}
vcmpnge_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnge_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnge_usph k5, zmm30, [rcx]{1to32}
vcmpnge_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnge_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnge_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnge_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnge_usph k5, zmm30, [rdx+1016]{1to32}
vcmpnge_usph k5, zmm30, [rdx+1024]{1to32}
vcmpnge_usph k5, zmm30, [rdx-1024]{1to32}
vcmpnge_usph k5, zmm30, [rdx-1032]{1to32}
vcmpngeph k5, zmm30, zmm29
vcmpngeph k5{k7}, zmm30, zmm29
vcmpngeph k5, zmm30, zmm29, {sae}
vcmpngeph k5, zmm30, ZMMWORD PTR [rcx]
vcmpngeph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpngeph k5, zmm30, [rcx]{1to32}
vcmpngeph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpngeph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpngeph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpngeph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpngeph k5, zmm30, [rdx+1016]{1to32}
vcmpngeph k5, zmm30, [rdx+1024]{1to32}
vcmpngeph k5, zmm30, [rdx-1024]{1to32}
vcmpngeph k5, zmm30, [rdx-1032]{1to32}
vcmpngt_usph k5, zmm30, zmm29
vcmpngt_usph k5{k7}, zmm30, zmm29
vcmpngt_usph k5, zmm30, zmm29, {sae}
vcmpngt_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmpngt_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpngt_usph k5, zmm30, [rcx]{1to32}
vcmpngt_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpngt_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpngt_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpngt_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpngt_usph k5, zmm30, [rdx+1016]{1to32}
vcmpngt_usph k5, zmm30, [rdx+1024]{1to32}
vcmpngt_usph k5, zmm30, [rdx-1024]{1to32}
vcmpngt_usph k5, zmm30, [rdx-1032]{1to32}
vcmpngtph k5, zmm30, zmm29
vcmpngtph k5{k7}, zmm30, zmm29
vcmpngtph k5, zmm30, zmm29, {sae}
vcmpngtph k5, zmm30, ZMMWORD PTR [rcx]
vcmpngtph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpngtph k5, zmm30, [rcx]{1to32}
vcmpngtph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpngtph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpngtph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpngtph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpngtph k5, zmm30, [rdx+1016]{1to32}
vcmpngtph k5, zmm30, [rdx+1024]{1to32}
vcmpngtph k5, zmm30, [rdx-1024]{1to32}
vcmpngtph k5, zmm30, [rdx-1032]{1to32}
vcmpfalse_oqph k5, zmm30, zmm29
vcmpfalse_oqph k5{k7}, zmm30, zmm29
vcmpfalse_oqph k5, zmm30, zmm29, {sae}
vcmpfalse_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpfalse_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpfalse_oqph k5, zmm30, [rcx]{1to32}
vcmpfalse_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpfalse_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpfalse_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpfalse_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpfalse_oqph k5, zmm30, [rdx+1016]{1to32}
vcmpfalse_oqph k5, zmm30, [rdx+1024]{1to32}
vcmpfalse_oqph k5, zmm30, [rdx-1024]{1to32}
vcmpfalse_oqph k5, zmm30, [rdx-1032]{1to32}
vcmpfalseph k5, zmm30, zmm29
vcmpfalseph k5{k7}, zmm30, zmm29
vcmpfalseph k5, zmm30, zmm29, {sae}
vcmpfalseph k5, zmm30, ZMMWORD PTR [rcx]
vcmpfalseph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpfalseph k5, zmm30, [rcx]{1to32}
vcmpfalseph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpfalseph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpfalseph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpfalseph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpfalseph k5, zmm30, [rdx+1016]{1to32}
vcmpfalseph k5, zmm30, [rdx+1024]{1to32}
vcmpfalseph k5, zmm30, [rdx-1024]{1to32}
vcmpfalseph k5, zmm30, [rdx-1032]{1to32}
vcmpneq_oqph k5, zmm30, zmm29
vcmpneq_oqph k5{k7}, zmm30, zmm29
vcmpneq_oqph k5, zmm30, zmm29, {sae}
vcmpneq_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpneq_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpneq_oqph k5, zmm30, [rcx]{1to32}
vcmpneq_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpneq_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpneq_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpneq_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpneq_oqph k5, zmm30, [rdx+1016]{1to32}
vcmpneq_oqph k5, zmm30, [rdx+1024]{1to32}
vcmpneq_oqph k5, zmm30, [rdx-1024]{1to32}
vcmpneq_oqph k5, zmm30, [rdx-1032]{1to32}
vcmpge_osph k5, zmm30, zmm29
vcmpge_osph k5{k7}, zmm30, zmm29
vcmpge_osph k5, zmm30, zmm29, {sae}
vcmpge_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmpge_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpge_osph k5, zmm30, [rcx]{1to32}
vcmpge_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpge_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpge_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpge_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpge_osph k5, zmm30, [rdx+1016]{1to32}
vcmpge_osph k5, zmm30, [rdx+1024]{1to32}
vcmpge_osph k5, zmm30, [rdx-1024]{1to32}
vcmpge_osph k5, zmm30, [rdx-1032]{1to32}
vcmpgeph k5, zmm30, zmm29
vcmpgeph k5{k7}, zmm30, zmm29
vcmpgeph k5, zmm30, zmm29, {sae}
vcmpgeph k5, zmm30, ZMMWORD PTR [rcx]
vcmpgeph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpgeph k5, zmm30, [rcx]{1to32}
vcmpgeph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpgeph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpgeph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpgeph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpgeph k5, zmm30, [rdx+1016]{1to32}
vcmpgeph k5, zmm30, [rdx+1024]{1to32}
vcmpgeph k5, zmm30, [rdx-1024]{1to32}
vcmpgeph k5, zmm30, [rdx-1032]{1to32}
vcmpgt_osph k5, zmm30, zmm29
vcmpgt_osph k5{k7}, zmm30, zmm29
vcmpgt_osph k5, zmm30, zmm29, {sae}
vcmpgt_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmpgt_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpgt_osph k5, zmm30, [rcx]{1to32}
vcmpgt_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpgt_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpgt_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpgt_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpgt_osph k5, zmm30, [rdx+1016]{1to32}
vcmpgt_osph k5, zmm30, [rdx+1024]{1to32}
vcmpgt_osph k5, zmm30, [rdx-1024]{1to32}
vcmpgt_osph k5, zmm30, [rdx-1032]{1to32}
vcmpgtph k5, zmm30, zmm29
vcmpgtph k5{k7}, zmm30, zmm29
vcmpgtph k5, zmm30, zmm29, {sae}
vcmpgtph k5, zmm30, ZMMWORD PTR [rcx]
vcmpgtph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpgtph k5, zmm30, [rcx]{1to32}
vcmpgtph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpgtph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpgtph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpgtph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpgtph k5, zmm30, [rdx+1016]{1to32}
vcmpgtph k5, zmm30, [rdx+1024]{1to32}
vcmpgtph k5, zmm30, [rdx-1024]{1to32}
vcmpgtph k5, zmm30, [rdx-1032]{1to32}
vcmptrue_uqph k5, zmm30, zmm29
vcmptrue_uqph k5{k7}, zmm30, zmm29
vcmptrue_uqph k5, zmm30, zmm29, {sae}
vcmptrue_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmptrue_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmptrue_uqph k5, zmm30, [rcx]{1to32}
vcmptrue_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmptrue_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmptrue_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmptrue_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmptrue_uqph k5, zmm30, [rdx+1016]{1to32}
vcmptrue_uqph k5, zmm30, [rdx+1024]{1to32}
vcmptrue_uqph k5, zmm30, [rdx-1024]{1to32}
vcmptrue_uqph k5, zmm30, [rdx-1032]{1to32}
vcmptrueph k5, zmm30, zmm29
vcmptrueph k5{k7}, zmm30, zmm29
vcmptrueph k5, zmm30, zmm29, {sae}
vcmptrueph k5, zmm30, ZMMWORD PTR [rcx]
vcmptrueph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmptrueph k5, zmm30, [rcx]{1to32}
vcmptrueph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmptrueph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmptrueph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmptrueph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmptrueph k5, zmm30, [rdx+1016]{1to32}
vcmptrueph k5, zmm30, [rdx+1024]{1to32}
vcmptrueph k5, zmm30, [rdx-1024]{1to32}
vcmptrueph k5, zmm30, [rdx-1032]{1to32}
vcmpeq_osph k5, zmm30, zmm29
vcmpeq_osph k5{k7}, zmm30, zmm29
vcmpeq_osph k5, zmm30, zmm29, {sae}
vcmpeq_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmpeq_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpeq_osph k5, zmm30, [rcx]{1to32}
vcmpeq_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpeq_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpeq_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpeq_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpeq_osph k5, zmm30, [rdx+1016]{1to32}
vcmpeq_osph k5, zmm30, [rdx+1024]{1to32}
vcmpeq_osph k5, zmm30, [rdx-1024]{1to32}
vcmpeq_osph k5, zmm30, [rdx-1032]{1to32}
vcmplt_oqph k5, zmm30, zmm29
vcmplt_oqph k5{k7}, zmm30, zmm29
vcmplt_oqph k5, zmm30, zmm29, {sae}
vcmplt_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmplt_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmplt_oqph k5, zmm30, [rcx]{1to32}
vcmplt_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmplt_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmplt_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmplt_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmplt_oqph k5, zmm30, [rdx+1016]{1to32}
vcmplt_oqph k5, zmm30, [rdx+1024]{1to32}
vcmplt_oqph k5, zmm30, [rdx-1024]{1to32}
vcmplt_oqph k5, zmm30, [rdx-1032]{1to32}
vcmple_oqph k5, zmm30, zmm29
vcmple_oqph k5{k7}, zmm30, zmm29
vcmple_oqph k5, zmm30, zmm29, {sae}
vcmple_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmple_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmple_oqph k5, zmm30, [rcx]{1to32}
vcmple_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmple_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmple_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmple_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmple_oqph k5, zmm30, [rdx+1016]{1to32}
vcmple_oqph k5, zmm30, [rdx+1024]{1to32}
vcmple_oqph k5, zmm30, [rdx-1024]{1to32}
vcmple_oqph k5, zmm30, [rdx-1032]{1to32}
vcmpunord_sph k5, zmm30, zmm29
vcmpunord_sph k5{k7}, zmm30, zmm29
vcmpunord_sph k5, zmm30, zmm29, {sae}
vcmpunord_sph k5, zmm30, ZMMWORD PTR [rcx]
vcmpunord_sph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpunord_sph k5, zmm30, [rcx]{1to32}
vcmpunord_sph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpunord_sph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpunord_sph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpunord_sph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpunord_sph k5, zmm30, [rdx+1016]{1to32}
vcmpunord_sph k5, zmm30, [rdx+1024]{1to32}
vcmpunord_sph k5, zmm30, [rdx-1024]{1to32}
vcmpunord_sph k5, zmm30, [rdx-1032]{1to32}
vcmpneq_usph k5, zmm30, zmm29
vcmpneq_usph k5{k7}, zmm30, zmm29
vcmpneq_usph k5, zmm30, zmm29, {sae}
vcmpneq_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmpneq_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpneq_usph k5, zmm30, [rcx]{1to32}
vcmpneq_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpneq_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpneq_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpneq_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpneq_usph k5, zmm30, [rdx+1016]{1to32}
vcmpneq_usph k5, zmm30, [rdx+1024]{1to32}
vcmpneq_usph k5, zmm30, [rdx-1024]{1to32}
vcmpneq_usph k5, zmm30, [rdx-1032]{1to32}
vcmpnlt_uqph k5, zmm30, zmm29
vcmpnlt_uqph k5{k7}, zmm30, zmm29
vcmpnlt_uqph k5, zmm30, zmm29, {sae}
vcmpnlt_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnlt_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnlt_uqph k5, zmm30, [rcx]{1to32}
vcmpnlt_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnlt_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnlt_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnlt_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnlt_uqph k5, zmm30, [rdx+1016]{1to32}
vcmpnlt_uqph k5, zmm30, [rdx+1024]{1to32}
vcmpnlt_uqph k5, zmm30, [rdx-1024]{1to32}
vcmpnlt_uqph k5, zmm30, [rdx-1032]{1to32}
vcmpnle_uqph k5, zmm30, zmm29
vcmpnle_uqph k5{k7}, zmm30, zmm29
vcmpnle_uqph k5, zmm30, zmm29, {sae}
vcmpnle_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnle_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnle_uqph k5, zmm30, [rcx]{1to32}
vcmpnle_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnle_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnle_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnle_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnle_uqph k5, zmm30, [rdx+1016]{1to32}
vcmpnle_uqph k5, zmm30, [rdx+1024]{1to32}
vcmpnle_uqph k5, zmm30, [rdx-1024]{1to32}
vcmpnle_uqph k5, zmm30, [rdx-1032]{1to32}
vcmpord_sph k5, zmm30, zmm29
vcmpord_sph k5{k7}, zmm30, zmm29
vcmpord_sph k5, zmm30, zmm29, {sae}
vcmpord_sph k5, zmm30, ZMMWORD PTR [rcx]
vcmpord_sph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpord_sph k5, zmm30, [rcx]{1to32}
vcmpord_sph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpord_sph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpord_sph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpord_sph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpord_sph k5, zmm30, [rdx+1016]{1to32}
vcmpord_sph k5, zmm30, [rdx+1024]{1to32}
vcmpord_sph k5, zmm30, [rdx-1024]{1to32}
vcmpord_sph k5, zmm30, [rdx-1032]{1to32}
vcmpeq_usph k5, zmm30, zmm29
vcmpeq_usph k5{k7}, zmm30, zmm29
vcmpeq_usph k5, zmm30, zmm29, {sae}
vcmpeq_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmpeq_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpeq_usph k5, zmm30, [rcx]{1to32}
vcmpeq_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpeq_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpeq_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpeq_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpeq_usph k5, zmm30, [rdx+1016]{1to32}
vcmpeq_usph k5, zmm30, [rdx+1024]{1to32}
vcmpeq_usph k5, zmm30, [rdx-1024]{1to32}
vcmpeq_usph k5, zmm30, [rdx-1032]{1to32}
vcmpnge_uqph k5, zmm30, zmm29
vcmpnge_uqph k5{k7}, zmm30, zmm29
vcmpnge_uqph k5, zmm30, zmm29, {sae}
vcmpnge_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpnge_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpnge_uqph k5, zmm30, [rcx]{1to32}
vcmpnge_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpnge_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpnge_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpnge_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpnge_uqph k5, zmm30, [rdx+1016]{1to32}
vcmpnge_uqph k5, zmm30, [rdx+1024]{1to32}
vcmpnge_uqph k5, zmm30, [rdx-1024]{1to32}
vcmpnge_uqph k5, zmm30, [rdx-1032]{1to32}
vcmpngt_uqph k5, zmm30, zmm29
vcmpngt_uqph k5{k7}, zmm30, zmm29
vcmpngt_uqph k5, zmm30, zmm29, {sae}
vcmpngt_uqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpngt_uqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpngt_uqph k5, zmm30, [rcx]{1to32}
vcmpngt_uqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpngt_uqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpngt_uqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpngt_uqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpngt_uqph k5, zmm30, [rdx+1016]{1to32}
vcmpngt_uqph k5, zmm30, [rdx+1024]{1to32}
vcmpngt_uqph k5, zmm30, [rdx-1024]{1to32}
vcmpngt_uqph k5, zmm30, [rdx-1032]{1to32}
vcmpfalse_osph k5, zmm30, zmm29
vcmpfalse_osph k5{k7}, zmm30, zmm29
vcmpfalse_osph k5, zmm30, zmm29, {sae}
vcmpfalse_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmpfalse_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpfalse_osph k5, zmm30, [rcx]{1to32}
vcmpfalse_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpfalse_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpfalse_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpfalse_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpfalse_osph k5, zmm30, [rdx+1016]{1to32}
vcmpfalse_osph k5, zmm30, [rdx+1024]{1to32}
vcmpfalse_osph k5, zmm30, [rdx-1024]{1to32}
vcmpfalse_osph k5, zmm30, [rdx-1032]{1to32}
vcmpneq_osph k5, zmm30, zmm29
vcmpneq_osph k5{k7}, zmm30, zmm29
vcmpneq_osph k5, zmm30, zmm29, {sae}
vcmpneq_osph k5, zmm30, ZMMWORD PTR [rcx]
vcmpneq_osph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpneq_osph k5, zmm30, [rcx]{1to32}
vcmpneq_osph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpneq_osph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpneq_osph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpneq_osph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpneq_osph k5, zmm30, [rdx+1016]{1to32}
vcmpneq_osph k5, zmm30, [rdx+1024]{1to32}
vcmpneq_osph k5, zmm30, [rdx-1024]{1to32}
vcmpneq_osph k5, zmm30, [rdx-1032]{1to32}
vcmpge_oqph k5, zmm30, zmm29
vcmpge_oqph k5{k7}, zmm30, zmm29
vcmpge_oqph k5, zmm30, zmm29, {sae}
vcmpge_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpge_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpge_oqph k5, zmm30, [rcx]{1to32}
vcmpge_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpge_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpge_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpge_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpge_oqph k5, zmm30, [rdx+1016]{1to32}
vcmpge_oqph k5, zmm30, [rdx+1024]{1to32}
vcmpge_oqph k5, zmm30, [rdx-1024]{1to32}
vcmpge_oqph k5, zmm30, [rdx-1032]{1to32}
vcmpgt_oqph k5, zmm30, zmm29
vcmpgt_oqph k5{k7}, zmm30, zmm29
vcmpgt_oqph k5, zmm30, zmm29, {sae}
vcmpgt_oqph k5, zmm30, ZMMWORD PTR [rcx]
vcmpgt_oqph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmpgt_oqph k5, zmm30, [rcx]{1to32}
vcmpgt_oqph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmpgt_oqph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmpgt_oqph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmpgt_oqph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmpgt_oqph k5, zmm30, [rdx+1016]{1to32}
vcmpgt_oqph k5, zmm30, [rdx+1024]{1to32}
vcmpgt_oqph k5, zmm30, [rdx-1024]{1to32}
vcmpgt_oqph k5, zmm30, [rdx-1032]{1to32}
vcmptrue_usph k5, zmm30, zmm29
vcmptrue_usph k5{k7}, zmm30, zmm29
vcmptrue_usph k5, zmm30, zmm29, {sae}
vcmptrue_usph k5, zmm30, ZMMWORD PTR [rcx]
vcmptrue_usph k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]
vcmptrue_usph k5, zmm30, [rcx]{1to32}
vcmptrue_usph k5, zmm30, ZMMWORD PTR [rdx+8128]
vcmptrue_usph k5, zmm30, ZMMWORD PTR [rdx+8192]
vcmptrue_usph k5, zmm30, ZMMWORD PTR [rdx-8192]
vcmptrue_usph k5, zmm30, ZMMWORD PTR [rdx-8256]
vcmptrue_usph k5, zmm30, [rdx+1016]{1to32}
vcmptrue_usph k5, zmm30, [rdx+1024]{1to32}
vcmptrue_usph k5, zmm30, [rdx-1024]{1to32}
vcmptrue_usph k5, zmm30, [rdx-1032]{1to32}
# ---------------------------------------------------------------------------
# VCMPSH pseudo-op coverage, scalar forms (AVX512-FP16).  Each mnemonic is a
# pseudo-op for VCMPSH with a fixed imm8 predicate.  Every group exercises
# the same 8 operand shapes, in this order:
#   1. reg-reg with opmask              k5{k7}, xmm29, xmm28
#   2. reg-reg with {sae}
#   3. scalar memory operand            WORD PTR [rcx]
#   4. SIB addressing with extended regs and 32-bit disp
#   5. disp8*N upper bound              [rdx+1016]  (127 * 2, fits disp8)
#   6. just past upper bound            [rdx+1024]  (forces disp32)
#   7. disp8*N lower bound              [rdx-1024]  (-128 * 2)
#   8. just past lower bound            [rdx-1032]  (forces disp32)
# The exact text of each line is matched by the companion .d expected-dump
# file, so instruction lines must not be reworded or reordered.
# ---------------------------------------------------------------------------
vcmpeq_oqsh k5{k7}, xmm29, xmm28
vcmpeq_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmpeq_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpeq_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpeq_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpeq_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpeq_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpeq_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpeqsh k5{k7}, xmm29, xmm28
vcmpeqsh k5{k7}, xmm29, xmm28, {sae}
vcmpeqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpeqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpeqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpeqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpeqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpeqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmplt_ossh k5{k7}, xmm29, xmm28
vcmplt_ossh k5{k7}, xmm29, xmm28, {sae}
vcmplt_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmplt_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmplt_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmplt_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmplt_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmplt_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpltsh k5{k7}, xmm29, xmm28
vcmpltsh k5{k7}, xmm29, xmm28, {sae}
vcmpltsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpltsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpltsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpltsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpltsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpltsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmple_ossh k5{k7}, xmm29, xmm28
vcmple_ossh k5{k7}, xmm29, xmm28, {sae}
vcmple_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmple_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmple_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmple_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmple_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmple_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmplesh k5{k7}, xmm29, xmm28
vcmplesh k5{k7}, xmm29, xmm28, {sae}
vcmplesh k5{k7}, xmm29, WORD PTR [rcx]
vcmplesh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmplesh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmplesh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmplesh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmplesh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpunord_qsh k5{k7}, xmm29, xmm28
vcmpunord_qsh k5{k7}, xmm29, xmm28, {sae}
vcmpunord_qsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpunord_qsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpunord_qsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpunord_qsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpunord_qsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpunord_qsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpunordsh k5{k7}, xmm29, xmm28
vcmpunordsh k5{k7}, xmm29, xmm28, {sae}
vcmpunordsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpunordsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpunordsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpunordsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpunordsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpunordsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpneq_uqsh k5{k7}, xmm29, xmm28
vcmpneq_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmpneq_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpneq_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpneq_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpneq_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpneq_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpneq_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpneqsh k5{k7}, xmm29, xmm28
vcmpneqsh k5{k7}, xmm29, xmm28, {sae}
vcmpneqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpneqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpneqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpneqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpneqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpneqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnlt_ussh k5{k7}, xmm29, xmm28
vcmpnlt_ussh k5{k7}, xmm29, xmm28, {sae}
vcmpnlt_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnlt_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnlt_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnlt_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnlt_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnlt_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnltsh k5{k7}, xmm29, xmm28
vcmpnltsh k5{k7}, xmm29, xmm28, {sae}
vcmpnltsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnltsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnltsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnltsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnltsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnltsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnle_ussh k5{k7}, xmm29, xmm28
vcmpnle_ussh k5{k7}, xmm29, xmm28, {sae}
vcmpnle_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnle_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnle_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnle_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnle_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnle_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnlesh k5{k7}, xmm29, xmm28
vcmpnlesh k5{k7}, xmm29, xmm28, {sae}
vcmpnlesh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnlesh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnlesh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnlesh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnlesh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnlesh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpord_qsh k5{k7}, xmm29, xmm28
vcmpord_qsh k5{k7}, xmm29, xmm28, {sae}
vcmpord_qsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpord_qsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpord_qsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpord_qsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpord_qsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpord_qsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpordsh k5{k7}, xmm29, xmm28
vcmpordsh k5{k7}, xmm29, xmm28, {sae}
vcmpordsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpordsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpordsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpordsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpordsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpordsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpeq_uqsh k5{k7}, xmm29, xmm28
vcmpeq_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmpeq_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpeq_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpeq_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpeq_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpeq_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpeq_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnge_ussh k5{k7}, xmm29, xmm28
vcmpnge_ussh k5{k7}, xmm29, xmm28, {sae}
vcmpnge_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnge_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnge_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnge_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnge_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnge_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpngesh k5{k7}, xmm29, xmm28
vcmpngesh k5{k7}, xmm29, xmm28, {sae}
vcmpngesh k5{k7}, xmm29, WORD PTR [rcx]
vcmpngesh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpngesh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpngesh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpngesh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpngesh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpngt_ussh k5{k7}, xmm29, xmm28
vcmpngt_ussh k5{k7}, xmm29, xmm28, {sae}
vcmpngt_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmpngt_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpngt_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpngt_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpngt_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpngt_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpngtsh k5{k7}, xmm29, xmm28
vcmpngtsh k5{k7}, xmm29, xmm28, {sae}
vcmpngtsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpngtsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpngtsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpngtsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpngtsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpngtsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpfalse_oqsh k5{k7}, xmm29, xmm28
vcmpfalse_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmpfalse_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpfalse_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpfalse_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpfalse_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpfalse_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpfalse_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpfalsesh k5{k7}, xmm29, xmm28
vcmpfalsesh k5{k7}, xmm29, xmm28, {sae}
vcmpfalsesh k5{k7}, xmm29, WORD PTR [rcx]
vcmpfalsesh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpfalsesh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpfalsesh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpfalsesh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpfalsesh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpneq_oqsh k5{k7}, xmm29, xmm28
vcmpneq_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmpneq_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpneq_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpneq_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpneq_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpneq_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpneq_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpge_ossh k5{k7}, xmm29, xmm28
vcmpge_ossh k5{k7}, xmm29, xmm28, {sae}
vcmpge_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmpge_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpge_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpge_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpge_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpge_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpgesh k5{k7}, xmm29, xmm28
vcmpgesh k5{k7}, xmm29, xmm28, {sae}
vcmpgesh k5{k7}, xmm29, WORD PTR [rcx]
vcmpgesh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpgesh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpgesh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpgesh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpgesh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpgt_ossh k5{k7}, xmm29, xmm28
vcmpgt_ossh k5{k7}, xmm29, xmm28, {sae}
vcmpgt_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmpgt_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpgt_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpgt_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpgt_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpgt_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpgtsh k5{k7}, xmm29, xmm28
vcmpgtsh k5{k7}, xmm29, xmm28, {sae}
vcmpgtsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpgtsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpgtsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpgtsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpgtsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpgtsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmptrue_uqsh k5{k7}, xmm29, xmm28
vcmptrue_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmptrue_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmptrue_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmptrue_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmptrue_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmptrue_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmptrue_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmptruesh k5{k7}, xmm29, xmm28
vcmptruesh k5{k7}, xmm29, xmm28, {sae}
vcmptruesh k5{k7}, xmm29, WORD PTR [rcx]
vcmptruesh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmptruesh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmptruesh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmptruesh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmptruesh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpeq_ossh k5{k7}, xmm29, xmm28
vcmpeq_ossh k5{k7}, xmm29, xmm28, {sae}
vcmpeq_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmpeq_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpeq_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpeq_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpeq_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpeq_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmplt_oqsh k5{k7}, xmm29, xmm28
vcmplt_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmplt_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmplt_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmplt_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmplt_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmplt_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmplt_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmple_oqsh k5{k7}, xmm29, xmm28
vcmple_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmple_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmple_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmple_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmple_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmple_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmple_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpunord_ssh k5{k7}, xmm29, xmm28
vcmpunord_ssh k5{k7}, xmm29, xmm28, {sae}
vcmpunord_ssh k5{k7}, xmm29, WORD PTR [rcx]
vcmpunord_ssh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpunord_ssh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpunord_ssh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpunord_ssh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpunord_ssh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpneq_ussh k5{k7}, xmm29, xmm28
vcmpneq_ussh k5{k7}, xmm29, xmm28, {sae}
vcmpneq_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmpneq_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpneq_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpneq_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpneq_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpneq_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnlt_uqsh k5{k7}, xmm29, xmm28
vcmpnlt_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmpnlt_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnlt_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnlt_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnlt_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnlt_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnlt_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnle_uqsh k5{k7}, xmm29, xmm28
vcmpnle_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmpnle_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnle_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnle_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnle_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnle_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnle_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpord_ssh k5{k7}, xmm29, xmm28
vcmpord_ssh k5{k7}, xmm29, xmm28, {sae}
vcmpord_ssh k5{k7}, xmm29, WORD PTR [rcx]
vcmpord_ssh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpord_ssh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpord_ssh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpord_ssh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpord_ssh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpeq_ussh k5{k7}, xmm29, xmm28
vcmpeq_ussh k5{k7}, xmm29, xmm28, {sae}
vcmpeq_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmpeq_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpeq_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpeq_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpeq_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpeq_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpnge_uqsh k5{k7}, xmm29, xmm28
vcmpnge_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmpnge_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpnge_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpnge_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpnge_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpnge_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpnge_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpngt_uqsh k5{k7}, xmm29, xmm28
vcmpngt_uqsh k5{k7}, xmm29, xmm28, {sae}
vcmpngt_uqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpngt_uqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpngt_uqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpngt_uqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpngt_uqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpngt_uqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpfalse_ossh k5{k7}, xmm29, xmm28
vcmpfalse_ossh k5{k7}, xmm29, xmm28, {sae}
vcmpfalse_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmpfalse_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpfalse_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpfalse_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpfalse_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpfalse_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpneq_ossh k5{k7}, xmm29, xmm28
vcmpneq_ossh k5{k7}, xmm29, xmm28, {sae}
vcmpneq_ossh k5{k7}, xmm29, WORD PTR [rcx]
vcmpneq_ossh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpneq_ossh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpneq_ossh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpneq_ossh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpneq_ossh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpge_oqsh k5{k7}, xmm29, xmm28
vcmpge_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmpge_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpge_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpge_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpge_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpge_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpge_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmpgt_oqsh k5{k7}, xmm29, xmm28
vcmpgt_oqsh k5{k7}, xmm29, xmm28, {sae}
vcmpgt_oqsh k5{k7}, xmm29, WORD PTR [rcx]
vcmpgt_oqsh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmpgt_oqsh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmpgt_oqsh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmpgt_oqsh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmpgt_oqsh k5{k7}, xmm29, WORD PTR [rdx-1032]
vcmptrue_ussh k5{k7}, xmm29, xmm28
vcmptrue_ussh k5{k7}, xmm29, xmm28, {sae}
vcmptrue_ussh k5{k7}, xmm29, WORD PTR [rcx]
vcmptrue_ussh k5{k7}, xmm29, WORD PTR [rax+r14*8+0x1234]
vcmptrue_ussh k5{k7}, xmm29, WORD PTR [rdx+1016]
vcmptrue_ussh k5{k7}, xmm29, WORD PTR [rdx+1024]
vcmptrue_ussh k5{k7}, xmm29, WORD PTR [rdx-1024]
vcmptrue_ussh k5{k7}, xmm29, WORD PTR [rdx-1032]
# =====================================================================
# Source file boundary (extraction separator rewritten as a comment):
# gas/testsuite/gas/i386/avx512vbmi.s
# from repo stsp/binutils-ia16, 4,634 bytes
# =====================================================================
# Check 32bit AVX512VBMI instructions
#
# Assembler test input: each line below is paired with an expected
# encoding in the corresponding .d dump file, so mnemonics, operand
# values, and ordering are deliberate and must not be changed.
# "Disp8" marks a displacement that fits the EVEX compressed
# disp8*N form; the neighbouring line just past the limit forces
# the full 32-bit displacement encoding instead.
.allow_index_reg
.text
_start:
# AT&T syntax: plain, write-masked {%k7}, and zero-masked {%k7}{z}
# register forms, then memory forms around the disp8*64 boundary
# (+/-8128 encodes as disp8, +/-8192 and beyond does not).
vpermb %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpermb (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpermb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpermb 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermb 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermb -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermb -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermi2b %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpermi2b %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermi2b %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpermi2b (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpermi2b -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpermi2b 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermi2b 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermi2b -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermi2b -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermt2b %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpermt2b %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermt2b %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpermt2b (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpermt2b -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpermt2b 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermt2b 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpermt2b -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpermt2b -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
# vpmultishiftqb additionally exercises {1to8} embedded broadcast;
# broadcast disp8 scales by the element size (8), so the boundary
# pair is +/-1016 (disp8) vs +/-1024.
vpmultishiftqb %zmm4, %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpmultishiftqb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI
vpmultishiftqb (%ecx), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb (%eax){1to8}, %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb 8128(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb 8192(%edx), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb -8192(%edx), %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb -8256(%edx), %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI
vpmultishiftqb -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI Disp8
vpmultishiftqb -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI
# The same instructions again in Intel syntax (operand order
# reversed, no register prefixes) to test both parsers.
.intel_syntax noprefix
vpermb zmm6, zmm5, zmm4 # AVX512VBMI
vpermb zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpermb zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpermb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpermb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpermb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpermi2b zmm6, zmm5, zmm4 # AVX512VBMI
vpermi2b zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpermi2b zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpermi2b zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpermt2b zmm6, zmm5, zmm4 # AVX512VBMI
vpermt2b zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpermt2b zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpermt2b zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, zmm4 # AVX512VBMI
vpmultishiftqb zmm6{k7}, zmm5, zmm4 # AVX512VBMI
vpmultishiftqb zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [ecx] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, [eax]{1to8} # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx+8192] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx-8192] # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, ZMMWORD PTR [edx-8256] # AVX512VBMI
vpmultishiftqb zmm6, zmm5, [edx+1016]{1to8} # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, [edx+1024]{1to8} # AVX512VBMI
vpmultishiftqb zmm6, zmm5, [edx-1024]{1to8} # AVX512VBMI Disp8
vpmultishiftqb zmm6, zmm5, [edx-1032]{1to8} # AVX512VBMI
# =====================================================================
# Source file boundary (extraction separator rewritten as a comment):
# gas/testsuite/gas/i386/avx512vbmi2.s
# from repo stsp/binutils-ia16, 10,354 bytes
# =====================================================================
# Check 32bit AVX512VBMI2 instructions
#
# Assembler test input: each line is matched against an expected
# encoding in the companion .d dump file, so mnemonics and operand
# values are deliberate and must not be changed.  "Disp8" marks a
# displacement representable in the EVEX compressed disp8*N form
# (N = element size for broadcast/tuple1, full vector otherwise).
.allow_index_reg
.text
_start:
# AT&T syntax forms first: compress/expand (reg and memory
# destinations, with {%k7} masking and {z} zeroing), then the
# variable and immediate funnel-shift families.
vpcompressb %zmm6, (%ecx){%k7} # AVX512VBMI2
vpcompressb %zmm6, -123456(%esp,%esi,8) # AVX512VBMI2
vpcompressb %zmm6, 126(%edx) # AVX512VBMI2 Disp8
vpcompressb %zmm5, %zmm6 # AVX512VBMI2
vpcompressb %zmm5, %zmm6{%k7} # AVX512VBMI2
vpcompressb %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpcompressw %zmm6, (%ecx){%k7} # AVX512VBMI2
vpcompressw %zmm6, -123456(%esp,%esi,8) # AVX512VBMI2
vpcompressw %zmm6, 128(%edx) # AVX512VBMI2 Disp8
vpcompressw %zmm5, %zmm6 # AVX512VBMI2
vpcompressw %zmm5, %zmm6{%k7} # AVX512VBMI2
vpcompressw %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpexpandb (%ecx), %zmm6{%k7} # AVX512VBMI2
vpexpandb (%ecx), %zmm6{%k7}{z} # AVX512VBMI2
vpexpandb -123456(%esp,%esi,8), %zmm6 # AVX512VBMI2
vpexpandb 126(%edx), %zmm6 # AVX512VBMI2 Disp8
vpexpandb %zmm5, %zmm6 # AVX512VBMI2
vpexpandb %zmm5, %zmm6{%k7} # AVX512VBMI2
vpexpandb %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpexpandw (%ecx), %zmm6{%k7} # AVX512VBMI2
vpexpandw (%ecx), %zmm6{%k7}{z} # AVX512VBMI2
vpexpandw -123456(%esp,%esi,8), %zmm6 # AVX512VBMI2
vpexpandw 128(%edx), %zmm6 # AVX512VBMI2 Disp8
vpexpandw %zmm5, %zmm6 # AVX512VBMI2
vpexpandw %zmm5, %zmm6{%k7} # AVX512VBMI2
vpexpandw %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
# Variable-count funnel shifts; the d/q element forms also test
# {1to16}/{1to8} embedded broadcast with the matching disp8*4 and
# disp8*8 boundary displacements (508 and 1016).
vpshldvw %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshldvw %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshldvw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshldvw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshldvw 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldvd %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshldvd %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshldvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshldvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshldvd 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldvq %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshldvq %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshldvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshldvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshldvq 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdvw %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshrdvw %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshrdvw %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshrdvw -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshrdvw 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdvd %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshrdvd %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshrdvd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshrdvd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshrdvd 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdvd 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdvq %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshrdvq %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshrdvq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshrdvq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshrdvq 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdvq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
# Immediate-count funnel shifts ($0xab and $123 immediates).
vpshldw $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshldw $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshldw $123, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshldw $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshldw $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshldd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshldd $123, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshldd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshldd $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshldq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshldq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshldq $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshldq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdw $0xab, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshrdw $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshrdw $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshrdw $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshrdw $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdd $0xab, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshrdd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshrdd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshrdd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshrdd $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdd $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdq $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI2
vpshrdq $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512VBMI2
vpshrdq $123, %zmm4, %zmm5, %zmm6 # AVX512VBMI2
vpshrdq $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512VBMI2
vpshrdq $123, 128(%edx), %zmm5, %zmm6 # AVX512VBMI2 Disp8
vpshrdq $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512VBMI2 Disp8
# The same instruction set again in Intel syntax (operand order
# reversed, no register prefixes) to exercise both parsers.
.intel_syntax noprefix
vpcompressb ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512VBMI2
vpcompressb ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512VBMI2
vpcompressb ZMMWORD PTR [edx+126], zmm6 # AVX512VBMI2 Disp8
vpcompressb zmm6, zmm5 # AVX512VBMI2
vpcompressb zmm6{k7}, zmm5 # AVX512VBMI2
vpcompressb zmm6{k7}{z}, zmm5 # AVX512VBMI2
vpcompressw ZMMWORD PTR [ecx]{k7}, zmm6 # AVX512VBMI2
vpcompressw ZMMWORD PTR [esp+esi*8-123456], zmm6 # AVX512VBMI2
vpcompressw ZMMWORD PTR [edx+128], zmm6 # AVX512VBMI2 Disp8
vpcompressw zmm6, zmm5 # AVX512VBMI2
vpcompressw zmm6{k7}, zmm5 # AVX512VBMI2
vpcompressw zmm6{k7}{z}, zmm5 # AVX512VBMI2
vpexpandb zmm6{k7}, ZMMWORD PTR [ecx] # AVX512VBMI2
vpexpandb zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512VBMI2
vpexpandb zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpexpandb zmm6, ZMMWORD PTR [edx+126] # AVX512VBMI2 Disp8
vpexpandb zmm6, zmm5 # AVX512VBMI2
vpexpandb zmm6{k7}, zmm5 # AVX512VBMI2
vpexpandb zmm6{k7}{z}, zmm5 # AVX512VBMI2
vpexpandw zmm6{k7}, ZMMWORD PTR [ecx] # AVX512VBMI2
vpexpandw zmm6{k7}{z}, ZMMWORD PTR [ecx] # AVX512VBMI2
vpexpandw zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpexpandw zmm6, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpexpandw zmm6, zmm5 # AVX512VBMI2
vpexpandw zmm6{k7}, zmm5 # AVX512VBMI2
vpexpandw zmm6{k7}{z}, zmm5 # AVX512VBMI2
vpshldvw zmm6, zmm5, zmm4 # AVX512VBMI2
vpshldvw zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
vpshldvw zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
vpshldvw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpshldvw zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpshldvd zmm6, zmm5, zmm4 # AVX512VBMI2
vpshldvd zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
vpshldvd zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
vpshldvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpshldvd zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpshldvd zmm6, zmm5, [edx+508]{1to16} # AVX512VBMI2 Disp8
vpshldvq zmm6, zmm5, zmm4 # AVX512VBMI2
vpshldvq zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
vpshldvq zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
vpshldvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpshldvq zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpshldvq zmm6, zmm5, [edx+1016]{1to8} # AVX512VBMI2 Disp8
vpshrdvw zmm6, zmm5, zmm4 # AVX512VBMI2
vpshrdvw zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
vpshrdvw zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
vpshrdvw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpshrdvw zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpshrdvd zmm6, zmm5, zmm4 # AVX512VBMI2
vpshrdvd zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
vpshrdvd zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
vpshrdvd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpshrdvd zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpshrdvd zmm6, zmm5, [edx+508]{1to16} # AVX512VBMI2 Disp8
vpshrdvq zmm6, zmm5, zmm4 # AVX512VBMI2
vpshrdvq zmm6{k7}, zmm5, zmm4 # AVX512VBMI2
vpshrdvq zmm6{k7}{z}, zmm5, zmm4 # AVX512VBMI2
vpshrdvq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512VBMI2
vpshrdvq zmm6, zmm5, ZMMWORD PTR [edx+128] # AVX512VBMI2 Disp8
vpshrdvq zmm6, zmm5, [edx+1016]{1to8} # AVX512VBMI2 Disp8
vpshldw zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldw zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldw zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
vpshldw zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
vpshldd zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldd zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
vpshldd zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
vpshldd zmm6, zmm5, [edx+508]{1to16}, 123 # AVX512VBMI2 Disp8
vpshldq zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldq zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldq zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshldq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
vpshldq zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
vpshldq zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512VBMI2 Disp8
vpshrdw zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdw zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdw zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdw zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
vpshrdw zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
vpshrdd zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdd zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdd zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdd zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
vpshrdd zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
vpshrdd zmm6, zmm5, [edx+508]{1to16}, 123 # AVX512VBMI2 Disp8
vpshrdq zmm6, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdq zmm6{k7}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdq zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512VBMI2
vpshrdq zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512VBMI2
vpshrdq zmm6, zmm5, ZMMWORD PTR [edx+128], 123 # AVX512VBMI2 Disp8
vpshrdq zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512VBMI2 Disp8
# =====================================================================
# Source file boundary (extraction separator rewritten as a comment):
# gas/testsuite/gas/i386/avx512dq_vl.s
# from repo stsp/binutils-ia16, 95,745 bytes
# =====================================================================
# Check 32bit AVX512{DQ,VL} instructions
.allow_index_reg
.text
_start:
vbroadcastf64x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 (%ecx), %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcastf64x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf64x2 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf64x2 -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf64x2 -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 (%ecx), %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcasti64x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti64x2 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti64x2 -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti64x2 -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 %xmm7, %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 %xmm7, %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcastf32x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 1016(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf32x2 1024(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcastf32x2 -1024(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcastf32x2 -1032(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2qq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2qq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2uqq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtpd2uqq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtpd2uqq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtpd2uqq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2qq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 508(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -516(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq %xmm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2qq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq 508(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq 512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2qq -512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2qq -516(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2uqq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 508(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -516(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq %xmm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtps2uqq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq 508(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq 512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtps2uqq -512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtps2uqq -516(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2pd (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2pd (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2psx (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtqq2psy (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy 4064(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy 4096(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy -4096(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy -4128(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtqq2psy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtqq2psy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2pd (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2pd (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2pd -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2pd -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2psx (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psx -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psx -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvtuqq2psy (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2ps (%eax){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy 4064(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy 4096(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy -4096(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy -4128(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy 1016(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy 1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vcvtuqq2psy -1024(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvtuqq2psy -1032(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vextractf64x2 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm5, %xmm6{%k7} # AVX512{DQ,VL}
vfpclasspd $0xab, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, (%eax){1to2}, %k5{%k7} # AVX512{DQ,VL}
# --- vfpclasspd / vfpclassps (AVX512DQ+VL): classify packed doubles/singles
# --- into a mask register, under write-mask %k7.  The "Disp8" tag marks
# --- displacements that fit the EVEX compressed disp8*N encoding (N = vector
# --- or broadcast element width); the paired untagged line is one element
# --- past that range and must fall back to a full 32-bit displacement.
# --- x/y mnemonic suffixes force 128-/256-bit memory operand size where the
# --- register operands alone (a mask destination) cannot disambiguate it.
vfpclasspdx $123, 2032(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, 2048(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, -2048(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, -2064(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, 1016(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, 1024(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdx $123, -1024(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdx $123, -1032(%edx){1to2}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $0xab, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclasspd $123, (%eax){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, 4064(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, 4096(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, -4096(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, -4128(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, 1016(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, 1024(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspdy $123, -1024(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspdy $123, -1032(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $0xab, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, %xmm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, (%eax){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, 2032(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, 2048(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, -2048(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, -2064(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, 508(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, 512(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsx $123, -512(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsx $123, -516(%edx){1to4}, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $0xab, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, %ymm6, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, (%ecx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512{DQ,VL}
vfpclassps $123, (%eax){1to8}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, 4064(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, 4096(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, -4096(%edx), %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, -4128(%edx), %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, 508(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, 512(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL}
vfpclasspsy $123, -512(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL} Disp8
vfpclasspsy $123, -516(%edx){1to8}, %k5{%k7} # AVX512{DQ,VL}
# --- vinsertf64x2 / vinserti64x2 (AVX512DQ+VL): insert a 128-bit lane into
# --- a 256-bit register; exercises masking {%k7}, zero-masking {z}, both
# --- immediate spellings, SIB addressing, and the disp8*16 boundary
# --- (2032 = 127*16 fits compressed disp8; 2048 does not).
vinsertf64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vinsertf64x2 $123, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinsertf64x2 $123, 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinsertf64x2 $123, -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinsertf64x2 $123, -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $0xab, %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vinserti64x2 $123, %xmm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinserti64x2 $123, 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vinserti64x2 $123, -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vinserti64x2 $123, -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
# --- vbroadcasti32x2 (AVX512DQ+VL): broadcast a 64-bit (2 x dword) element
# --- to all lanes of a 128- or 256-bit destination; memory forms use an
# --- 8-byte element, so the disp8 window is +/-1016..1024 (disp8*8).
vbroadcasti32x2 %xmm7, %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %xmm6{%k7}{z} # AVX512{DQ,VL}
vbroadcasti32x2 (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 %xmm7, %ymm6{%k7}{z} # AVX512{DQ,VL}
vbroadcasti32x2 (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 1016(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 1024(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vbroadcasti32x2 -1024(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vbroadcasti32x2 -1032(%edx), %ymm6{%k7} # AVX512{DQ,VL}
# --- vpmullq (AVX512DQ+VL): packed 64-bit multiply, low result; covers
# --- reg/reg, full-width memory, embedded broadcast {1to2}/{1to4}, and the
# --- disp8*N compression boundaries for both 128- and 256-bit forms.
vpmullq %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vpmullq (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vpmullq %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vpmullq (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vpmullq -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vpmullq -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
# --- vrangepd / vrangeps (AVX512DQ+VL): range operation (min/max variants
# --- selected by the immediate); double forms broadcast 8-byte elements
# --- ({1to2}/{1to4}), single forms 4-byte elements ({1to4}/{1to8}), which
# --- is why the ps broadcast disp8 window is 508/512 instead of 1016/1024.
vrangepd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vrangepd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vrangepd $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangepd $123, -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangepd $123, -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vrangeps $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vrangeps $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vrangeps $123, -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vrangeps $123, -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
# --- vandpd/vandps and vandnpd/vandnps (AVX512DQ+VL): packed bitwise
# --- AND / AND-NOT with masking; AVX512DQ adds EVEX-encoded forms of these
# --- legacy SSE/AVX logicals, enabling {%k7}/{z} and embedded broadcast.
vandpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandnpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandnpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vandnps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vandnps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vandnps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vandnps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vandnps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
# --- vorpd/vorps and vxorpd/vxorps (AVX512DQ+VL): packed bitwise OR / XOR;
# --- same operand-pattern matrix as the AND group above (reg, mem, SIB,
# --- broadcast, masking, zeroing, disp8*N boundaries).
vorpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vorpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vorpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vorps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vorps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vorps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vorps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vorps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vorps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vxorpd (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd (%eax){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd 1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd -1024(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd -1032(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorpd %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vxorpd (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd (%eax){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd 1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorpd -1024(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorpd -1032(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps %xmm4, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vxorps (%ecx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorps 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorps -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorps 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vxorps -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vxorps %ymm4, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vxorps (%ecx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorps 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorps -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorps 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vxorps -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vxorps -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
# --- vreducepd / vreduceps (AVX512DQ+VL): reduce packed FP values by the
# --- number of fraction bits given in the immediate; two-operand form
# --- (src, dst) with masking, zeroing, broadcast, and disp8*N coverage.
vreducepd $0xab, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vreducepd $123, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $123, -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vreducepd $123, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vreducepd $123, -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreducepd $123, -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vreduceps $123, %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, (%eax){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, 508(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, 512(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $123, -512(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, -516(%edx){1to4}, %xmm6{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vreduceps $123, %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, (%eax){1to8}, %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, 508(%edx){1to8}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, 512(%edx){1to8}, %ymm6{%k7} # AVX512{DQ,VL}
vreduceps $123, -512(%edx){1to8}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vreduceps $123, -516(%edx){1to8}, %ymm6{%k7} # AVX512{DQ,VL}
# --- vextractf64x2 / vextracti64x2 (AVX512DQ+VL): extract a 128-bit lane
# --- from a 256-bit register to memory, with a merging write-mask on the
# --- memory destination; only the store forms are exercised here.
vextractf64x2 $0xab, %ymm5, (%ecx){%k7} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm5, (%ecx){%k7} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm5, -123456(%esp,%esi,8){%k7} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm5, 2032(%edx){%k7} # AVX512{DQ,VL} Disp8
vextractf64x2 $123, %ymm5, 2048(%edx){%k7} # AVX512{DQ,VL}
vextractf64x2 $123, %ymm5, -2048(%edx){%k7} # AVX512{DQ,VL} Disp8
vextractf64x2 $123, %ymm5, -2064(%edx){%k7} # AVX512{DQ,VL}
vextracti64x2 $0xab, %ymm5, (%ecx){%k7} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm5, (%ecx){%k7} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm5, -123456(%esp,%esi,8){%k7} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm5, 2032(%edx){%k7} # AVX512{DQ,VL} Disp8
vextracti64x2 $123, %ymm5, 2048(%edx){%k7} # AVX512{DQ,VL}
vextracti64x2 $123, %ymm5, -2048(%edx){%k7} # AVX512{DQ,VL} Disp8
vextracti64x2 $123, %ymm5, -2064(%edx){%k7} # AVX512{DQ,VL}
# --- vcvttpd2qq / vcvttpd2uqq / vcvttps2qq / vcvttps2uqq (AVX512DQ+VL):
# --- truncating conversions to packed (un)signed 64-bit integers.  The
# --- ps2qq/ps2uqq forms read only half-width memory operands (the source
# --- has half as many elements as the destination), so their full-vector
# --- disp8 windows are 1016/2032 and broadcast windows 508/512.
vcvttpd2qq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvttpd2qq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvttpd2qq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2qq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2qq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvttpd2uqq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq 2032(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq 2048(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq -2048(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq -2064(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq 1016(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq 1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq -1024(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq -1032(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %ymm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq %ymm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvttpd2uqq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq 4064(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq 4096(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq -4096(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq -4128(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq 1016(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq 1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttpd2uqq -1024(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttpd2uqq -1032(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvttps2qq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq 508(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq 512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq -512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq -516(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq %xmm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvttps2qq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq 508(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq 512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2qq -512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2qq -516(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm5, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm5, %xmm6{%k7}{z} # AVX512{DQ,VL}
vcvttps2uqq (%ecx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq (%eax){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq 1016(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq 1024(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq -1024(%edx), %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq -1032(%edx), %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq 508(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq 512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq -512(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq -516(%edx){1to2}, %xmm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm5, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq %xmm5, %ymm6{%k7}{z} # AVX512{DQ,VL}
vcvttps2uqq (%ecx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq (%eax){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq 2032(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq 2048(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq -2048(%edx), %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq -2064(%edx), %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq 508(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq 512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
vcvttps2uqq -512(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL} Disp8
vcvttps2uqq -516(%edx){1to4}, %ymm6{%k7} # AVX512{DQ,VL}
# --- vpmovd2m/vpmovq2m and vpmovm2d/vpmovm2q (AVX512DQ+VL): move between
# --- vector element sign bits and a mask register, in both directions,
# --- for 128- and 256-bit vector lengths; register-only encodings.
vpmovd2m %xmm6, %k5 # AVX512{DQ,VL}
vpmovd2m %ymm6, %k5 # AVX512{DQ,VL}
vpmovq2m %xmm6, %k5 # AVX512{DQ,VL}
vpmovq2m %ymm6, %k5 # AVX512{DQ,VL}
vpmovm2d %k5, %xmm6 # AVX512{DQ,VL}
vpmovm2d %k5, %ymm6 # AVX512{DQ,VL}
vpmovm2q %k5, %xmm6 # AVX512{DQ,VL}
vpmovm2q %k5, %ymm6 # AVX512{DQ,VL}
# --- Intel-syntax pass: re-assemble representative AVX512{DQ,VL} patterns
# --- under .intel_syntax noprefix to verify both parsers produce identical
# --- encodings (XMMWORD/YMMWORD PTR sizing, [base+disp] addressing, and
# --- Intel-style {1toN} broadcast spelling on unsized memory operands).
.intel_syntax noprefix
vbroadcastf64x2 ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcastf64x2 ymm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcastf64x2 ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vbroadcastf64x2 ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vbroadcastf64x2 ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vbroadcastf64x2 ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vbroadcastf64x2 ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vbroadcasti64x2 ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcasti64x2 ymm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcasti64x2 ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vbroadcasti64x2 ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vbroadcasti64x2 ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vbroadcasti64x2 ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vbroadcasti64x2 ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vbroadcastf32x2 ymm6{k7}, xmm7 # AVX512{DQ,VL}
vbroadcastf32x2 ymm6{k7}{z}, xmm7 # AVX512{DQ,VL}
vbroadcastf32x2 ymm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcastf32x2 ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vbroadcastf32x2 ymm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vbroadcastf32x2 ymm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vbroadcastf32x2 ymm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vbroadcastf32x2 ymm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvtpd2qq xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2qq xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvtpd2qq ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2qq ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvtpd2uqq xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtpd2uqq xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvtpd2uqq ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtpd2uqq ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vcvtps2qq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vcvtps2qq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, [edx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvtps2qq xmm6{k7}, [edx+512]{1to2} # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, [edx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvtps2qq xmm6{k7}, [edx-516]{1to2} # AVX512{DQ,VL}
vcvtps2qq xmm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvtps2qq ymm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtps2qq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtps2qq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvtps2qq ymm6{k7}, [edx+512]{1to4} # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvtps2qq ymm6{k7}, [edx-516]{1to4} # AVX512{DQ,VL}
vcvtps2qq ymm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvtps2uqq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vcvtps2uqq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vcvtps2uqq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, [edx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvtps2uqq xmm6{k7}, [edx+512]{1to2} # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, [edx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvtps2uqq xmm6{k7}, [edx-516]{1to2} # AVX512{DQ,VL}
vcvtps2uqq xmm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvtps2uqq ymm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtps2uqq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtps2uqq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvtps2uqq ymm6{k7}, [edx+512]{1to4} # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvtps2uqq ymm6{k7}, [edx-516]{1to4} # AVX512{DQ,VL}
vcvtps2uqq ymm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvtqq2pd xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtqq2pd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtqq2pd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtqq2pd xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvtqq2pd xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtqq2pd xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvtqq2pd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvtqq2pd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtqq2pd ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvtqq2pd ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtqq2pd ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, ymm5 # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{DQ,VL}
vcvtqq2ps xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtqq2ps xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtuqq2pd xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtuqq2pd xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtuqq2pd xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvtuqq2pd xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtuqq2pd xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvtuqq2pd ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvtuqq2pd ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtuqq2pd ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvtuqq2pd ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtuqq2pd ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx+1024]{1to2} # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx-1032]{1to2} # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, ymm5 # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx+1024]{1to4} # AVX512{DQ,VL}
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvtuqq2ps xmm6{k7}, QWORD BCST [edx-1032]{1to4} # AVX512{DQ,VL}
vextractf64x2 xmm6{k7}, ymm5, 0xab # AVX512{DQ,VL}
vextractf64x2 xmm6{k7}{z}, ymm5, 0xab # AVX512{DQ,VL}
vextractf64x2 xmm6{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 xmm6{k7}, ymm5, 0xab # AVX512{DQ,VL}
vextracti64x2 xmm6{k7}{z}, ymm5, 0xab # AVX512{DQ,VL}
vextracti64x2 xmm6{k7}, ymm5, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, xmm6, 0xab # AVX512{DQ,VL}
vfpclasspd k5{k7}, xmm6, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, [eax]{1to2}, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, QWORD BCST [edx+1016]{1to2}, 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, QWORD BCST [edx+1024]{1to2}, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, QWORD BCST [edx-1024]{1to2}, 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, QWORD BCST [edx-1032]{1to2}, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, ymm6, 0xab # AVX512{DQ,VL}
vfpclasspd k5{k7}, ymm6, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, [eax]{1to4}, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, QWORD BCST [edx+1016]{1to4}, 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, QWORD BCST [edx+1024]{1to4}, 123 # AVX512{DQ,VL}
vfpclasspd k5{k7}, QWORD BCST [edx-1024]{1to4}, 123 # AVX512{DQ,VL} Disp8
vfpclasspd k5{k7}, QWORD BCST [edx-1032]{1to4}, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, xmm6, 0xab # AVX512{DQ,VL}
vfpclassps k5{k7}, xmm6, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, [eax]{1to4}, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, DWORD BCST [edx+508]{1to4}, 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, DWORD BCST [edx+512]{1to4}, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, DWORD BCST [edx-512]{1to4}, 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, DWORD BCST [edx-516]{1to4}, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, ymm6, 0xab # AVX512{DQ,VL}
vfpclassps k5{k7}, ymm6, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, [eax]{1to8}, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, DWORD BCST [edx+508]{1to8}, 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, DWORD BCST [edx+512]{1to8}, 123 # AVX512{DQ,VL}
vfpclassps k5{k7}, DWORD BCST [edx-512]{1to8}, 123 # AVX512{DQ,VL} Disp8
vfpclassps k5{k7}, DWORD BCST [edx-516]{1to8}, 123 # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}, ymm5, xmm4, 0xab # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}{z}, ymm5, xmm4, 0xab # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}, ymm5, xmm4, 123 # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vinsertf64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vinsertf64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vinsertf64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}, ymm5, xmm4, 0xab # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}{z}, ymm5, xmm4, 0xab # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}, ymm5, xmm4, 123 # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}, ymm5, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vinserti64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vinserti64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vinserti64x2 ymm6{k7}, ymm5, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vbroadcasti32x2 xmm6{k7}, xmm7 # AVX512{DQ,VL}
vbroadcasti32x2 xmm6{k7}{z}, xmm7 # AVX512{DQ,VL}
vbroadcasti32x2 xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcasti32x2 xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vbroadcasti32x2 xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vbroadcasti32x2 xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vbroadcasti32x2 xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vbroadcasti32x2 xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vbroadcasti32x2 ymm6{k7}, xmm7 # AVX512{DQ,VL}
vbroadcasti32x2 ymm6{k7}{z}, xmm7 # AVX512{DQ,VL}
vbroadcasti32x2 ymm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vbroadcasti32x2 ymm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vbroadcasti32x2 ymm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vbroadcasti32x2 ymm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vbroadcasti32x2 ymm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vbroadcasti32x2 ymm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vpmullq xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vpmullq xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vpmullq xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vpmullq xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vpmullq xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vpmullq xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vpmullq ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vpmullq ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vpmullq ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vpmullq ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vpmullq ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vpmullq ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{DQ,VL}
vrangepd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, xmm4, 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, [eax]{1to2}, 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vrangepd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vrangepd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{DQ,VL} Disp8
vrangepd xmm6{k7}, xmm5, [edx+1024]{1to2}, 123 # AVX512{DQ,VL}
vrangepd xmm6{k7}, xmm5, [edx-1024]{1to2}, 123 # AVX512{DQ,VL} Disp8
vrangepd xmm6{k7}, xmm5, [edx-1032]{1to2}, 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{DQ,VL}
vrangepd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, ymm4, 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, [eax]{1to4}, 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vrangepd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vrangepd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{DQ,VL} Disp8
vrangepd ymm6{k7}, ymm5, [edx+1024]{1to4}, 123 # AVX512{DQ,VL}
vrangepd ymm6{k7}, ymm5, [edx-1024]{1to4}, 123 # AVX512{DQ,VL} Disp8
vrangepd ymm6{k7}, ymm5, [edx-1032]{1to4}, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, xmm4, 0xab # AVX512{DQ,VL}
vrangeps xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, xmm4, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, [eax]{1to4}, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, [edx+512]{1to4}, 123 # AVX512{DQ,VL}
vrangeps xmm6{k7}, xmm5, [edx-512]{1to4}, 123 # AVX512{DQ,VL} Disp8
vrangeps xmm6{k7}, xmm5, [edx-516]{1to4}, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, ymm4, 0xab # AVX512{DQ,VL}
vrangeps ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, ymm4, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, [eax]{1to8}, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, [edx+512]{1to8}, 123 # AVX512{DQ,VL}
vrangeps ymm6{k7}, ymm5, [edx-512]{1to8}, 123 # AVX512{DQ,VL} Disp8
vrangeps ymm6{k7}, ymm5, [edx-516]{1to8}, 123 # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vandpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vandpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vandpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vandpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vandps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vandps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vandps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vandps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandnpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vandnpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vandnpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandnpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vandnpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vandnpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vandnps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vandnps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vandnps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vandnps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vandnps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vandnps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vorpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vorpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vorpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vorpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vorpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vorpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vorps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vorps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vorps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vorps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vorps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vorps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vxorpd xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, [eax]{1to2} # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, [edx+1024]{1to2} # AVX512{DQ,VL}
vxorpd xmm6{k7}, xmm5, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vxorpd xmm6{k7}, xmm5, [edx-1032]{1to2} # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vxorpd ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, [eax]{1to4} # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, [edx+1024]{1to4} # AVX512{DQ,VL}
vxorpd ymm6{k7}, ymm5, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vxorpd ymm6{k7}, ymm5, [edx-1032]{1to4} # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, xmm4 # AVX512{DQ,VL}
vxorps xmm6{k7}{z}, xmm5, xmm4 # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, [eax]{1to4} # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{DQ,VL}
vxorps xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vxorps xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, ymm4 # AVX512{DQ,VL}
vxorps ymm6{k7}{z}, ymm5, ymm4 # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, [eax]{1to8} # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{DQ,VL}
vxorps ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{DQ,VL} Disp8
vxorps ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{DQ,VL}
vreducepd xmm6{k7}, xmm5, 0xab # AVX512{DQ,VL}
vreducepd xmm6{k7}{z}, xmm5, 0xab # AVX512{DQ,VL}
vreducepd xmm6{k7}, xmm5, 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, [eax]{1to2}, 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, [edx+1016]{1to2}, 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, [edx+1024]{1to2}, 123 # AVX512{DQ,VL}
vreducepd xmm6{k7}, [edx-1024]{1to2}, 123 # AVX512{DQ,VL} Disp8
vreducepd xmm6{k7}, [edx-1032]{1to2}, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, ymm5, 0xab # AVX512{DQ,VL}
vreducepd ymm6{k7}{z}, ymm5, 0xab # AVX512{DQ,VL}
vreducepd ymm6{k7}, ymm5, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, [eax]{1to4}, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, [edx+1016]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, [edx+1024]{1to4}, 123 # AVX512{DQ,VL}
vreducepd ymm6{k7}, [edx-1024]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreducepd ymm6{k7}, [edx-1032]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, xmm5, 0xab # AVX512{DQ,VL}
vreduceps xmm6{k7}{z}, xmm5, 0xab # AVX512{DQ,VL}
vreduceps xmm6{k7}, xmm5, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, [eax]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, [edx+508]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, [edx+512]{1to4}, 123 # AVX512{DQ,VL}
vreduceps xmm6{k7}, [edx-512]{1to4}, 123 # AVX512{DQ,VL} Disp8
vreduceps xmm6{k7}, [edx-516]{1to4}, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, ymm5, 0xab # AVX512{DQ,VL}
vreduceps ymm6{k7}{z}, ymm5, 0xab # AVX512{DQ,VL}
vreduceps ymm6{k7}, ymm5, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, [eax]{1to8}, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, [edx+508]{1to8}, 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, [edx+512]{1to8}, 123 # AVX512{DQ,VL}
vreduceps ymm6{k7}, [edx-512]{1to8}, 123 # AVX512{DQ,VL} Disp8
vreduceps ymm6{k7}, [edx-516]{1to8}, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextractf64x2 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextractf64x2 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextractf64x2 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [ecx]{k7}, ymm5, 0xab # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [ecx]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [esp+esi*8-123456]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [edx+2032]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [edx+2048]{k7}, ymm5, 123 # AVX512{DQ,VL}
vextracti64x2 XMMWORD PTR [edx-2048]{k7}, ymm5, 123 # AVX512{DQ,VL} Disp8
vextracti64x2 XMMWORD PTR [edx-2064]{k7}, ymm5, 123 # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2qq xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2qq xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2qq ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2qq ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, [edx+1016]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, [edx+1024]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq xmm6{k7}, [edx-1024]{1to2} # AVX512{DQ,VL} Disp8
vcvttpd2uqq xmm6{k7}, [edx-1032]{1to2} # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, ymm5 # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}{z}, ymm5 # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, [edx+1016]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, [edx+1024]{1to4} # AVX512{DQ,VL}
vcvttpd2uqq ymm6{k7}, [edx-1024]{1to4} # AVX512{DQ,VL} Disp8
vcvttpd2uqq ymm6{k7}, [edx-1032]{1to4} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, [edx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, [edx+512]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, [edx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2qq xmm6{k7}, [edx-516]{1to2} # AVX512{DQ,VL}
vcvttps2qq xmm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, [edx+512]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2qq ymm6{k7}, [edx-516]{1to4} # AVX512{DQ,VL}
vcvttps2qq ymm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, [eax]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [edx+1016] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, QWORD PTR [edx+1024] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, QWORD PTR [edx-1024] # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, QWORD PTR [edx-1032] # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, [edx+508]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, [edx+512]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, [edx-512]{1to2} # AVX512{DQ,VL} Disp8
vcvttps2uqq xmm6{k7}, [edx-516]{1to2} # AVX512{DQ,VL}
vcvttps2uqq xmm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}{z}, xmm5 # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [ecx] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, [eax]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, [edx+508]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, [edx+512]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, [edx-512]{1to4} # AVX512{DQ,VL} Disp8
vcvttps2uqq ymm6{k7}, [edx-516]{1to4} # AVX512{DQ,VL}
vcvttps2uqq ymm6{k7}, DWORD BCST [edx+508] # AVX512{DQ,VL} Disp8
vpmovd2m k5, xmm6 # AVX512{DQ,VL}
vpmovd2m k5, ymm6 # AVX512{DQ,VL}
vpmovq2m k5, xmm6 # AVX512{DQ,VL}
vpmovq2m k5, ymm6 # AVX512{DQ,VL}
vpmovm2d xmm6, k5 # AVX512{DQ,VL}
vpmovm2d ymm6, k5 # AVX512{DQ,VL}
vpmovm2q xmm6, k5 # AVX512{DQ,VL}
vpmovm2q ymm6, k5 # AVX512{DQ,VL}
|
stsp/binutils-ia16
| 1,482
|
gas/testsuite/gas/i386/string-ok.s
|
# Test fixture: legal operand spellings for x86 string instructions
# (cmps/ins/lods/movs/outs/scas/stos/xlat) in both AT&T and Intel syntax,
# across .code32/.code16/.code64.  Exercises redundant explicit operands,
# segment overrides on the permitted operand, and mixed address sizes.
# NOTE(review): presumably paired with an expected-dump (.d) file — the
# instruction sequence must not be altered; confirm before editing.
	.text
	.code32
start32:
	cmpsb	(%edi), %cs:(%esi)
	cmpsb	%es:(%edi), (%esi)
	cmpsb	(%di), (%si)
	cmpsb	(%esi), (%edi)
	insb	(%dx), %es:(%edi)
	insb	(%dx), (%esi)
	lodsb	%cs:(%esi)
	lodsb	(%edi)
	movsb	%cs:(%esi), (%edi)
	movsb	(%esi), %es:(%edi)
	movsb	(%si), (%di)
	movsb	(%ebx), (%edi)
	movsb	(%esi), (%ebx)
	outsb	%cs:(%esi), (%dx)
	outsb	(%edi), (%dx)
	scasb	%es:(%edi)
	scasb	(%esi)
	stosb	%es:(%edi)
	stosb	(%esi)
	xlat	(%ebx)
	xlat	(%bx)
	xlat	%ds:(%ebx)
	xlatb
	xlatb	(%ebx)
	xlatb	%cs:(%ebx)
	# 16-bit mode: 16- and 32-bit address-size forms.
	.code16
start16:
	cmpsb	(%di), (%si)
	movsb	(%esi), (%edi)
	# 64-bit mode: 64- and 32-bit address-size forms.
	.code64
start64:
	cmpsb	(%rdi), (%rsi)
	movsb	(%esi), (%edi)
	# Same coverage repeated in Intel syntax.
	.intel_syntax noprefix
	.code32
intel32:
	cmps	byte ptr cs:[esi], [edi]
	cmps	byte ptr [esi], es:[edi]
	cmps	byte ptr [esi], byte ptr [edi]
	cmps	byte ptr [si], [di]
	cmps	byte ptr [edi], [esi]
	ins	byte ptr es:[edi], dx
	ins	byte ptr [esi], dx
	lods	byte ptr cs:[esi]
	lods	byte ptr [edi]
	movs	byte ptr [edi], cs:[esi]
	movs	byte ptr es:[edi], [esi]
	movs	byte ptr [edi], byte ptr [esi]
	movs	byte ptr [di], [si]
	movs	byte ptr [edi], [ebx]
	movs	byte ptr [ebx], [esi]
	outs	dx, byte ptr cs:[esi]
	outs	dx, byte ptr [edi]
	scas	byte ptr es:[edi]
	scas	byte ptr [esi]
	stos	byte ptr es:[edi]
	stos	byte ptr [esi]
	xlatb
	xlat	[bx]
	xlat	ds:[ebx]
	xlat	byte ptr [ebx]
	xlat	byte ptr cs:[ebx]
	.code16
intel16:
	cmps	byte ptr [si], [di]
	movs	byte ptr [edi], [esi]
	.code64
intel64:
	cmps	byte ptr [rsi], [rdi]
	movs	byte ptr [edi], [esi]
|
stsp/binutils-ia16
| 5,379
|
gas/testsuite/gas/i386/avx512bitalg_vl.s
|
# Check 32bit AVX512{BITALG,VL} instructions
# Test fixture: assembles every AVX512{BITALG,VL} form (vpshufbitqmb,
# vpopcnt{b,w,d,q}) with register, masked, zero-masked, SIB, Disp8*N
# compressed-displacement and embedded-broadcast operands, first in AT&T
# then in Intel syntax.
# NOTE(review): presumably paired with an expected-dump (.d) file — the
# instruction sequence must not be altered; confirm before editing.
	.allow_index_reg
	.text
_start:
	vpshufbitqmb	%xmm4, %xmm5, %k5{%k7}	 # AVX512{BITALG,VL}
	vpshufbitqmb	-123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512{BITALG,VL}
	vpshufbitqmb	2032(%edx), %xmm5, %k5{%k7}	 # AVX512{BITALG,VL} Disp8
	vpshufbitqmb	%ymm4, %ymm5, %k5{%k7}	 # AVX512{BITALG,VL}
	vpshufbitqmb	-123456(%esp,%esi,8), %ymm5, %k5{%k7}	 # AVX512{BITALG,VL}
	vpshufbitqmb	4064(%edx), %ymm5, %k5{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntb	%xmm5, %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntb	%xmm5, %xmm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntb	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntb	2032(%edx), %xmm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntb	%ymm5, %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntb	%ymm5, %ymm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntb	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntb	4064(%edx), %ymm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntw	%xmm5, %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntw	%xmm5, %xmm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntw	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntw	2032(%edx), %xmm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntw	%ymm5, %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntw	%ymm5, %ymm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntw	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntw	4064(%edx), %ymm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntd	%xmm5, %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntd	%xmm5, %xmm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntd	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntd	2032(%edx), %xmm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntd	508(%edx){1to4}, %xmm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntd	%ymm5, %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntd	%ymm5, %ymm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntd	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntd	4064(%edx), %ymm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntd	508(%edx){1to8}, %ymm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntq	%xmm5, %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntq	%xmm5, %xmm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntq	-123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntq	2032(%edx), %xmm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntq	1016(%edx){1to2}, %xmm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntq	%ymm5, %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntq	%ymm5, %ymm6{%k7}{z}	 # AVX512{BITALG,VL}
	vpopcntq	-123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BITALG,VL}
	vpopcntq	4064(%edx), %ymm6{%k7}	 # AVX512{BITALG,VL} Disp8
	vpopcntq	1016(%edx){1to4}, %ymm6{%k7}	 # AVX512{BITALG,VL} Disp8
	# Same forms repeated in Intel syntax.
	.intel_syntax noprefix
	vpshufbitqmb	k5{k7}, xmm5, xmm4	 # AVX512{BITALG,VL}
	vpshufbitqmb	k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpshufbitqmb	k5{k7}, xmm5, XMMWORD PTR [edx+2032]	 # AVX512{BITALG,VL} Disp8
	vpshufbitqmb	k5{k7}, ymm5, ymm4	 # AVX512{BITALG,VL}
	vpshufbitqmb	k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpshufbitqmb	k5{k7}, ymm5, YMMWORD PTR [edx+4064]	 # AVX512{BITALG,VL} Disp8
	vpopcntb	xmm6{k7}, xmm5	 # AVX512{BITALG,VL}
	vpopcntb	xmm6{k7}{z}, xmm5	 # AVX512{BITALG,VL}
	vpopcntb	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntb	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BITALG,VL} Disp8
	vpopcntb	ymm6{k7}, ymm5	 # AVX512{BITALG,VL}
	vpopcntb	ymm6{k7}{z}, ymm5	 # AVX512{BITALG,VL}
	vpopcntb	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntb	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{BITALG,VL} Disp8
	vpopcntw	xmm6{k7}, xmm5	 # AVX512{BITALG,VL}
	vpopcntw	xmm6{k7}{z}, xmm5	 # AVX512{BITALG,VL}
	vpopcntw	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntw	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BITALG,VL} Disp8
	vpopcntw	ymm6{k7}, ymm5	 # AVX512{BITALG,VL}
	vpopcntw	ymm6{k7}{z}, ymm5	 # AVX512{BITALG,VL}
	vpopcntw	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntw	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{BITALG,VL} Disp8
	vpopcntd	xmm6{k7}, xmm5	 # AVX512{BITALG,VL}
	vpopcntd	xmm6{k7}{z}, xmm5	 # AVX512{BITALG,VL}
	vpopcntd	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntd	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BITALG,VL} Disp8
	vpopcntd	xmm6{k7}, [edx+508]{1to4}	 # AVX512{BITALG,VL} Disp8
	vpopcntd	xmm6{k7}, DWORD BCST [edx]	 # AVX512{BITALG,VL}
	vpopcntd	ymm6{k7}, ymm5	 # AVX512{BITALG,VL}
	vpopcntd	ymm6{k7}{z}, ymm5	 # AVX512{BITALG,VL}
	vpopcntd	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntd	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{BITALG,VL} Disp8
	vpopcntd	ymm6{k7}, [edx+508]{1to8}	 # AVX512{BITALG,VL} Disp8
	vpopcntd	ymm6{k7}, DWORD BCST [edx]	 # AVX512{BITALG,VL}
	vpopcntq	xmm6{k7}, xmm5	 # AVX512{BITALG,VL}
	vpopcntq	xmm6{k7}{z}, xmm5	 # AVX512{BITALG,VL}
	vpopcntq	xmm6{k7}, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntq	xmm6{k7}, XMMWORD PTR [edx+2032]	 # AVX512{BITALG,VL} Disp8
	vpopcntq	xmm6{k7}, [edx+1016]{1to2}	 # AVX512{BITALG,VL} Disp8
	vpopcntq	xmm6{k7}, QWORD BCST [edx]	 # AVX512{BITALG,VL}
	vpopcntq	ymm6{k7}, ymm5	 # AVX512{BITALG,VL}
	vpopcntq	ymm6{k7}{z}, ymm5	 # AVX512{BITALG,VL}
	vpopcntq	ymm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{BITALG,VL}
	vpopcntq	ymm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512{BITALG,VL} Disp8
	vpopcntq	ymm6{k7}, [edx+1016]{1to4}	 # AVX512{BITALG,VL} Disp8
	vpopcntq	ymm6{k7}, QWORD BCST [edx]	 # AVX512{BITALG,VL}
|
stsp/binutils-ia16
| 3,303
|
gas/testsuite/gas/i386/avx512ifma.s
|
# Check 32bit AVX512IFMA instructions
# Test fixture: assembles vpmadd52luq/vpmadd52huq with register, masked,
# zero-masked, SIB, Disp8*N boundary (8128/8192, -8192/-8256;
# 1016/1024, -1024/-1032 for {1to8}) and embedded-broadcast operands,
# first in AT&T then in Intel syntax.
# NOTE(review): presumably paired with an expected-dump (.d) file — the
# instruction sequence must not be altered; confirm before editing.
	.allow_index_reg
	.text
_start:
	vpmadd52luq	%zmm4, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512IFMA
	vpmadd52luq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512IFMA
	vpmadd52luq	(%ecx), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	8128(%edx), %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52luq	8192(%edx), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	-8192(%edx), %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52luq	-8256(%edx), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52luq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52luq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52luq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	%zmm4, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512IFMA
	vpmadd52huq	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512IFMA
	vpmadd52huq	(%ecx), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	(%eax){1to8}, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	8128(%edx), %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52huq	8192(%edx), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	-8192(%edx), %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52huq	-8256(%edx), %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52huq	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA
	vpmadd52huq	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA Disp8
	vpmadd52huq	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512IFMA
	# Same forms repeated in Intel syntax.
	.intel_syntax noprefix
	vpmadd52luq	zmm6, zmm5, zmm4	 # AVX512IFMA
	vpmadd52luq	zmm6{k7}, zmm5, zmm4	 # AVX512IFMA
	vpmadd52luq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, [eax]{1to8}	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512IFMA Disp8
	vpmadd52luq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512IFMA Disp8
	vpmadd52luq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, [edx+1016]{1to8}	 # AVX512IFMA Disp8
	vpmadd52luq	zmm6, zmm5, [edx+1024]{1to8}	 # AVX512IFMA
	vpmadd52luq	zmm6, zmm5, [edx-1024]{1to8}	 # AVX512IFMA Disp8
	vpmadd52luq	zmm6, zmm5, [edx-1032]{1to8}	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, zmm4	 # AVX512IFMA
	vpmadd52huq	zmm6{k7}, zmm5, zmm4	 # AVX512IFMA
	vpmadd52huq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, [eax]{1to8}	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512IFMA Disp8
	vpmadd52huq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512IFMA Disp8
	vpmadd52huq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, [edx+1016]{1to8}	 # AVX512IFMA Disp8
	vpmadd52huq	zmm6, zmm5, [edx+1024]{1to8}	 # AVX512IFMA
	vpmadd52huq	zmm6, zmm5, [edx-1024]{1to8}	 # AVX512IFMA Disp8
	vpmadd52huq	zmm6, zmm5, [edx-1032]{1to8}	 # AVX512IFMA
|
stsp/binutils-ia16
| 1,179
|
gas/testsuite/gas/i386/x86-64-adx.s
|
# Check 64 bit ADX instructions.
# Test fixture: assembles adcx/adox in 32- and 64-bit operand forms
# (register-register, register-memory, SIB, 32-bit address-size
# overrides) in AT&T and Intel syntax.
# NOTE(review): presumably paired with an expected-dump (.d) file — the
# instruction sequence must not be altered; confirm before editing.
	.allow_index_reg
	.text
_start:
	adcx    400(%ecx), %eax
	adcx    %edx, %ecx
	adcx    -654321(%esp,%esi,8), %edx
	adcx    (%eax), %eax
	adcxl   %edx, %ecx
	adcxl   (%eax), %eax
	adcx    400(%rcx), %r11
	adcx    %r14, %r12
	adcx    -654321(%esp,%esi,8), %rdx
	adcx    (%r8), %rax
	adcxq   %rdx, %rcx
	adcxq   (%rax), %rax
	adox    400(%ecx), %eax
	adox    %edx, %ecx
	adox    -654321(%esp,%esi,8), %edx
	adox    (%eax), %eax
	adoxl   %edx, %ecx
	adoxl   (%eax), %eax
	adox    400(%rcx), %r11
	adox    %r14, %r12
	adox    -654321(%esp,%esi,8), %rdx
	adox    (%r8), %rax
	adoxq   %rdx, %rcx
	adoxq   (%rax), %rax
	# Same coverage repeated in Intel syntax.
	.intel_syntax noprefix
	adcx    eax, DWORD PTR [edx+399]
	adcx    edx, ecx
	adcx    edx, DWORD PTR [esp+esi*8-123456]
	adcx    eax, DWORD PTR [eax]
	adcx    rax, QWORD PTR [r11+399]
	adcx    rdx, r9
	adcx    rdx, QWORD PTR [rsp+rsi*8-123456]
	adcx    rax, [rbx]
	adox    eax, DWORD PTR [edx+399]
	adox    edx, ecx
	adox    edx, DWORD PTR [esp+esi*8-123456]
	adox    eax, DWORD PTR [eax]
	adox    rax, QWORD PTR [r11+399]
	adox    rdx, r9
	adox    rdx, QWORD PTR [rsp+rsi*8-123456]
	adox    rax, QWORD PTR [rbx]
|
stsp/binutils-ia16
| 1,505
|
gas/testsuite/gas/i386/avx512f_vaes-wig.s
|
# Check 32bit AVX512F,VAES WIG instructions
# Test fixture: assembles the 512-bit VAES forms (vaesdec, vaesdeclast,
# vaesenc, vaesenclast) with register, SIB and Disp8*N operands, in
# AT&T then Intel syntax.
# NOTE(review): presumably paired with an expected-dump (.d) file — the
# instruction sequence must not be altered; confirm before editing.
	.allow_index_reg
	.text
_start:
	vaesdec	%zmm4, %zmm5, %zmm6	 # AVX512F,VAES
	vaesdec	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F,VAES
	vaesdec	8128(%edx), %zmm5, %zmm6	 # AVX512F,VAES Disp8
	vaesdeclast	%zmm4, %zmm5, %zmm6	 # AVX512F,VAES
	vaesdeclast	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F,VAES
	vaesdeclast	8128(%edx), %zmm5, %zmm6	 # AVX512F,VAES Disp8
	vaesenc	%zmm4, %zmm5, %zmm6	 # AVX512F,VAES
	vaesenc	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F,VAES
	vaesenc	8128(%edx), %zmm5, %zmm6	 # AVX512F,VAES Disp8
	vaesenclast	%zmm4, %zmm5, %zmm6	 # AVX512F,VAES
	vaesenclast	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512F,VAES
	vaesenclast	8128(%edx), %zmm5, %zmm6	 # AVX512F,VAES Disp8
	# Same forms repeated in Intel syntax.
	.intel_syntax noprefix
	vaesdec	zmm6, zmm5, zmm4	 # AVX512F,VAES
	vaesdec	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F,VAES
	vaesdec	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F,VAES Disp8
	vaesdeclast	zmm6, zmm5, zmm4	 # AVX512F,VAES
	vaesdeclast	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F,VAES
	vaesdeclast	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F,VAES Disp8
	vaesenc	zmm6, zmm5, zmm4	 # AVX512F,VAES
	vaesenc	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F,VAES
	vaesenc	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F,VAES Disp8
	vaesenclast	zmm6, zmm5, zmm4	 # AVX512F,VAES
	vaesenclast	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512F,VAES
	vaesenclast	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512F,VAES Disp8
|
stsp/binutils-ia16
| 1,777
|
gas/testsuite/gas/i386/x86-64-mpx-inval-2.s
|
# MPX instructions
# Negative test fixture: every operand form below is deliberately
# INVALID for 64-bit MPX (32-bit address registers/eip, bad base/index
# combinations, wrong-width register operands).  The assembler is
# expected to reject these — do NOT "correct" the operands.
# NOTE(review): presumably paired with an expected-error (.l/.d) file;
# confirm before editing.
	.allow_index_reg
	.text
### bndmk
	bndmk (%eax), %bnd1
	bndmk 0x3(%ecx,%ebx,1), %bnd1
	bndmk (%rip), %bnd3
	bndmk (%eip), %bnd2
### bndmov
	bndmov (%r8d), %bnd1
	bndmov 0x3(%r9d,%edx,1), %bnd1
	bndmov %bnd1, (%eax)
	bndmov %bnd1, 0x3(%ecx,%eax,1)
### bndcl
	bndcl (%ecx), %bnd1
	bndcl 0x3(%ecx,%eax,1), %bnd1
	bndcl %ecx, %bnd1
	bndcl %cx, %bnd1
### bndcu
	bndcu (%ecx), %bnd1
	bndcu 0x3(%ecx,%eax,1), %bnd1
	bndcu %ecx, %bnd1
	bndcu %cx, %bnd1
### bndcn
	bndcn (%ecx), %bnd1
	bndcn 0x3(%ecx,%eax,1), %bnd1
	bndcn %ecx, %bnd1
	bndcn %cx, %bnd1
### bndstx
	bndstx %bnd0, 0x3(%eax,%ebx,1)
	bndstx %bnd2, 3(%ebx,1)
	bndstx %bnd1, (%r15,%rax,2)
	bndstx %bnd3, base(%rip)
	bndstx %bnd1, base(%eip)
### bndldx
	bndldx 0x3(%eax,%ebx,1), %bnd0
	bndldx 3(%ebx,1), %bnd2
	bndldx (%rax,%r15,4), %bnd3
	bndldx base(%rip), %bnd1
	bndldx base(%eip), %bnd3
	# Same invalid forms repeated in Intel syntax.
	.intel_syntax noprefix
	bndmk bnd1, [eax]
	bndmk bnd1, [edx+1*eax+0x3]
	bndmk bnd3, [rip]
	bndmk bnd2, [eip]
	bndmk bnd2, [rax+rsp]
### bndmov
	bndmov bnd1, [eax]
	bndmov bnd1, [edx+1*eax+0x3]
	bndmov [eax], bnd1
	bndmov [edx+1*eax+0x3], bnd1
### bndcl
	bndcl bnd1, [eax]
	bndcl bnd1, [edx+1*eax+0x3]
	bndcl bnd1, eax
	bndcl bnd1, dx
### bndcu
	bndcu bnd1, [eax]
	bndcu bnd1, [edx+1*eax+0x3]
	bndcu bnd1, eax
	bndcu bnd1, dx
### bndcn
	bndcn bnd1, [eax]
	bndcn bnd1, [edx+1*eax+0x3]
	bndcn bnd1, eax
	bndcn bnd1, dx
### bndstx
	bndstx [eax+ebx*1+0x3], bnd0
	bndstx [1*ebx+3], bnd2
	bndstx [r8+rdi*4], bnd2
	bndstx [rip+base], bnd1
	bndstx [eip+base], bnd3
	bndstx [rax+rsp], bnd3
### bndldx
	bndldx bnd0, [eax+ebx*1+0x3]
	bndldx bnd2, [1*ebx+3]
	bndldx bnd2, [rdi+r8*8]
	bndldx bnd1, [rip+base]
	bndldx bnd3, [eip+base]
	bndldx bnd3, [rax+rsp]
# Force a good alignment.
	.p2align 4,0
|
stsp/binutils-ia16
| 2,710
|
gas/testsuite/gas/i386/x86-64-bmi.s
|
# Check 64bit BMI instructions
# Test fixture: assembles the BMI1 group (tzcnt, andn, bextr, blsi,
# blsmsk, blsr) over 16/32/64-bit operand widths, REX/extended
# registers and memory operands, in AT&T then Intel syntax.
# NOTE(review): presumably paired with an expected-dump (.d) file — the
# instruction sequence must not be altered; confirm before editing.
	.allow_index_reg
	.text
_start:
# Test for op r16, r/m16
	tzcnt %ax,%bx
	tzcnt (%rcx),%bx
	tzcnt (%rcx),%r15w
# Test for op r32, r32, r/m32
	andn %eax,%ebx,%esi
	andn (%rcx),%ebx,%esi
	andn %r9d,%r15d,%r10d
	andn (%rcx),%r15d,%r10d
# Test for op r32, r/m32, r32
	bextr %eax,%ebx,%esi
	bextr %ebx,(%rcx),%esi
	bextr %r9d,%r15d,%r10d
	bextr %r9d,(%rcx),%r10d
# Test for op r32, r/m32
	tzcnt %eax,%ebx
	tzcnt (%rcx),%ebx
	tzcnt (%rcx),%r15d
	blsi %eax,%ebx
	blsi (%rcx),%ebx
	blsi (%rcx),%r15d
	blsmsk %eax,%ebx
	blsmsk (%rcx),%ebx
	blsmsk (%rcx),%r15d
	blsr %eax,%ebx
	blsr (%rcx),%ebx
	blsr (%rcx),%r15d
# Test for op r64, r64, r/m64
	andn %rax,%rbx,%rsi
	andn (%rcx),%rbx,%rsi
	andn %r9,%r15,%r10
	andn (%rcx),%r15,%r10
# Test for op r64, r/m64, r64
	bextr %rax,%rbx,%rsi
	bextr %rax,(%rcx),%rsi
	bextr %r9,%r15,%r10
	bextr %r9,(%rcx),%r10
# Test for op r64, r/m64
	tzcnt %rax,%rbx
	tzcnt (%rcx),%rbx
	tzcnt %r9,%r15
	tzcnt (%rcx),%r15
	blsi %rax,%rbx
	blsi (%rcx),%rbx
	blsi %r9,%r15
	blsi (%rcx),%r15
	blsmsk %rax,%rbx
	blsmsk (%rcx),%rbx
	blsmsk %r9,%r15
	blsmsk (%rcx),%r15
	blsr %rax,%rbx
	blsr (%rcx),%rbx
	blsr %r9,%r15
	blsr (%rcx),%r15
	# Same coverage repeated in Intel syntax.
	.intel_syntax noprefix
# Test for op r16, r/m16
	tzcnt bx,ax
	tzcnt bx,WORD PTR [rcx]
	tzcnt r10w,WORD PTR [rcx]
	tzcnt bx,[rcx]
# Test for op r32, r32, r/m32
	andn esi,ebx,eax
	andn esi,ebx,DWORD PTR [rcx]
	andn r15d,r10d,r9d
	andn r15d,r10d,DWORD PTR [rcx]
	andn esi,ebx,[rcx]
# Test for op r32, r/m32, r32
	bextr esi,ebx,eax
	bextr esi,DWORD PTR [rcx],ebx
	bextr r15d,r10d,r9d
	bextr r15d,DWORD PTR [rcx],r9d
	bextr esi,[rcx],ebx
# Test for op r32, r/m32
	tzcnt ebx,eax
	tzcnt ebx,DWORD PTR [rcx]
	tzcnt r10d,DWORD PTR [rcx]
	tzcnt ebx,[rcx]
	blsi ebx,eax
	blsi ebx,DWORD PTR [rcx]
	blsi r10d,DWORD PTR [rcx]
	blsi ebx,[rcx]
	blsmsk ebx,eax
	blsmsk ebx,DWORD PTR [rcx]
	blsmsk r10d,DWORD PTR [rcx]
	blsmsk ebx,[rcx]
	blsr ebx,eax
	blsr ebx,DWORD PTR [rcx]
	blsr r10d,DWORD PTR [rcx]
	blsr ebx,[rcx]
# Test for op r64, r64, r/m64
	andn rsi,rbx,rax
	andn rsi,rbx,QWORD PTR [rcx]
	andn r10,r15,r9
	andn r10,r15,QWORD PTR [rcx]
	andn rsi,rbx,[rcx]
# Test for op r64, r/m64, r64
	bextr rsi,rbx,rax
	bextr rsi,QWORD PTR [rcx],rax
	bextr r10,r15,r9
	bextr r10,QWORD PTR [rcx],r9
	bextr rsi,[rcx],rax
# Test for op r64, r/m64
	tzcnt rbx,rax
	tzcnt rbx,QWORD PTR [rcx]
	tzcnt r15,r9
	tzcnt r15,QWORD PTR [rcx]
	tzcnt rbx,[rcx]
	blsi rbx,rax
	blsi rbx,QWORD PTR [rcx]
	blsi r15,r9
	blsi r15,QWORD PTR [rcx]
	blsi rbx,[rcx]
	blsmsk rbx,rax
	blsmsk rbx,QWORD PTR [rcx]
	blsmsk r15,r9
	blsmsk r15,QWORD PTR [rcx]
	blsmsk rbx,[rcx]
	blsr rbx,rax
	blsr rbx,QWORD PTR [rcx]
	blsr r15,r9
	blsr r15,QWORD PTR [rcx]
	blsr rbx,[rcx]
|
stsp/binutils-ia16
| 5,370
|
gas/testsuite/gas/i386/dw2-compress-1.s
|
/* This testcase is copied from a similar test in GDB.
Copyright (C) 2010-2022 Free Software Foundation, Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* This tests that gdb can read compressed sections. The contents
are a basic assembly file, but the .debug_abbrev section has been
comrpessed using zlib. */
/* Dummy function to provide debug information for. */
.text
.Lbegin_text1:
.globl func_cu1
.type func_cu1, %function
func_cu1:
.Lbegin_func_cu1:
.int 0
.Lend_func_cu1:
.size func_cu1, .-func_cu1
.Lend_text1:
/* Debug information: one hand-built DWARF 2 compile unit (32-bit DWARF
   format) describing func_cu1 and a base type "int".  Attribute order
   must match the abbreviations declared in .debug_abbrev below.  */
.section .debug_info
.Lcu1_begin:
/* CU header */
.4byte .Lcu1_end - .Lcu1_start /* Length of Compilation Unit */
.Lcu1_start:
.2byte 2 /* DWARF Version */
.4byte .Labbrev1_begin /* Offset into abbrev section */
.byte 4 /* Pointer size */
/* CU die */
.uleb128 1 /* Abbrev: DW_TAG_compile_unit */
.4byte .Lline1_begin /* DW_AT_stmt_list */
.4byte .Lend_text1 /* DW_AT_high_pc */
.4byte .Lbegin_text1 /* DW_AT_low_pc */
.ascii "file1.txt\0" /* DW_AT_name */
.ascii "GNU C 3.3.3\0" /* DW_AT_producer */
.byte 1 /* DW_AT_language (C) */
/* func_cu1 */
.uleb128 2 /* Abbrev: DW_TAG_subprogram */
.byte 1 /* DW_AT_external */
.byte 1 /* DW_AT_decl_file */
.byte 2 /* DW_AT_decl_line */
.ascii "func_cu1\0" /* DW_AT_name */
.4byte .Ltype_int-.Lcu1_begin /* DW_AT_type (CU-relative ref4) */
.4byte .Lbegin_func_cu1 /* DW_AT_low_pc */
.4byte .Lend_func_cu1 /* DW_AT_high_pc */
.byte 1 /* DW_AT_frame_base: length */
.byte 0x55 /* DW_AT_frame_base: DW_OP_reg5 */
.Ltype_int:
.uleb128 3 /* Abbrev: DW_TAG_base_type */
.ascii "int\0" /* DW_AT_name */
.byte 4 /* DW_AT_byte_size */
.byte 5 /* DW_AT_encoding */
.byte 0 /* End of children of CU */
.Lcu1_end:
/* Abbrev table: three abbreviations (compile_unit, subprogram,
   base_type).  Each attribute is a DW_AT/DW_FORM pair; a 0,0 pair
   terminates an abbreviation, and a final 0 terminates the table.
   In the compressed variant of this test, this is the section that
   gets zlib-compressed.  */
.section .debug_abbrev
.Labbrev1_begin:
.uleb128 1 /* Abbrev code */
.uleb128 0x11 /* DW_TAG_compile_unit */
.byte 1 /* has_children */
.uleb128 0x10 /* DW_AT_stmt_list */
.uleb128 0x6 /* DW_FORM_data4 */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x25 /* DW_AT_producer */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x13 /* DW_AT_language */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 2 /* Abbrev code */
.uleb128 0x2e /* DW_TAG_subprogram */
.byte 0 /* has_children */
.uleb128 0x3f /* DW_AT_external */
.uleb128 0xc /* DW_FORM_flag */
.uleb128 0x3a /* DW_AT_decl_file */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3b /* DW_AT_decl_line */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x49 /* DW_AT_type */
.uleb128 0x13 /* DW_FORM_ref4 */
.uleb128 0x11 /* DW_AT_low_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x12 /* DW_AT_high_pc */
.uleb128 0x1 /* DW_FORM_addr */
.uleb128 0x40 /* DW_AT_frame_base */
.uleb128 0xa /* DW_FORM_block1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.uleb128 3 /* Abbrev code */
.uleb128 0x24 /* DW_TAG_base_type */
.byte 0 /* has_children */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0xb /* DW_AT_byte_size */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3e /* DW_AT_encoding */
.uleb128 0xb /* DW_FORM_data1 */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
.byte 0x0 /* Terminator */
/* Line table: DWARF 2 line number program for file1.txt.  Extended
   opcodes are introduced by a 0x00 escape byte, a uleb128 length, then
   the sub-opcode itself.  */
.section .debug_line
.Lline1_begin:
.4byte .Lline1_end - .Lline1_start /* Initial length */
.Lline1_start:
.2byte 2 /* Version */
.4byte .Lline1_lines - .Lline1_hdr /* header_length */
.Lline1_hdr:
.byte 1 /* Minimum insn length */
.byte 1 /* default_is_stmt */
.byte 1 /* line_base */
.byte 1 /* line_range */
.byte 0x10 /* opcode_base */
/* Standard lengths: one operand count per standard opcode 1..15
   (opcode_base - 1 entries) */
.byte 0
.byte 1
.byte 1
.byte 1
.byte 1
.byte 0
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 0
/* Include directories (empty list terminator) */
.byte 0
/* File names */
.ascii "file1.txt\0"
.uleb128 0 /* directory index */
.uleb128 0 /* modification time */
.uleb128 0 /* file length */
.byte 0 /* end of file table */
.Lline1_lines:
.byte 0 /* extended opcode escape */
.uleb128 5 /* instruction length */
.byte 2 /* DW_LNE_set_address */
.4byte .Lbegin_func_cu1
.byte 3 /* DW_LNS_advance_line */
.sleb128 3 /* ... to 4 */
.byte 1 /* DW_LNS_copy */
.byte 1 /* DW_LNS_copy (second time as an end-of-prologue marker) */
.byte 0 /* extended opcode escape */
.uleb128 5 /* instruction length */
.byte 2 /* DW_LNE_set_address */
.4byte .Lend_func_cu1
.byte 0 /* extended opcode escape */
.uleb128 1 /* instruction length */
.byte 1 /* DW_LNE_end_of_sequence */
.Lline1_end:
|
stsp/binutils-ia16
| 1,405
|
gas/testsuite/gas/i386/dw2-compress-3.s
|
/* Compiler-generated (GCC 4.8.3 style) DWARF 4 debug info for a file
   containing only "int foo;".  One CU DIE, one DW_TAG_variable, one
   DW_TAG_base_type.  Attribute order follows the abbrevs below.  */
.file "dw2-compress-3.c"
.text
.Ltext0:
.comm foo,4,4
.Letext0:
.file 1 "dw2-compress-3.c"
.section .debug_info,"",@progbits
.Ldebug_info0:
.long 0x32 /* unit length */
.value 0x4 /* DWARF version 4 */
.long .Ldebug_abbrev0 /* abbrev table offset */
.byte 0x4 /* address size */
.uleb128 0x1 /* abbrev 1: DW_TAG_compile_unit */
.long .LASF0 /* DW_AT_producer (strp) */
.byte 0x1 /* DW_AT_language (C) */
.long .LASF1 /* DW_AT_name (strp) */
.long .LASF2 /* DW_AT_comp_dir (strp) */
.long .Ldebug_line0 /* DW_AT_stmt_list */
.uleb128 0x2 /* abbrev 2: DW_TAG_variable */
.string "foo" /* DW_AT_name */
.byte 0x1 /* DW_AT_decl_file */
.byte 0x1 /* DW_AT_decl_line */
.long 0x2e /* DW_AT_type (ref4 to base type below) */
.uleb128 0x5 /* DW_AT_location: exprloc length 5 */
.byte 0x3 /* DW_OP_addr */
.long foo
.uleb128 0x3 /* abbrev 3: DW_TAG_base_type */
.byte 0x4 /* DW_AT_byte_size */
.byte 0x5 /* DW_AT_encoding (signed) */
.string "int" /* DW_AT_name */
.byte 0 /* end of children of CU */
.section .debug_abbrev,"",@progbits
.Ldebug_abbrev0:
.uleb128 0x1 /* abbrev code 1 */
.uleb128 0x11 /* DW_TAG_compile_unit */
.byte 0x1 /* has children */
.uleb128 0x25 /* DW_AT_producer */
.uleb128 0xe /* DW_FORM_strp */
.uleb128 0x13 /* DW_AT_language */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0xe /* DW_FORM_strp */
.uleb128 0x1b /* DW_AT_comp_dir */
.uleb128 0xe /* DW_FORM_strp */
.uleb128 0x10 /* DW_AT_stmt_list */
.uleb128 0x17 /* DW_FORM_sec_offset */
.byte 0 /* terminator */
.byte 0 /* terminator */
.uleb128 0x2 /* abbrev code 2 */
.uleb128 0x34 /* DW_TAG_variable */
.byte 0 /* no children */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.uleb128 0x3a /* DW_AT_decl_file */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3b /* DW_AT_decl_line */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x49 /* DW_AT_type */
.uleb128 0x13 /* DW_FORM_ref4 */
.uleb128 0x3f /* DW_AT_external */
.uleb128 0x19 /* DW_FORM_flag_present */
.uleb128 0x2 /* DW_AT_location */
.uleb128 0x18 /* DW_FORM_exprloc */
.byte 0 /* terminator */
.byte 0 /* terminator */
.uleb128 0x3 /* abbrev code 3 */
.uleb128 0x24 /* DW_TAG_base_type */
.byte 0 /* no children */
.uleb128 0xb /* DW_AT_byte_size */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3e /* DW_AT_encoding */
.uleb128 0xb /* DW_FORM_data1 */
.uleb128 0x3 /* DW_AT_name */
.uleb128 0x8 /* DW_FORM_string */
.byte 0 /* terminator */
.byte 0 /* terminator */
.byte 0 /* end of abbrev table */
.section .debug_aranges,"",@progbits
.long 0x14 /* unit length */
.value 0x2 /* version */
.long .Ldebug_info0 /* debug_info offset */
.byte 0x4 /* address size */
.byte 0 /* segment size */
.value 0 /* padding to tuple alignment */
.value 0
.long 0 /* terminating tuple: address 0 */
.long 0 /* terminating tuple: length 0 */
.section .debug_line,"",@progbits
.Ldebug_line0:
.section .debug_str,"MS",@progbits,1
.LASF2:
.string "." /* DW_AT_comp_dir */
.LASF0:
.string "GNU C 4.8.3" /* DW_AT_producer */
.LASF1:
.string "dw2-compress-3.c" /* DW_AT_name */
|
stsp/binutils-ia16
| 3,081
|
gas/testsuite/gas/i386/avx512bw_vl-opts.s
|
# Check 32bit AVX512{BW,VL} swap instructions
# Tests the gas ".s" mnemonic pseudo-suffix, which selects the
# alternative (operand-swapped, store-form) encoding of register-to-
# register vmovdqu8/vmovdqu16, combined with {k7} masking and {z}
# zero-masking, for 128- and 256-bit vectors, first in AT&T syntax
# and then repeated in Intel syntax.
.allow_index_reg
.text
_start:
vmovdqu8 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu8.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu8.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu8.s %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu8.s %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu16.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu16.s %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu16.s %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu16.s %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
# Same coverage again in Intel syntax (operand order reversed).
.intel_syntax noprefix
vmovdqu8 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu8.s xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu8.s xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu8.s xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu8.s xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu8.s ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu8.s ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu8.s ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu8.s ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu16.s xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu16.s xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu16.s xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu16.s xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu16.s ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu16.s ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu16.s ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu16.s ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.