repo_id
stringlengths 5
115
| size
int64 590
5.01M
| file_path
stringlengths 4
212
| content
stringlengths 590
5.01M
|
|---|---|---|---|
stsp/binutils-ia16
| 6,952
|
gas/testsuite/gas/i386/x86-64-avx-wig.s
|
# Check AVX WIG instructions
.allow_index_reg
.text
_start:
vaddpd %ymm4,%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddsd %xmm4,%xmm6,%xmm2
vaddss %xmm4,%xmm6,%xmm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaesdec %xmm4,%xmm6,%xmm2
vaesdeclast %xmm4,%xmm6,%xmm2
vaesenc %xmm4,%xmm6,%xmm2
vaesenclast %xmm4,%xmm6,%xmm2
vaesimc %xmm4,%xmm6
vaeskeygenassist $7,%xmm4,%xmm6
vandnpd %ymm4,%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpss $7,%xmm4,%xmm6,%xmm2
vcomisd %xmm4,%xmm6
vcomiss %xmm4,%xmm6
vcvtdq2pd %xmm4,%ymm4
vcvtdq2ps %ymm4,%ymm6
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psy %ymm4,%xmm4
vcvtps2dq %ymm4,%ymm6
vcvtps2pd %xmm4,%ymm4
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqy %ymm4,%xmm4
vcvttps2dq %ymm4,%ymm6
vdivpd %ymm4,%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivsd %xmm4,%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdpps $7,%ymm4,%ymm6,%ymm2
vextractps $7,%xmm4,%rcx
vhaddpd %ymm4,%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vlddqu (%rcx),%ymm4
vldmxcsr (%rcx)
vmaskmovdqu %xmm4,%xmm6
vmaxpd %ymm4,%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vminpd %ymm4,%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminsd %xmm4,%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vmovapd %ymm4,%ymm6
vmovaps %ymm4,%ymm6
{store} vmovapd %ymm4,%ymm6
{store} vmovaps %ymm4,%ymm6
vmovddup %ymm4,%ymm6
vmovdqa %ymm4,%ymm6
vmovdqu %ymm4,%ymm6
{store} vmovdqa %ymm4,%ymm6
{store} vmovdqu %ymm4,%ymm6
vmovhlps %xmm4,%xmm6,%xmm2
vmovhpd (%rcx),%xmm4,%xmm6
vmovhpd %xmm4,(%rcx)
vmovhps (%rcx),%xmm4,%xmm6
vmovhps %xmm4,(%rcx)
vmovlhps %xmm4,%xmm6,%xmm2
vmovlpd (%rcx),%xmm4,%xmm6
vmovlpd %xmm4,(%rcx)
vmovlps (%rcx),%xmm4,%xmm6
vmovlps %xmm4,(%rcx)
vmovmskpd %xmm4,%rcx
vmovmskps %xmm4,%rcx
vmovntdq %ymm4,(%rcx)
vmovntdqa (%rcx),%xmm4
vmovntpd %ymm4,(%rcx)
vmovntps %ymm4,(%rcx)
vmovq %xmm4,%xmm6
vmovq %xmm4,(%rcx)
vmovq %xmm4,%rcx
vmovq %rcx,%xmm4
vmovsd (%rcx),%xmm4
vmovsd %xmm4,(%rcx)
vmovshdup %ymm4,%ymm6
vmovsldup %ymm4,%ymm6
vmovss (%rcx),%xmm4
vmovss %xmm4,(%rcx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%rcx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%rcx)
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmulpd %ymm4,%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulsd %xmm4,%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vorpd %ymm4,%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vpabsb %xmm4,%xmm6
vpabsd %xmm4,%xmm6
vpabsw %xmm4,%xmm6
vpackssdw %xmm4,%xmm6,%xmm2
vpacksswb %xmm4,%xmm6,%xmm2
vpackusdw %xmm4,%xmm6,%xmm2
vpackuswb %xmm4,%xmm6,%xmm2
vpaddb %xmm4,%xmm6,%xmm2
vpaddd %xmm4,%xmm6,%xmm2
vpaddq %xmm4,%xmm6,%xmm2
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsw %xmm4,%xmm6,%xmm2
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusw %xmm4,%xmm6,%xmm2
vpaddw %xmm4,%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpand %xmm4,%xmm6,%xmm2
vpandn %xmm4,%xmm6,%xmm2
vpavgb %xmm4,%xmm6,%xmm2
vpavgw %xmm4,%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpistri $7,%xmm4,%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpextrb $0, %xmm0, %eax
vpextrb $0, %xmm0, (%rax)
vpextrw $0, %xmm0, %eax
{store} vpextrw $0, %xmm0, %eax
vpextrw $0, %xmm0, (%rax)
vphaddd %xmm4,%xmm6,%xmm2
vphaddsw %xmm4,%xmm6,%xmm2
vphaddw %xmm4,%xmm6,%xmm2
vphminposuw %xmm4,%xmm6
vphsubd %xmm4,%xmm6,%xmm2
vphsubsw %xmm4,%xmm6,%xmm2
vphsubw %xmm4,%xmm6,%xmm2
vpinsrb $0, %eax, %xmm0, %xmm0
vpinsrb $0, (%rax), %xmm0, %xmm0
vpinsrw $0, %eax, %xmm0, %xmm0
vpinsrw $0, (%rax), %xmm0, %xmm0
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxuw %xmm4,%xmm6,%xmm2
vpminsb %xmm4,%xmm6,%xmm2
vpminsd %xmm4,%xmm6,%xmm2
vpminsw %xmm4,%xmm6,%xmm2
vpminub %xmm4,%xmm6,%xmm2
vpminud %xmm4,%xmm6,%xmm2
vpminuw %xmm4,%xmm6,%xmm2
vpmovmskb %xmm4,%rcx
vpmovsxbd %xmm4,%xmm6
vpmovsxbq %xmm4,%xmm6
vpmovsxbw %xmm4,%xmm6
vpmovsxdq %xmm4,%xmm6
vpmovsxwd %xmm4,%xmm6
vpmovsxwq %xmm4,%xmm6
vpmovzxbd %xmm4,%xmm6
vpmovzxbq %xmm4,%xmm6
vpmovzxbw %xmm4,%xmm6
vpmovzxdq %xmm4,%xmm6
vpmovzxwd %xmm4,%xmm6
vpmovzxwq %xmm4,%xmm6
vpmuldq %xmm4,%xmm6,%xmm2
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhw %xmm4,%xmm6,%xmm2
vpmulld %xmm4,%xmm6,%xmm2
vpmullw %xmm4,%xmm6,%xmm2
vpmuludq %xmm4,%xmm6,%xmm2
vpor %xmm4,%xmm6,%xmm2
vpsadbw %xmm4,%xmm6,%xmm2
vpshufb %xmm4,%xmm6,%xmm2
vpshufd $7,%xmm4,%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshuflw $7,%xmm4,%xmm6
vpsignb %xmm4,%xmm6,%xmm2
vpsignd %xmm4,%xmm6,%xmm2
vpsignw %xmm4,%xmm6,%xmm2
vpslld %xmm4,%xmm6,%xmm2
vpslldq $7,%xmm4,%xmm6
vpsllq %xmm4,%xmm6,%xmm2
vpsllw %xmm4,%xmm6,%xmm2
vpsrad %xmm4,%xmm6,%xmm2
vpsraw %xmm4,%xmm6,%xmm2
vpsrld %xmm4,%xmm6,%xmm2
vpsrldq $7,%xmm4,%xmm6
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlw %xmm4,%xmm6,%xmm2
vpsubb %xmm4,%xmm6,%xmm2
vpsubd %xmm4,%xmm6,%xmm2
vpsubq %xmm4,%xmm6,%xmm2
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsw %xmm4,%xmm6,%xmm2
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusw %xmm4,%xmm6,%xmm2
vpsubw %xmm4,%xmm6,%xmm2
vptest %ymm4,%ymm6
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklwd %xmm4,%xmm6,%xmm2
vpxor %xmm4,%xmm6,%xmm2
vrcpps %ymm4,%ymm6
vrcpss %xmm4,%xmm6,%xmm2
vroundpd $7,%ymm6,%ymm2
vroundps $7,%ymm6,%ymm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vrsqrtps %ymm4,%ymm6
vrsqrtss %xmm4,%xmm6,%xmm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vsqrtpd %ymm4,%ymm6
vsqrtps %ymm4,%ymm6
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vstmxcsr (%rcx)
vsubpd %ymm4,%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubsd %xmm4,%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vucomisd %xmm4,%xmm6
vucomiss %xmm4,%xmm6
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vzeroall
vzeroupper
|
stsp/binutils-ia16
| 12,407
|
gas/testsuite/gas/i386/x86-64-avx512er.s
|
# Check 64bit AVX512ER instructions
.allow_index_reg
.text
_start:
vexp2ps %zmm29, %zmm30 # AVX512ER
vexp2ps {sae}, %zmm29, %zmm30 # AVX512ER
vexp2ps (%rcx), %zmm30 # AVX512ER
vexp2ps 0x123(%rax,%r14,8), %zmm30 # AVX512ER
vexp2ps (%rcx){1to16}, %zmm30 # AVX512ER
vexp2ps 8128(%rdx), %zmm30 # AVX512ER Disp8
vexp2ps 8192(%rdx), %zmm30 # AVX512ER
vexp2ps -8192(%rdx), %zmm30 # AVX512ER Disp8
vexp2ps -8256(%rdx), %zmm30 # AVX512ER
vexp2ps 508(%rdx){1to16}, %zmm30 # AVX512ER Disp8
vexp2ps 512(%rdx){1to16}, %zmm30 # AVX512ER
vexp2ps -512(%rdx){1to16}, %zmm30 # AVX512ER Disp8
vexp2ps -516(%rdx){1to16}, %zmm30 # AVX512ER
vexp2pd %zmm29, %zmm30 # AVX512ER
vexp2pd {sae}, %zmm29, %zmm30 # AVX512ER
vexp2pd (%rcx), %zmm30 # AVX512ER
vexp2pd 0x123(%rax,%r14,8), %zmm30 # AVX512ER
vexp2pd (%rcx){1to8}, %zmm30 # AVX512ER
vexp2pd 8128(%rdx), %zmm30 # AVX512ER Disp8
vexp2pd 8192(%rdx), %zmm30 # AVX512ER
vexp2pd -8192(%rdx), %zmm30 # AVX512ER Disp8
vexp2pd -8256(%rdx), %zmm30 # AVX512ER
vexp2pd 1016(%rdx){1to8}, %zmm30 # AVX512ER Disp8
vexp2pd 1024(%rdx){1to8}, %zmm30 # AVX512ER
vexp2pd -1024(%rdx){1to8}, %zmm30 # AVX512ER Disp8
vexp2pd -1032(%rdx){1to8}, %zmm30 # AVX512ER
vrcp28ps %zmm29, %zmm30 # AVX512ER
vrcp28ps %zmm29, %zmm30{%k7} # AVX512ER
vrcp28ps %zmm29, %zmm30{%k7}{z} # AVX512ER
vrcp28ps {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28ps (%rcx), %zmm30 # AVX512ER
vrcp28ps 0x123(%rax,%r14,8), %zmm30 # AVX512ER
vrcp28ps (%rcx){1to16}, %zmm30 # AVX512ER
vrcp28ps 8128(%rdx), %zmm30 # AVX512ER Disp8
vrcp28ps 8192(%rdx), %zmm30 # AVX512ER
vrcp28ps -8192(%rdx), %zmm30 # AVX512ER Disp8
vrcp28ps -8256(%rdx), %zmm30 # AVX512ER
vrcp28ps 508(%rdx){1to16}, %zmm30 # AVX512ER Disp8
vrcp28ps 512(%rdx){1to16}, %zmm30 # AVX512ER
vrcp28ps -512(%rdx){1to16}, %zmm30 # AVX512ER Disp8
vrcp28ps -516(%rdx){1to16}, %zmm30 # AVX512ER
vrcp28pd %zmm29, %zmm30 # AVX512ER
vrcp28pd %zmm29, %zmm30{%k7} # AVX512ER
vrcp28pd %zmm29, %zmm30{%k7}{z} # AVX512ER
vrcp28pd {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28pd (%rcx), %zmm30 # AVX512ER
vrcp28pd 0x123(%rax,%r14,8), %zmm30 # AVX512ER
vrcp28pd (%rcx){1to8}, %zmm30 # AVX512ER
vrcp28pd 8128(%rdx), %zmm30 # AVX512ER Disp8
vrcp28pd 8192(%rdx), %zmm30 # AVX512ER
vrcp28pd -8192(%rdx), %zmm30 # AVX512ER Disp8
vrcp28pd -8256(%rdx), %zmm30 # AVX512ER
vrcp28pd 1016(%rdx){1to8}, %zmm30 # AVX512ER Disp8
vrcp28pd 1024(%rdx){1to8}, %zmm30 # AVX512ER
vrcp28pd -1024(%rdx){1to8}, %zmm30 # AVX512ER Disp8
vrcp28pd -1032(%rdx){1to8}, %zmm30 # AVX512ER
vrcp28ss %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrcp28ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512ER
vrcp28ss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrcp28ss (%rcx), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrcp28ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrcp28ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28sd %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrcp28sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512ER
vrcp28sd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrcp28sd (%rcx), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrcp28sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrcp28sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrcp28sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28ps %zmm29, %zmm30 # AVX512ER
vrsqrt28ps %zmm29, %zmm30{%k7} # AVX512ER
vrsqrt28ps %zmm29, %zmm30{%k7}{z} # AVX512ER
vrsqrt28ps {sae}, %zmm29, %zmm30 # AVX512ER
vrsqrt28ps (%rcx), %zmm30 # AVX512ER
vrsqrt28ps 0x123(%rax,%r14,8), %zmm30 # AVX512ER
vrsqrt28ps (%rcx){1to16}, %zmm30 # AVX512ER
vrsqrt28ps 8128(%rdx), %zmm30 # AVX512ER Disp8
vrsqrt28ps 8192(%rdx), %zmm30 # AVX512ER
vrsqrt28ps -8192(%rdx), %zmm30 # AVX512ER Disp8
vrsqrt28ps -8256(%rdx), %zmm30 # AVX512ER
vrsqrt28ps 508(%rdx){1to16}, %zmm30 # AVX512ER Disp8
vrsqrt28ps 512(%rdx){1to16}, %zmm30 # AVX512ER
vrsqrt28ps -512(%rdx){1to16}, %zmm30 # AVX512ER Disp8
vrsqrt28ps -516(%rdx){1to16}, %zmm30 # AVX512ER
vrsqrt28pd %zmm29, %zmm30 # AVX512ER
vrsqrt28pd %zmm29, %zmm30{%k7} # AVX512ER
vrsqrt28pd %zmm29, %zmm30{%k7}{z} # AVX512ER
vrsqrt28pd {sae}, %zmm29, %zmm30 # AVX512ER
vrsqrt28pd (%rcx), %zmm30 # AVX512ER
vrsqrt28pd 0x123(%rax,%r14,8), %zmm30 # AVX512ER
vrsqrt28pd (%rcx){1to8}, %zmm30 # AVX512ER
vrsqrt28pd 8128(%rdx), %zmm30 # AVX512ER Disp8
vrsqrt28pd 8192(%rdx), %zmm30 # AVX512ER
vrsqrt28pd -8192(%rdx), %zmm30 # AVX512ER Disp8
vrsqrt28pd -8256(%rdx), %zmm30 # AVX512ER
vrsqrt28pd 1016(%rdx){1to8}, %zmm30 # AVX512ER Disp8
vrsqrt28pd 1024(%rdx){1to8}, %zmm30 # AVX512ER
vrsqrt28pd -1024(%rdx){1to8}, %zmm30 # AVX512ER Disp8
vrsqrt28pd -1032(%rdx){1to8}, %zmm30 # AVX512ER
vrsqrt28ss %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512ER
vrsqrt28ss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28ss (%rcx), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrsqrt28ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrsqrt28ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28sd %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512ER
vrsqrt28sd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28sd (%rcx), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrsqrt28sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
vrsqrt28sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512ER Disp8
vrsqrt28sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512ER
.intel_syntax noprefix
vexp2ps zmm30, zmm29 # AVX512ER
vexp2ps zmm30, zmm29{sae} # AVX512ER
vexp2ps zmm30, ZMMWORD PTR [rcx] # AVX512ER
vexp2ps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512ER
vexp2ps zmm30, [rcx]{1to16} # AVX512ER
vexp2ps zmm30, ZMMWORD PTR [rdx+8128] # AVX512ER Disp8
vexp2ps zmm30, ZMMWORD PTR [rdx+8192] # AVX512ER
vexp2ps zmm30, ZMMWORD PTR [rdx-8192] # AVX512ER Disp8
vexp2ps zmm30, ZMMWORD PTR [rdx-8256] # AVX512ER
vexp2ps zmm30, [rdx+508]{1to16} # AVX512ER Disp8
vexp2ps zmm30, [rdx+512]{1to16} # AVX512ER
vexp2ps zmm30, [rdx-512]{1to16} # AVX512ER Disp8
vexp2ps zmm30, [rdx-516]{1to16} # AVX512ER
vexp2pd zmm30, zmm29 # AVX512ER
vexp2pd zmm30, zmm29{sae} # AVX512ER
vexp2pd zmm30, ZMMWORD PTR [rcx] # AVX512ER
vexp2pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512ER
vexp2pd zmm30, [rcx]{1to8} # AVX512ER
vexp2pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512ER Disp8
vexp2pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512ER
vexp2pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512ER Disp8
vexp2pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512ER
vexp2pd zmm30, [rdx+1016]{1to8} # AVX512ER Disp8
vexp2pd zmm30, [rdx+1024]{1to8} # AVX512ER
vexp2pd zmm30, [rdx-1024]{1to8} # AVX512ER Disp8
vexp2pd zmm30, [rdx-1032]{1to8} # AVX512ER
vrcp28ps zmm30, zmm29 # AVX512ER
vrcp28ps zmm30{k7}, zmm29 # AVX512ER
vrcp28ps zmm30{k7}{z}, zmm29 # AVX512ER
vrcp28ps zmm30, zmm29{sae} # AVX512ER
vrcp28ps zmm30, ZMMWORD PTR [rcx] # AVX512ER
vrcp28ps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrcp28ps zmm30, [rcx]{1to16} # AVX512ER
vrcp28ps zmm30, ZMMWORD PTR [rdx+8128] # AVX512ER Disp8
vrcp28ps zmm30, ZMMWORD PTR [rdx+8192] # AVX512ER
vrcp28ps zmm30, ZMMWORD PTR [rdx-8192] # AVX512ER Disp8
vrcp28ps zmm30, ZMMWORD PTR [rdx-8256] # AVX512ER
vrcp28ps zmm30, [rdx+508]{1to16} # AVX512ER Disp8
vrcp28ps zmm30, [rdx+512]{1to16} # AVX512ER
vrcp28ps zmm30, [rdx-512]{1to16} # AVX512ER Disp8
vrcp28ps zmm30, [rdx-516]{1to16} # AVX512ER
vrcp28pd zmm30, zmm29 # AVX512ER
vrcp28pd zmm30{k7}, zmm29 # AVX512ER
vrcp28pd zmm30{k7}{z}, zmm29 # AVX512ER
vrcp28pd zmm30, zmm29{sae} # AVX512ER
vrcp28pd zmm30, ZMMWORD PTR [rcx] # AVX512ER
vrcp28pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrcp28pd zmm30, [rcx]{1to8} # AVX512ER
vrcp28pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512ER Disp8
vrcp28pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512ER
vrcp28pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512ER Disp8
vrcp28pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512ER
vrcp28pd zmm30, [rdx+1016]{1to8} # AVX512ER Disp8
vrcp28pd zmm30, [rdx+1024]{1to8} # AVX512ER
vrcp28pd zmm30, [rdx-1024]{1to8} # AVX512ER Disp8
vrcp28pd zmm30, [rdx-1032]{1to8} # AVX512ER
vrcp28ss xmm30{k7}, xmm29, xmm28 # AVX512ER
vrcp28ss xmm30{k7}{z}, xmm29, xmm28 # AVX512ER
vrcp28ss xmm30{k7}, xmm29, xmm28{sae} # AVX512ER
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512ER
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512ER Disp8
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512ER
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512ER Disp8
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512ER
vrcp28sd xmm30{k7}, xmm29, xmm28 # AVX512ER
vrcp28sd xmm30{k7}{z}, xmm29, xmm28 # AVX512ER
vrcp28sd xmm30{k7}, xmm29, xmm28{sae} # AVX512ER
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512ER
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512ER Disp8
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512ER
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512ER Disp8
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512ER
vrsqrt28ps zmm30, zmm29 # AVX512ER
vrsqrt28ps zmm30{k7}, zmm29 # AVX512ER
vrsqrt28ps zmm30{k7}{z}, zmm29 # AVX512ER
vrsqrt28ps zmm30, zmm29{sae} # AVX512ER
vrsqrt28ps zmm30, ZMMWORD PTR [rcx] # AVX512ER
vrsqrt28ps zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrsqrt28ps zmm30, [rcx]{1to16} # AVX512ER
vrsqrt28ps zmm30, ZMMWORD PTR [rdx+8128] # AVX512ER Disp8
vrsqrt28ps zmm30, ZMMWORD PTR [rdx+8192] # AVX512ER
vrsqrt28ps zmm30, ZMMWORD PTR [rdx-8192] # AVX512ER Disp8
vrsqrt28ps zmm30, ZMMWORD PTR [rdx-8256] # AVX512ER
vrsqrt28ps zmm30, [rdx+508]{1to16} # AVX512ER Disp8
vrsqrt28ps zmm30, [rdx+512]{1to16} # AVX512ER
vrsqrt28ps zmm30, [rdx-512]{1to16} # AVX512ER Disp8
vrsqrt28ps zmm30, [rdx-516]{1to16} # AVX512ER
vrsqrt28pd zmm30, zmm29 # AVX512ER
vrsqrt28pd zmm30{k7}, zmm29 # AVX512ER
vrsqrt28pd zmm30{k7}{z}, zmm29 # AVX512ER
vrsqrt28pd zmm30, zmm29{sae} # AVX512ER
vrsqrt28pd zmm30, ZMMWORD PTR [rcx] # AVX512ER
vrsqrt28pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrsqrt28pd zmm30, [rcx]{1to8} # AVX512ER
vrsqrt28pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512ER Disp8
vrsqrt28pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512ER
vrsqrt28pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512ER Disp8
vrsqrt28pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512ER
vrsqrt28pd zmm30, [rdx+1016]{1to8} # AVX512ER Disp8
vrsqrt28pd zmm30, [rdx+1024]{1to8} # AVX512ER
vrsqrt28pd zmm30, [rdx-1024]{1to8} # AVX512ER Disp8
vrsqrt28pd zmm30, [rdx-1032]{1to8} # AVX512ER
vrsqrt28ss xmm30{k7}, xmm29, xmm28 # AVX512ER
vrsqrt28ss xmm30{k7}{z}, xmm29, xmm28 # AVX512ER
vrsqrt28ss xmm30{k7}, xmm29, xmm28{sae} # AVX512ER
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512ER
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512ER Disp8
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512ER
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512ER Disp8
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512ER
vrsqrt28sd xmm30{k7}, xmm29, xmm28 # AVX512ER
vrsqrt28sd xmm30{k7}{z}, xmm29, xmm28 # AVX512ER
vrsqrt28sd xmm30{k7}, xmm29, xmm28{sae} # AVX512ER
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512ER
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512ER
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512ER Disp8
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512ER
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512ER Disp8
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512ER
|
stsp/binutils-ia16
| 1,387
|
gas/testsuite/gas/i386/wrap32.s
|
.text
wrap:
mov $500 - 0x100, %eax
mov $500 + 0xffffff00, %edx
mov $val - 0x100, %eax
mov $val + 0xffffff00, %edx
mov $sym - 0x100, %eax
mov $sym + 0xffffff00, %edx
mov $sym + 500 - 0x100, %eax
mov $sym + 500 + 0xffffff00, %edx
movl $500 - 0x100, (%eax)
movl $500 + 0xffffff00, (%edx)
movl $val - 0x100, (%eax)
movl $val + 0xffffff00, (%edx)
movl $sym - 0x100, (%eax)
movl $sym + 0xffffff00, (%edx)
movl $sym + 500 - 0x100, (%eax)
movl $sym + 500 + 0xffffff00, (%edx)
add $500 - 0x100, %ecx
add $500 + 0xffffff00, %edx
add $val - 0x100, %ecx
add $val + 0xffffff00, %edx
add $sym - 0x100, %ecx
add $sym + 0xffffff00, %edx
add $sym + 500 - 0x100, %ecx
add $sym + 500 + 0xffffff00, %edx
addl $500 - 0x100, (%eax)
addl $500 + 0xffffff00, (%edx)
addl $val - 0x100, (%eax)
addl $val + 0xffffff00, (%edx)
addl $sym - 0x100, (%eax)
addl $sym + 0xffffff00, (%edx)
addl $sym + 500 - 0x100, (%eax)
addl $sym + 500 + 0xffffff00, (%edx)
ret
.data
.long 500 - 0x100
.long 500 + 0xffffff00
.long val - 0x100
.long val + 0xffffff00
.long sym - 0x100
.long sym + 0xffffff00
.long sym + 500 - 0x100
.long sym + 500 + 0xffffff00
.slong 500 - 0x8fffff00
.slong 500 + 0x7fffff00
.slong val - 0x8fffff00
.slong val + 0x7fffff00
.slong sym - 0x8fffff00
.slong sym + 0x7fffff00
.slong sym + 500 - 0x8fffff00
.slong sym + 500 + 0x7fffff00
.equ val, 400
|
stsp/binutils-ia16
| 1,522
|
gas/testsuite/gas/i386/x86-64-nops.s
|
.text
.byte 0x0f, 0x1f, 0x0
.byte 0x0f, 0x1f, 0x40, 0x0
.byte 0x0f, 0x1f, 0x44, 0x0, 0x0
.byte 0x66, 0x0f, 0x1f, 0x44, 0x0, 0x0
.byte 0x0f, 0x1f, 0x80, 0x0, 0x0, 0x0, 0x0
.byte 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
.byte 0x66, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
.byte 0x66, 0x2e, 0x0f, 0x1f, 0x84, 0x0, 0x0, 0x0, 0x0, 0x0
# reg,reg
.byte 0x0f, 0x19, 0xff
.byte 0x0f, 0x1a, 0xff
.byte 0x0f, 0x1b, 0xff
.byte 0x0f, 0x1c, 0xff
.byte 0x0f, 0x1d, 0xff
.byte 0x0f, 0x1e, 0xff
.byte 0x0f, 0x1f, 0xff
# with base and imm8
.byte 0x0f, 0x19, 0x5A, 0x22
.byte 0x0f, 0x1c, 0x5A, 0x22
.byte 0x0f, 0x1d, 0x5A, 0x22
.byte 0x0f, 0x1e, 0x5A, 0x22
.byte 0x0f, 0x1f, 0x5A, 0x22
# with sib and imm32
.byte 0x0f, 0x19, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1c, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1d, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1e, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x1f, 0x9C, 0x1D, 0x11, 0x22, 0x33, 0x44
.byte 0x0f, 0x19, 0x04, 0x60
.byte 0x0f, 0x1c, 0x0c, 0x60
.byte 0x0f, 0x1d, 0x04, 0x60
.byte 0x0f, 0x1e, 0x04, 0x60
.byte 0x0f, 0x1f, 0x04, 0x60
.byte 0x0f, 0x19, 0x04, 0x59
.byte 0x0f, 0x1c, 0x0c, 0x59
.byte 0x0f, 0x1d, 0x04, 0x59
.byte 0x0f, 0x1e, 0x04, 0x59
.byte 0x0f, 0x1f, 0x04, 0x59
nop %rax
nop %eax
nop %ax
nopq (%rax)
nopl (%rax)
nopw (%rax)
nopq %rax
nopl %eax
nopw %ax
nop %r10
nop %r10d
nop %r10w
nopq (%r10)
nopl (%r10)
nopw (%r10)
nopq %r10
nopl %r10d
nopw %r10w
|
stsp/binutils-ia16
| 4,435
|
gas/testsuite/gas/i386/avx512vl_gfni.s
|
# Check 32bit AVX512VL,GFNI instructions
.allow_index_reg
.text
_start:
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512VL,GFNI
vgf2p8affineqb $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8affineqb $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8affineqb $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineqb $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512VL,GFNI
vgf2p8affineqb $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8affineqb $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8affineqb $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineqb $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512VL,GFNI
vgf2p8affineinvqb $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512VL,GFNI
vgf2p8affineinvqb $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8affineinvqb $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8mulb %xmm4, %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8mulb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512VL,GFNI
vgf2p8mulb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512VL,GFNI
vgf2p8mulb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512VL,GFNI Disp8
vgf2p8mulb %ymm4, %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8mulb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512VL,GFNI
vgf2p8mulb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512VL,GFNI
vgf2p8mulb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512VL,GFNI Disp8
.intel_syntax noprefix
vgf2p8affineqb xmm6{k7}, xmm5, xmm4, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512VL,GFNI
vgf2p8affineqb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,GFNI
vgf2p8affineqb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb ymm6{k7}, ymm5, ymm4, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512VL,GFNI
vgf2p8affineqb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,GFNI
vgf2p8affineqb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512VL,GFNI Disp8
vgf2p8affineqb ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb xmm6{k7}, xmm5, xmm4, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,GFNI
vgf2p8affineinvqb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb ymm6{k7}, ymm5, ymm4, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512VL,GFNI
vgf2p8affineinvqb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,GFNI
vgf2p8affineinvqb ymm6{k7}, ymm5, [eax]{1to4}, 123 # AVX512VL,GFNI
vgf2p8affineinvqb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512VL,GFNI Disp8
vgf2p8affineinvqb ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512VL,GFNI Disp8
vgf2p8mulb xmm6{k7}, xmm5, xmm4 # AVX512VL,GFNI
vgf2p8mulb xmm6{k7}{z}, xmm5, xmm4 # AVX512VL,GFNI
vgf2p8mulb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512VL,GFNI
vgf2p8mulb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512VL,GFNI Disp8
vgf2p8mulb ymm6{k7}, ymm5, ymm4 # AVX512VL,GFNI
vgf2p8mulb ymm6{k7}{z}, ymm5, ymm4 # AVX512VL,GFNI
vgf2p8mulb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512VL,GFNI
vgf2p8mulb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512VL,GFNI Disp8
|
stsp/binutils-ia16
| 3,004
|
gas/testsuite/gas/i386/bundle.s
|
.bundle_align_mode 5
# We use these macros to test each pattern at every offset from
# bundle alignment, i.e. [0,31].
.macro offset_insn insn_name, offset
.p2align 5
\insn_name\()_offset_\offset\():
.if \offset
.space \offset, 0xf4
.endif
\insn_name
.endm
.macro test_offsets insn_name
offset_insn \insn_name, 0
offset_insn \insn_name, 1
offset_insn \insn_name, 2
offset_insn \insn_name, 3
offset_insn \insn_name, 4
offset_insn \insn_name, 5
offset_insn \insn_name, 6
offset_insn \insn_name, 7
offset_insn \insn_name, 8
offset_insn \insn_name, 9
offset_insn \insn_name, 10
offset_insn \insn_name, 11
offset_insn \insn_name, 12
offset_insn \insn_name, 13
offset_insn \insn_name, 14
offset_insn \insn_name, 15
offset_insn \insn_name, 16
offset_insn \insn_name, 17
offset_insn \insn_name, 18
offset_insn \insn_name, 19
offset_insn \insn_name, 20
offset_insn \insn_name, 21
offset_insn \insn_name, 22
offset_insn \insn_name, 23
offset_insn \insn_name, 24
offset_insn \insn_name, 25
offset_insn \insn_name, 26
offset_insn \insn_name, 27
offset_insn \insn_name, 28
offset_insn \insn_name, 29
offset_insn \insn_name, 30
offset_insn \insn_name, 31
.endm
# These are vanilla (non-relaxed) instructions of each length.
.macro test_1
inc %eax
.endm
.macro test_2
add %eax,%eax
.endm
.macro test_3
and $3,%eax
.endm
.macro test_4
lock andl $3,(%eax)
.endm
.macro test_5
mov $0xaabbccdd,%eax
.endm
.macro test_6
movl %eax,0xaabbccdd(%esi)
.endm
.macro test_7
movl $0xaabbccdd,0x7f(%esi)
.endm
.macro test_8
lock addl $0xaabbccdd,0x10(%esi)
.endm
.macro test_9
lock addl $0xaabbccdd,%fs:0x10(%esi)
.endm
.macro test_10
movl $0xaabbccdd,0x7ff(%esi)
.endm
.macro test_11
lock addl $0xaabbccdd,0x7ff(%esi)
.endm
.macro test_12
lock addl $0xaabbccdd,%fs:0x7ff(%esi)
.endm
test_offsets test_1
test_offsets test_2
test_offsets test_3
test_offsets test_4
test_offsets test_5
test_offsets test_6
test_offsets test_7
test_offsets test_8
test_offsets test_9
test_offsets test_10
test_offsets test_11
test_offsets test_12
# The only relaxation cases are the jump instructions.
# For each of the three flavors of jump (unconditional, conditional,
# and conditional with prediction), we test a case that can be relaxed
# to its shortest form, and one that must use the long form.
.macro jmp_2
jmp jmp_2_\@
movl $0xdeadbeef,%eax
jmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro jmp_5
jmp jmp_5_\@
.rept 128
inc %eax
.endr
jmp_5_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_2
jz cjmp_2_\@
movl $0xdeadbeef,%eax
cjmp_2_\@\():
movl $0xb00b,%eax
.endm
.macro cjmp_6
jz cjmp_6_\@
.rept 128
inc %eax
.endr
cjmp_6_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_3
jz,pt pjmp_3_\@
movl $0xdeadbeef,%eax
pjmp_3_\@\():
movl $0xb00b,%eax
.endm
.macro pjmp_7
jz,pt pjmp_7_\@
.rept 128
inc %eax
.endr
pjmp_7_\@\():
movl $0xb00b,%eax
.endm
test_offsets jmp_2
test_offsets cjmp_2
test_offsets pjmp_3
test_offsets jmp_5
test_offsets cjmp_6
test_offsets pjmp_7
.p2align 5
hlt
|
stsp/binutils-ia16
| 1,771
|
gas/testsuite/gas/i386/dwarf2-line-2.s
|
.file "dwarf2-test.c"
.text
.section .text.startup,"ax",@progbits
.p2align 4
.globl main
.type main, @function
main:
.cfi_startproc
nop
ret
.cfi_endproc
.size main, .-main
.text
.section .debug_info,"",%progbits
.long 0x0
.value 0x2
.long .Ldebug_abbrev0
.byte 0x8
.uleb128 0x1
.section .debug_abbrev,"",@progbits
.Ldebug_abbrev0:
.uleb128 0x0 # (abbrev code)
.uleb128 0x0 # (abbrev code)
.uleb128 0x0 # (abbrev code)
# A non-empty .debug_line section is ok when not using .loc directives
.section .debug_line
.Lline1_begin:
.4byte .Lline1_end - .Lline1_start /* Initial length */
.Lline1_start:
.2byte 2 /* Version */
.4byte .Lline1_lines - .Lline1_hdr /* header_length */
.Lline1_hdr:
.byte 1 /* Minimum insn length */
.byte 1 /* default_is_stmt */
.byte 1 /* line_base */
.byte 1 /* line_range */
.byte 0x10 /* opcode_base */
/* Standard lengths */
.byte 0
.byte 1
.byte 1
.byte 1
.byte 1
.byte 0
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 1
.byte 0
.byte 0
.byte 0
/* Include directories */
.byte 0
/* File names */
.ascii "file1.txt\0"
.uleb128 0
.uleb128 0
.uleb128 0
.byte 0
.Lline1_lines:
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lbegin_func_cu1
.byte 3 /* DW_LNS_advance_line */
.sleb128 3 /* ... to 4 */
.byte 1 /* DW_LNS_copy */
.byte 1 /* DW_LNS_copy (second time as an end-of-prologue marker) */
.byte 0 /* DW_LNE_set_address */
.uleb128 5
.byte 2
.4byte .Lend_func_cu1
.byte 0 /* DW_LNE_end_of_sequence */
.uleb128 1
.byte 1
|
stsp/binutils-ia16
| 1,386
|
gas/testsuite/gas/i386/vp2intersect.s
|
# Check AVX512_VP2INTERSECT new instructions.
# NOTE(review): assembler test fixture — every instruction line below is
# compared against an expected-disassembly dump, so the instruction text
# must stay byte-identical.  Covers zmm/ymm/xmm register, plain-memory and
# broadcast ({1toN}) operand forms for both the d and q element widths.
.text
vp2intersectd %zmm1, %zmm2, %k3
vp2intersectd 64(%eax), %zmm2, %k3
vp2intersectd 8(%eax){1to16}, %zmm2, %k3
vp2intersectd %ymm1, %ymm2, %k3
vp2intersectd 32(%eax), %ymm2, %k3
vp2intersectd 8(%eax){1to8}, %ymm2, %k3
vp2intersectd %xmm1, %xmm2, %k3
vp2intersectd 16(%eax), %xmm2, %k3
vp2intersectd 8(%eax){1to4}, %xmm2, %k3
vp2intersectq %zmm1, %zmm2, %k3
vp2intersectq 64(%eax), %zmm2, %k3
vp2intersectq 8(%eax){1to8}, %zmm2, %k3
vp2intersectq %ymm1, %ymm2, %k3
vp2intersectq 32(%eax), %ymm2, %k3
vp2intersectq 8(%eax){1to4}, %ymm2, %k3
vp2intersectq %xmm1, %xmm2, %k3
vp2intersectq 16(%eax), %xmm2, %k3
vp2intersectq 8(%eax){1to2}, %xmm2, %k3
# Same operand forms again in Intel syntax (destination mask first).
.intel_syntax noprefix
vp2intersectd k3, zmm2, zmm1
vp2intersectd k3, zmm2, 64[eax]
vp2intersectd k3, zmm2, dword bcst 8[eax]
vp2intersectd k3, ymm2, ymm1
vp2intersectd k3, ymm2, 32[eax]
vp2intersectd k3, ymm2, dword bcst 8[eax]
vp2intersectd k3, xmm2, xmm1
vp2intersectd k3, xmm2, 16[eax]
vp2intersectd k3, xmm2, dword bcst 8[eax]
vp2intersectq k3, zmm2, zmm1
vp2intersectq k3, zmm2, 64[eax]
vp2intersectq k3, zmm2, qword bcst 8[eax]
vp2intersectq k3, ymm2, ymm1
vp2intersectq k3, ymm2, 32[eax]
vp2intersectq k3, ymm2, qword bcst 8[eax]
vp2intersectq k3, xmm2, xmm1
vp2intersectq k3, xmm2, 16[eax]
vp2intersectq k3, xmm2, qword bcst 8[eax]
# ==== stsp/binutils-ia16 — gas/testsuite/gas/i386/hlebad.s (10,956 bytes) ====
# Check 32bit unsupported HLE instructions
# NOTE(review): negative-test fixture — these xacquire/xrelease uses are
# deliberately invalid or unsupported (wrong operand kinds, missing lock,
# malformed mnemonics such as "cmpxchg8bq") so the assembler's diagnostics
# can be checked.  Do not "fix" any line; the text must stay byte-identical
# to match the expected error output.
.allow_index_reg
.text
_start:
# Tests for op imm8 al
xacquire adc $100,%al
xacquire lock adc $100,%al
lock xacquire adc $100,%al
xrelease adc $100,%al
xrelease lock adc $100,%al
lock xrelease adc $100,%al
# Tests for op imm16 ax
xacquire adc $1000,%ax
xacquire lock adc $1000,%ax
lock xacquire adc $1000,%ax
xrelease adc $1000,%ax
xrelease lock adc $1000,%ax
lock xrelease adc $1000,%ax
# Tests for op imm32 eax
xacquire adc $10000000,%eax
xacquire lock adc $10000000,%eax
lock xacquire adc $10000000,%eax
xrelease adc $10000000,%eax
xrelease lock adc $10000000,%eax
lock xrelease adc $10000000,%eax
# Tests for op imm8 regb/m8
xacquire adcb $100,%cl
xacquire lock adcb $100,%cl
lock xacquire adcb $100,%cl
xrelease adcb $100,%cl
xrelease lock adcb $100,%cl
lock xrelease adcb $100,%cl
xacquire adcb $100,(%ecx)
xrelease adcb $100,(%ecx)
# Tests for op imm16 regs/m16
xacquire adcw $1000,%cx
xacquire lock adcw $1000,%cx
lock xacquire adcw $1000,%cx
xrelease adcw $1000,%cx
xrelease lock adcw $1000,%cx
lock xrelease adcw $1000,%cx
xacquire adcw $1000,(%ecx)
xrelease adcw $1000,(%ecx)
# Tests for op imm32 regl/m32
xacquire adcl $10000000,%ecx
xacquire lock adcl $10000000,%ecx
lock xacquire adcl $10000000,%ecx
xrelease adcl $10000000,%ecx
xrelease lock adcl $10000000,%ecx
lock xrelease adcl $10000000,%ecx
xacquire adcl $10000000,(%ecx)
xrelease adcl $10000000,(%ecx)
# Tests for op imm8 regs/m16
xacquire adcw $100,%cx
xacquire lock adcw $100,%cx
lock xacquire adcw $100,%cx
xrelease adcw $100,%cx
xrelease lock adcw $100,%cx
lock xrelease adcw $100,%cx
xacquire adcw $100,(%ecx)
xrelease adcw $100,(%ecx)
# Tests for op imm8 regl/m32
xacquire adcl $100,%ecx
xacquire lock adcl $100,%ecx
lock xacquire adcl $100,%ecx
xrelease adcl $100,%ecx
xrelease lock adcl $100,%ecx
lock xrelease adcl $100,%ecx
xacquire adcl $100,(%ecx)
xrelease adcl $100,(%ecx)
# Tests for op imm8 regb/m8
xacquire adcb $100,%cl
xacquire lock adcb $100,%cl
lock xacquire adcb $100,%cl
xrelease adcb $100,%cl
xrelease lock adcb $100,%cl
lock xrelease adcb $100,%cl
xacquire adcb $100,(%ecx)
xrelease adcb $100,(%ecx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adcb %al,%cl
xacquire lock adcb %al,%cl
lock xacquire adcb %al,%cl
xrelease adcb %al,%cl
xrelease lock adcb %al,%cl
lock xrelease adcb %al,%cl
xacquire adcb %al,(%ecx)
xrelease adcb %al,(%ecx)
xacquire adcb %cl,%al
xacquire lock adcb %cl,%al
lock xacquire adcb %cl,%al
xrelease adcb %cl,%al
xrelease lock adcb %cl,%al
lock xrelease adcb %cl,%al
xacquire adcb (%ecx),%al
xacquire lock adcb (%ecx),%al
lock xacquire adcb (%ecx),%al
xrelease adcb (%ecx),%al
xrelease lock adcb (%ecx),%al
lock xrelease adcb (%ecx),%al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adcw %ax,%cx
xacquire lock adcw %ax,%cx
lock xacquire adcw %ax,%cx
xrelease adcw %ax,%cx
xrelease lock adcw %ax,%cx
lock xrelease adcw %ax,%cx
xacquire adcw %ax,(%ecx)
xrelease adcw %ax,(%ecx)
xacquire adcw %cx,%ax
xacquire lock adcw %cx,%ax
lock xacquire adcw %cx,%ax
xrelease adcw %cx,%ax
xrelease lock adcw %cx,%ax
lock xrelease adcw %cx,%ax
xacquire adcw (%ecx),%ax
xacquire lock adcw (%ecx),%ax
lock xacquire adcw (%ecx),%ax
xrelease adcw (%ecx),%ax
xrelease lock adcw (%ecx),%ax
lock xrelease adcw (%ecx),%ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adcl %eax,%ecx
xacquire lock adcl %eax,%ecx
lock xacquire adcl %eax,%ecx
xrelease adcl %eax,%ecx
xrelease lock adcl %eax,%ecx
lock xrelease adcl %eax,%ecx
xacquire adcl %eax,(%ecx)
xrelease adcl %eax,(%ecx)
xacquire adcl %ecx,%eax
xacquire lock adcl %ecx,%eax
lock xacquire adcl %ecx,%eax
xrelease adcl %ecx,%eax
xrelease lock adcl %ecx,%eax
lock xrelease adcl %ecx,%eax
xacquire adcl (%ecx),%eax
xacquire lock adcl (%ecx),%eax
lock xacquire adcl (%ecx),%eax
xrelease adcl (%ecx),%eax
xrelease lock adcl (%ecx),%eax
lock xrelease adcl (%ecx),%eax
# Tests for op regs, regs/m16
xacquire btcw %ax,%cx
xacquire lock btcw %ax,%cx
lock xacquire btcw %ax,%cx
xrelease btcw %ax,%cx
xrelease lock btcw %ax,%cx
lock xrelease btcw %ax,%cx
xacquire btcw %ax,(%ecx)
xrelease btcw %ax,(%ecx)
# Tests for op regl regl/m32
xacquire btcl %eax,%ecx
xacquire lock btcl %eax,%ecx
lock xacquire btcl %eax,%ecx
xrelease btcl %eax,%ecx
xrelease lock btcl %eax,%ecx
lock xrelease btcl %eax,%ecx
xacquire btcl %eax,(%ecx)
xrelease btcl %eax,(%ecx)
# Tests for op regb/m8
xacquire decb %cl
xacquire lock decb %cl
lock xacquire decb %cl
xrelease decb %cl
xrelease lock decb %cl
lock xrelease decb %cl
xacquire decb (%ecx)
xrelease decb (%ecx)
# Tests for op regs/m16
xacquire decw %cx
xacquire lock decw %cx
lock xacquire decw %cx
xrelease decw %cx
xrelease lock decw %cx
lock xrelease decw %cx
xacquire decw (%ecx)
xrelease decw (%ecx)
# Tests for op regl/m32
xacquire decl %ecx
xacquire lock decl %ecx
lock xacquire decl %ecx
xrelease decl %ecx
xrelease lock decl %ecx
lock xrelease decl %ecx
xacquire decl (%ecx)
xrelease decl (%ecx)
# Tests for op m64
# (deliberately malformed mnemonic — bad "q" suffix on cmpxchg8b)
xacquire cmpxchg8bq (%ecx)
xrelease cmpxchg8bq (%ecx)
# Tests for op regb, regb/m8
xacquire cmpxchgb %cl,%al
xacquire lock cmpxchgb %cl,%al
lock xacquire cmpxchgb %cl,%al
xrelease cmpxchgb %cl,%al
xrelease lock cmpxchgb %cl,%al
lock xrelease cmpxchgb %cl,%al
xacquire cmpxchgb %cl,(%ecx)
xrelease cmpxchgb %cl,(%ecx)
# Same test matrix repeated in Intel syntax.
.intel_syntax noprefix
# Tests for op imm8 al
xacquire adc al,100
xacquire lock adc al,100
lock xacquire adc al,100
xrelease adc al,100
xrelease lock adc al,100
lock xrelease adc al,100
# Tests for op imm16 ax
xacquire adc ax,1000
xacquire lock adc ax,1000
lock xacquire adc ax,1000
xrelease adc ax,1000
xrelease lock adc ax,1000
lock xrelease adc ax,1000
# Tests for op imm32 eax
xacquire adc eax,10000000
xacquire lock adc eax,10000000
lock xacquire adc eax,10000000
xrelease adc eax,10000000
xrelease lock adc eax,10000000
lock xrelease adc eax,10000000
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [ecx],100
xrelease adc BYTE PTR [ecx],100
# Tests for op imm16 regs/m16
xacquire adc cx,1000
xacquire lock adc cx,1000
lock xacquire adc cx,1000
xrelease adc cx,1000
xrelease lock adc cx,1000
lock xrelease adc cx,1000
xacquire adc WORD PTR [ecx],1000
xrelease adc WORD PTR [ecx],1000
# Tests for op imm32 regl/m32
xacquire adc ecx,10000000
xacquire lock adc ecx,10000000
lock xacquire adc ecx,10000000
xrelease adc ecx,10000000
xrelease lock adc ecx,10000000
lock xrelease adc ecx,10000000
xacquire adc DWORD PTR [ecx],10000000
xrelease adc DWORD PTR [ecx],10000000
# Tests for op imm8 regs/m16
xacquire adc cx,100
xacquire lock adc cx,100
lock xacquire adc cx,100
xrelease adc cx,100
xrelease lock adc cx,100
lock xrelease adc cx,100
xacquire adc WORD PTR [ecx],100
xrelease adc WORD PTR [ecx],100
# Tests for op imm8 regl/m32
xacquire adc ecx,100
xacquire lock adc ecx,100
lock xacquire adc ecx,100
xrelease adc ecx,100
xrelease lock adc ecx,100
lock xrelease adc ecx,100
xacquire adc DWORD PTR [ecx],100
xrelease adc DWORD PTR [ecx],100
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [ecx],100
xrelease adc BYTE PTR [ecx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adc cl,al
xacquire lock adc cl,al
lock xacquire adc cl,al
xrelease adc cl,al
xrelease lock adc cl,al
lock xrelease adc cl,al
xacquire adc BYTE PTR [ecx],al
xrelease adc BYTE PTR [ecx],al
xacquire adc al,cl
xacquire lock adc al,cl
lock xacquire adc al,cl
xrelease adc al,cl
xrelease lock adc al,cl
lock xrelease adc al,cl
xacquire adc al,BYTE PTR [ecx]
xacquire lock adc al,BYTE PTR [ecx]
lock xacquire adc al,BYTE PTR [ecx]
xrelease adc al,BYTE PTR [ecx]
xrelease lock adc al,BYTE PTR [ecx]
lock xrelease adc al,BYTE PTR [ecx]
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adc cx,ax
xacquire lock adc cx,ax
lock xacquire adc cx,ax
xrelease adc cx,ax
xrelease lock adc cx,ax
lock xrelease adc cx,ax
xacquire adc WORD PTR [ecx],ax
xrelease adc WORD PTR [ecx],ax
xacquire adc ax,cx
xacquire lock adc ax,cx
lock xacquire adc ax,cx
xrelease adc ax,cx
xrelease lock adc ax,cx
lock xrelease adc ax,cx
xacquire adc ax,WORD PTR [ecx]
xacquire lock adc ax,WORD PTR [ecx]
lock xacquire adc ax,WORD PTR [ecx]
xrelease adc ax,WORD PTR [ecx]
xrelease lock adc ax,WORD PTR [ecx]
lock xrelease adc ax,WORD PTR [ecx]
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adc ecx,eax
xacquire lock adc ecx,eax
lock xacquire adc ecx,eax
xrelease adc ecx,eax
xrelease lock adc ecx,eax
lock xrelease adc ecx,eax
xacquire adc DWORD PTR [ecx],eax
xrelease adc DWORD PTR [ecx],eax
xacquire adc eax,ecx
xacquire lock adc eax,ecx
lock xacquire adc eax,ecx
xrelease adc eax,ecx
xrelease lock adc eax,ecx
lock xrelease adc eax,ecx
xacquire adc eax,DWORD PTR [ecx]
xacquire lock adc eax,DWORD PTR [ecx]
lock xacquire adc eax,DWORD PTR [ecx]
xrelease adc eax,DWORD PTR [ecx]
xrelease lock adc eax,DWORD PTR [ecx]
lock xrelease adc eax,DWORD PTR [ecx]
# Tests for op regs, regs/m16
xacquire btc cx,ax
xacquire lock btc cx,ax
lock xacquire btc cx,ax
xrelease btc cx,ax
xrelease lock btc cx,ax
lock xrelease btc cx,ax
xacquire btc WORD PTR [ecx],ax
xrelease btc WORD PTR [ecx],ax
# Tests for op regl regl/m32
xacquire btc ecx,eax
xacquire lock btc ecx,eax
lock xacquire btc ecx,eax
xrelease btc ecx,eax
xrelease lock btc ecx,eax
lock xrelease btc ecx,eax
xacquire btc DWORD PTR [ecx],eax
xrelease btc DWORD PTR [ecx],eax
# Tests for op regb/m8
xacquire dec cl
xacquire lock dec cl
lock xacquire dec cl
xrelease dec cl
xrelease lock dec cl
lock xrelease dec cl
xacquire dec BYTE PTR [ecx]
xrelease dec BYTE PTR [ecx]
# Tests for op regs/m16
xacquire dec cx
xacquire lock dec cx
lock xacquire dec cx
xrelease dec cx
xrelease lock dec cx
lock xrelease dec cx
xacquire dec WORD PTR [ecx]
xrelease dec WORD PTR [ecx]
# Tests for op regl/m32
xacquire dec ecx
xacquire lock dec ecx
lock xacquire dec ecx
xrelease dec ecx
xrelease lock dec ecx
lock xrelease dec ecx
xacquire dec DWORD PTR [ecx]
xrelease dec DWORD PTR [ecx]
# Tests for op m64
xacquire cmpxchg8b QWORD PTR [ecx]
xrelease cmpxchg8b QWORD PTR [ecx]
# Tests for op regb, regb/m8
xacquire cmpxchg al,cl
xacquire lock cmpxchg al,cl
lock xacquire cmpxchg al,cl
xrelease cmpxchg al,cl
xrelease lock cmpxchg al,cl
lock xrelease cmpxchg al,cl
xacquire cmpxchg BYTE PTR [ecx],cl
xrelease cmpxchg BYTE PTR [ecx],cl
# ==== stsp/binutils-ia16 — gas/testsuite/gas/i386/x86-64-avx2.s (6,557 bytes) ====
# Check x86-64 AVX2 instructions
# NOTE(review): assembler test fixture — instruction text is matched against
# an expected-disassembly dump and must stay byte-identical.  Exercises each
# AVX2 operand pattern in AT&T syntax first, then repeats it in Intel syntax
# (where both "XMMWORD/YMMWORD PTR [...]" and bare "[...]" memory forms are
# assembled).
.allow_index_reg
.text
_start:
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vpmaskmovd (%rcx),%ymm4,%ymm6
vpmaskmovd %ymm4,%ymm6,(%rcx)
vpmaskmovq (%rcx),%ymm4,%ymm6
vpmaskmovq %ymm4,%ymm6,(%rcx)
# Tests for op imm8, ymm/mem256, ymm
vpermpd $7,%ymm6,%ymm2
vpermpd $7,(%rcx),%ymm6
vpermq $7,%ymm6,%ymm2
vpermq $7,(%rcx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vpermd %ymm4,%ymm6,%ymm2
vpermd (%rcx),%ymm6,%ymm2
vpermps %ymm4,%ymm6,%ymm2
vpermps (%rcx),%ymm6,%ymm2
vpsllvd %ymm4,%ymm6,%ymm2
vpsllvd (%rcx),%ymm6,%ymm2
vpsllvq %ymm4,%ymm6,%ymm2
vpsllvq (%rcx),%ymm6,%ymm2
vpsravd %ymm4,%ymm6,%ymm2
vpsravd (%rcx),%ymm6,%ymm2
vpsrlvd %ymm4,%ymm6,%ymm2
vpsrlvd (%rcx),%ymm6,%ymm2
vpsrlvq %ymm4,%ymm6,%ymm2
vpsrlvq (%rcx),%ymm6,%ymm2
# Tests for op mem256, ymm
vmovntdqa (%rcx),%ymm4
# Tests for op ymm, xmm
vbroadcastsd %xmm4,%ymm6
vbroadcastss %xmm4,%ymm6
# Tests for op imm8, ymm/mem256, ymm, ymm
vpblendd $7,%ymm4,%ymm6,%ymm2
vpblendd $7,(%rcx),%ymm6,%ymm2
vperm2i128 $7,%ymm4,%ymm6,%ymm2
vperm2i128 $7,(%rcx),%ymm6,%ymm2
# Tests for op imm8, xmm/mem128, ymm, ymm
vinserti128 $7,%xmm4,%ymm4,%ymm6
vinserti128 $7,(%rcx),%ymm4,%ymm6
# Tests for op mem128, ymm
vbroadcasti128 (%rcx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vpsllvd %xmm4,%xmm6,%xmm2
vpsllvd (%rcx),%xmm6,%xmm7
vpsllvq %xmm4,%xmm6,%xmm2
vpsllvq (%rcx),%xmm6,%xmm7
vpsravd %xmm4,%xmm6,%xmm2
vpsravd (%rcx),%xmm6,%xmm7
vpsrlvd %xmm4,%xmm6,%xmm2
vpsrlvd (%rcx),%xmm6,%xmm7
vpsrlvq %xmm4,%xmm6,%xmm2
vpsrlvq (%rcx),%xmm6,%xmm7
# Tests for op mem128, xmm, xmm
vpmaskmovd (%rcx),%xmm4,%xmm6
vpmaskmovq (%rcx),%xmm4,%xmm6
# Tests for op imm8, ymm, xmm128/mem
vextracti128 $7,%ymm4,%xmm6
vextracti128 $7,%ymm4,(%rcx)
# Tests for op xmm, xmm, mem128
vpmaskmovd %xmm4,%xmm6,(%rcx)
vpmaskmovq %xmm4,%xmm6,(%rcx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vpblendd $7,%xmm4,%xmm6,%xmm2
vpblendd $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm
vpbroadcastq %xmm4,%xmm6
vpbroadcastq (%rcx),%xmm4
# Tests for op xmm/mem64, ymm
vpbroadcastq %xmm4,%ymm6
vpbroadcastq (%rcx),%ymm4
# Tests for op xmm/mem32, ymm
vpbroadcastd %xmm4,%ymm4
vpbroadcastd (%rcx),%ymm4
# Tests for op xmm/mem32, xmm
vpbroadcastd %xmm4,%xmm6
vpbroadcastd (%rcx),%xmm4
# Tests for op xmm/m16, xmm
vpbroadcastw %xmm4,%xmm6
vpbroadcastw (%rcx),%xmm4
# Tests for op xmm/m16, ymm
vpbroadcastw %xmm4,%ymm6
vpbroadcastw (%rcx),%ymm4
# Tests for op xmm/m8, xmm
vpbroadcastb %xmm4,%xmm6
vpbroadcastb (%rcx),%xmm4
# Tests for op xmm/m8, ymm
vpbroadcastb %xmm4,%ymm6
vpbroadcastb (%rcx),%ymm4
# Tests for op xmm, xmm
vbroadcastss %xmm4,%xmm6
# Same test matrix repeated in Intel syntax.
.intel_syntax noprefix
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vpmaskmovd ymm6,ymm4,YMMWORD PTR [rcx]
vpmaskmovd YMMWORD PTR [rcx],ymm6,ymm4
vpmaskmovd ymm6,ymm4,[rcx]
vpmaskmovd [rcx],ymm6,ymm4
vpmaskmovq ymm6,ymm4,YMMWORD PTR [rcx]
vpmaskmovq YMMWORD PTR [rcx],ymm6,ymm4
vpmaskmovq ymm6,ymm4,[rcx]
vpmaskmovq [rcx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermpd ymm2,ymm6,7
vpermpd ymm6,YMMWORD PTR [rcx],7
vpermpd ymm6,[rcx],7
vpermq ymm2,ymm6,7
vpermq ymm6,YMMWORD PTR [rcx],7
vpermq ymm6,[rcx],7
# Tests for op ymm/mem256, ymm, ymm
vpermd ymm2,ymm6,ymm4
vpermd ymm2,ymm6,YMMWORD PTR [rcx]
vpermd ymm2,ymm6,[rcx]
vpermps ymm2,ymm6,ymm4
vpermps ymm2,ymm6,YMMWORD PTR [rcx]
vpermps ymm2,ymm6,[rcx]
vpsllvd ymm2,ymm6,ymm4
vpsllvd ymm2,ymm6,YMMWORD PTR [rcx]
vpsllvd ymm2,ymm6,[rcx]
vpsllvq ymm2,ymm6,ymm4
vpsllvq ymm2,ymm6,YMMWORD PTR [rcx]
vpsllvq ymm2,ymm6,[rcx]
vpsravd ymm2,ymm6,ymm4
vpsravd ymm2,ymm6,YMMWORD PTR [rcx]
vpsravd ymm2,ymm6,[rcx]
vpsrlvd ymm2,ymm6,ymm4
vpsrlvd ymm2,ymm6,YMMWORD PTR [rcx]
vpsrlvd ymm2,ymm6,[rcx]
vpsrlvq ymm2,ymm6,ymm4
vpsrlvq ymm2,ymm6,YMMWORD PTR [rcx]
vpsrlvq ymm2,ymm6,[rcx]
# Tests for op mem256, ymm
vmovntdqa ymm4,YMMWORD PTR [rcx]
vmovntdqa ymm4,[rcx]
# Tests for op ymm, xmm
vbroadcastsd ymm6,xmm4
vbroadcastss ymm6,xmm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vpblendd ymm2,ymm6,ymm4,7
vpblendd ymm2,ymm6,YMMWORD PTR [rcx],7
vpblendd ymm2,ymm6,[rcx],7
vperm2i128 ymm2,ymm6,ymm4,7
vperm2i128 ymm2,ymm6,YMMWORD PTR [rcx],7
vperm2i128 ymm2,ymm6,[rcx],7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinserti128 ymm6,ymm4,xmm4,7
vinserti128 ymm6,ymm4,XMMWORD PTR [rcx],7
vinserti128 ymm6,ymm4,[rcx],7
# Tests for op mem128, ymm
vbroadcasti128 ymm4,XMMWORD PTR [rcx]
vbroadcasti128 ymm4,[rcx]
# Tests for op xmm/mem128, xmm, xmm
vpsllvd xmm2,xmm6,xmm4
vpsllvd xmm7,xmm6,XMMWORD PTR [rcx]
vpsllvd xmm7,xmm6,[rcx]
vpsllvq xmm2,xmm6,xmm4
vpsllvq xmm7,xmm6,XMMWORD PTR [rcx]
vpsllvq xmm7,xmm6,[rcx]
vpsravd xmm2,xmm6,xmm4
vpsravd xmm7,xmm6,XMMWORD PTR [rcx]
vpsravd xmm7,xmm6,[rcx]
vpsrlvd xmm2,xmm6,xmm4
vpsrlvd xmm7,xmm6,XMMWORD PTR [rcx]
vpsrlvd xmm7,xmm6,[rcx]
vpsrlvq xmm2,xmm6,xmm4
vpsrlvq xmm7,xmm6,XMMWORD PTR [rcx]
vpsrlvq xmm7,xmm6,[rcx]
# Tests for op mem128, xmm, xmm
vpmaskmovd xmm6,xmm4,XMMWORD PTR [rcx]
vpmaskmovd xmm6,xmm4,[rcx]
vpmaskmovq xmm6,xmm4,XMMWORD PTR [rcx]
vpmaskmovq xmm6,xmm4,[rcx]
# Tests for op imm8, ymm, xmm128/mem
vextracti128 xmm6,ymm4,7
vextracti128 XMMWORD PTR [rcx],ymm4,7
vextracti128 [rcx],ymm4,7
# Tests for op xmm, xmm, mem128
vpmaskmovd XMMWORD PTR [rcx],xmm6,xmm4
vpmaskmovd [rcx],xmm6,xmm4
vpmaskmovq XMMWORD PTR [rcx],xmm6,xmm4
vpmaskmovq [rcx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
vpblendd xmm2,xmm6,xmm4,7
vpblendd xmm2,xmm6,XMMWORD PTR [rcx],7
vpblendd xmm2,xmm6,[rcx],7
# Tests for op xmm/mem64, xmm
vpbroadcastq xmm6,xmm4
vpbroadcastq xmm4,QWORD PTR [rcx]
vpbroadcastq xmm4,[rcx]
# Tests for op xmm/mem64, ymm
vpbroadcastq ymm6,xmm4
vpbroadcastq ymm4,QWORD PTR [rcx]
vpbroadcastq ymm4,[rcx]
# Tests for op xmm/mem32, ymm
vpbroadcastd ymm4,xmm4
vpbroadcastd ymm4,DWORD PTR [rcx]
vpbroadcastd ymm4,[rcx]
# Tests for op xmm/mem32, xmm
vpbroadcastd xmm6,xmm4
vpbroadcastd xmm4,DWORD PTR [rcx]
vpbroadcastd xmm4,[rcx]
# Tests for op xmm/m16, xmm
vpbroadcastw xmm6,xmm4
vpbroadcastw xmm4,WORD PTR [rcx]
vpbroadcastw xmm4,[rcx]
# Tests for op xmm/m16, ymm
vpbroadcastw ymm6,xmm4
vpbroadcastw ymm4,WORD PTR [rcx]
vpbroadcastw ymm4,[rcx]
# Tests for op xmm/m8, xmm
vpbroadcastb xmm6,xmm4
vpbroadcastb xmm4,BYTE PTR [rcx]
vpbroadcastb xmm4,[rcx]
# Tests for op xmm/m8, ymm
vpbroadcastb ymm6,xmm4
vpbroadcastb ymm4,BYTE PTR [rcx]
vpbroadcastb ymm4,[rcx]
# Tests for op xmm, xmm
vbroadcastss xmm6,xmm4
# ==== stsp/binutils-ia16 — gas/testsuite/gas/i386/avx512vl_vpclmulqdq.s (2,258 bytes) ====
# Check 32bit AVX512VL,VPCLMULQDQ instructions
# NOTE(review): assembler test fixture — instruction text is matched against
# an expected-disassembly dump and must stay byte-identical.  Covers VEX
# encoding by default and EVEX encoding when forced via the {evex} prefix,
# including the hq/lq pseudo-mnemonic aliases and Disp8-compressed offsets.
.allow_index_reg
.text
_start:
vpclmulqdq $0xab, %xmm2, %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, -123456(%esp,%esi,8), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 2032(%edx), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq $0xab, %ymm1, %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, -123456(%esp,%esi,8), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
vpclmulqdq $123, 4064(%edx), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %xmm2, %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, -123456(%esp,%esi,8), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 2032(%edx), %xmm2, %xmm3 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq $0xab, %ymm1, %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, -123456(%esp,%esi,8), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq $123, 4064(%edx), %ymm5, %ymm4 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulhqhqdq %xmm2, %xmm3, %xmm4
{evex} vpclmulhqlqdq %xmm3, %xmm4, %xmm5
{evex} vpclmullqhqdq %xmm4, %xmm5, %xmm6
{evex} vpclmullqlqdq %xmm5, %xmm6, %xmm7
{evex} vpclmulhqhqdq %ymm1, %ymm2, %ymm3
{evex} vpclmulhqlqdq %ymm2, %ymm3, %ymm4
{evex} vpclmullqhqdq %ymm3, %ymm4, %ymm5
{evex} vpclmullqlqdq %ymm4, %ymm5, %ymm6
# Same tests repeated in Intel syntax.
.intel_syntax noprefix
vpclmulqdq xmm3, xmm5, xmm3, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm3, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq xmm3, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
vpclmulqdq ymm2, ymm2, ymm2, 0xab # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm2, ymm2, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
vpclmulqdq ymm2, ymm2, YMMWORD PTR [edx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq xmm3, xmm5, xmm3, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm3, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq xmm3, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512VL,VPCLMULQDQ Disp8
{evex} vpclmulqdq ymm2, ymm2, ymm2, 0xab # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm2, ymm2, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512VL,VPCLMULQDQ
{evex} vpclmulqdq ymm2, ymm2, YMMWORD PTR [edx+4064], 123 # AVX512VL,VPCLMULQDQ Disp8
# ==== stsp/binutils-ia16 — gas/testsuite/gas/i386/align-branch-1.s (1,364 bytes) ====
# NOTE(review): assembler test fixture — presumably input for the
# -malign-branch branch-alignment feature (filename "align-branch-1.s");
# the instruction sequence and its exact lengths/positions are what is
# under test, so no line may be altered, reordered, or deleted.
.text
.globl foo
.p2align 4
foo:
movl %eax, %gs:0x1
pushl %ebp
pushl %ebp
pushl %ebp
pushl %ebp
movl %esp, %ebp
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
cmp %eax, %ebp
je .L_2
movl %esi, -12(%ebx)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
popl %ebp
popl %ebp
je .L_2
popl %ebp
je .L_2
movl %eax, -4(%esp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
jmp .L_3
jmp .L_3
jmp .L_3
movl %eax, -4(%ebp)
movl %esi, -12(%ebp)
movl %edi, -8(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
movl %esi, -12(%ebp)
popl %ebp
popl %ebp
cmp %eax, %ebp
je .L_2
jmp .L_3
.L_2:
movl -12(%ebp), %eax
movl %eax, -4(%ebp)
.L_3:
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, 12(%ebp)
jmp bar
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, -1200(%ebp)
movl %esi, (%ebp)
je .L_3
je .L_3
# ==== stsp/binutils-ia16 — gas/testsuite/gas/i386/x86-64-avx512vnni.s (2,819 bytes) ====
# Check 64bit AVX512VNNI instructions
# NOTE(review): assembler test fixture — instruction text is matched against
# an expected-disassembly dump and must stay byte-identical.  Covers all four
# VNNI dot-product instructions with register, masked {kN}/{kN}{z}, indexed
# memory, Disp8-compressed, and {1to16} broadcast operand forms.
.allow_index_reg
.text
_start:
vpdpwssd %zmm17, %zmm18, %zmm18 # AVX512VNNI
vpdpwssd %zmm17, %zmm18, %zmm18{%k5} # AVX512VNNI
vpdpwssd %zmm17, %zmm18, %zmm18{%k5}{z} # AVX512VNNI
vpdpwssd 0x123(%rax,%r14,8), %zmm18, %zmm18 # AVX512VNNI
vpdpwssd 8128(%rdx), %zmm18, %zmm18 # AVX512VNNI Disp8
vpdpwssd 508(%rdx){1to16}, %zmm18, %zmm18 # AVX512VNNI Disp8
vpdpwssds %zmm17, %zmm21, %zmm21 # AVX512VNNI
vpdpwssds %zmm17, %zmm21, %zmm21{%k4} # AVX512VNNI
vpdpwssds %zmm17, %zmm21, %zmm21{%k4}{z} # AVX512VNNI
vpdpwssds 0x123(%rax,%r14,8), %zmm21, %zmm21 # AVX512VNNI
vpdpwssds 8128(%rdx), %zmm21, %zmm21 # AVX512VNNI Disp8
vpdpwssds 508(%rdx){1to16}, %zmm21, %zmm21 # AVX512VNNI Disp8
vpdpbusd %zmm18, %zmm21, %zmm23 # AVX512VNNI
vpdpbusd %zmm18, %zmm21, %zmm23{%k4} # AVX512VNNI
vpdpbusd %zmm18, %zmm21, %zmm23{%k4}{z} # AVX512VNNI
vpdpbusd 0x123(%rax,%r14,8), %zmm21, %zmm23 # AVX512VNNI
vpdpbusd 8128(%rdx), %zmm21, %zmm23 # AVX512VNNI Disp8
vpdpbusd 508(%rdx){1to16}, %zmm21, %zmm23 # AVX512VNNI Disp8
vpdpbusds %zmm25, %zmm24, %zmm24 # AVX512VNNI
vpdpbusds %zmm25, %zmm24, %zmm24{%k7} # AVX512VNNI
vpdpbusds %zmm25, %zmm24, %zmm24{%k7}{z} # AVX512VNNI
vpdpbusds 0x123(%rax,%r14,8), %zmm24, %zmm24 # AVX512VNNI
vpdpbusds 8128(%rdx), %zmm24, %zmm24 # AVX512VNNI Disp8
vpdpbusds 508(%rdx){1to16}, %zmm24, %zmm24 # AVX512VNNI Disp8
# Same tests repeated in Intel syntax.
.intel_syntax noprefix
vpdpwssd zmm28, zmm27, zmm17 # AVX512VNNI
vpdpwssd zmm28{k7}, zmm27, zmm17 # AVX512VNNI
vpdpwssd zmm28{k7}{z}, zmm27, zmm17 # AVX512VNNI
vpdpwssd zmm28, zmm27, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VNNI
vpdpwssd zmm28, zmm27, ZMMWORD PTR [rdx+8128] # AVX512VNNI Disp8
vpdpwssd zmm28, zmm27, [rdx+508]{1to16} # AVX512VNNI Disp8
vpdpwssds zmm29, zmm28, zmm17 # AVX512VNNI
vpdpwssds zmm29{k3}, zmm28, zmm17 # AVX512VNNI
vpdpwssds zmm29{k3}{z}, zmm28, zmm17 # AVX512VNNI
vpdpwssds zmm29, zmm28, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VNNI
vpdpwssds zmm29, zmm28, ZMMWORD PTR [rdx+8128] # AVX512VNNI Disp8
vpdpwssds zmm29, zmm28, [rdx+508]{1to16} # AVX512VNNI Disp8
vpdpbusd zmm28, zmm24, zmm21 # AVX512VNNI
vpdpbusd zmm28{k6}, zmm24, zmm21 # AVX512VNNI
vpdpbusd zmm28{k6}{z}, zmm24, zmm21 # AVX512VNNI
vpdpbusd zmm28, zmm24, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VNNI
vpdpbusd zmm28, zmm24, ZMMWORD PTR [rdx+8128] # AVX512VNNI Disp8
vpdpbusd zmm28, zmm24, [rdx+508]{1to16} # AVX512VNNI Disp8
vpdpbusds zmm20, zmm17, zmm20 # AVX512VNNI
vpdpbusds zmm20{k2}, zmm17, zmm20 # AVX512VNNI
vpdpbusds zmm20{k2}{z}, zmm17, zmm20 # AVX512VNNI
vpdpbusds zmm20, zmm17, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VNNI
vpdpbusds zmm20, zmm17, ZMMWORD PTR [rdx+8128] # AVX512VNNI Disp8
vpdpbusds zmm20, zmm17, [rdx+508]{1to16} # AVX512VNNI Disp8
# ==== stsp/binutils-ia16 — gas/testsuite/gas/i386/avx512bw_vl-wig.s (100,338 bytes) ====
# Check 32bit AVX512{BW,VL} WIG instructions
.allow_index_reg
.text
_start:
vpabsb %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsb %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsb 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsb 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsw 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsw 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
# AVX512BW + AVX512VL test vectors (32-bit addressing forms).
# Each mnemonic is exercised as: register/register with merge masking
# ({%k7}), with zeroing masking ({%k7}{z}), and with memory operands.
# The paired displacements straddle the EVEX Disp8*N compression limit:
#   +/-2032 vs +/-2048..2064  -> m128 operands (N = 16, 127*16 = 2032)
#   +/-4064 vs +/-4096..4128  -> m256 operands (N = 32, 127*32 = 4064)
#   +/-1016 vs +/-1024..1032  -> m64 operands  (N = 8,  127*8  = 1016)
# Lines tagged "Disp8" must assemble with a compressed 8-bit
# displacement; their neighbours must fall back to a 32-bit one.
# The ymm shift-count forms (vpsllw/vpsraw/vpsrlw reg,mem) keep the
# 16-byte scale because the count operand is m128 regardless of the
# destination vector width.
# NOTE(review): do not reorder or edit operands -- the companion .d
# dump file in the testsuite matches these lines positionally.
vpaddusb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
# Compare forms write a mask register (%k5), so no {z} variant exists.
vpcmpeqb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxub (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxub (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxub -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxub -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxuw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxuw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminub (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminub 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminub -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminub (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminub 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminub -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminub -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpminuw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminuw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminuw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminuw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpminuw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpminuw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminuw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminuw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpminuw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpminuw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
# Widening moves read a half-width memory operand: m64 for the xmm
# destination (Disp8*8), m128 for the ymm destination (Disp8*16).
vpmovsxbw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpmovsxbw 1016(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpmovsxbw -1024(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpmovsxbw 2032(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpmovsxbw -2048(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpmovzxbw 1016(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpmovzxbw -1024(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpmovzxbw 2032(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpmovzxbw -2048(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhrsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhrsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhrsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhrsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhuw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmulhuw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhuw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmulhuw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhuw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmulhuw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhuw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmulhuw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmulhw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmulhw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmulhw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmulhw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmulhw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmulhw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmulhw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmullw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmullw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmullw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmullw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmullw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmullw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmullw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmullw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmullw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmullw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpshufb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpshufb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpshufb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpshufb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpshufb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpshufb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshufhw $123, (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpshufhw $123, 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpshufhw $123, -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshufhw $123, (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpshufhw $123, 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpshufhw $123, -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpshuflw $123, (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpshuflw $123, 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpshuflw $123, -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpshuflw $123, (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpshuflw $123, 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpshuflw $123, -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
# Shift-by-count forms: the count is always an xmm register or m128,
# so the ymm variants keep the 2032/2048 (Disp8*16) boundary.
vpsllw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsllw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsllw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsraw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsraw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsrlw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsrlw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw 2032(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw -2048(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
# Shift-by-immediate forms take a full-width memory source, so the
# ymm variants use the 4064/4096 (Disp8*32) boundary again.
vpsrlw $0xab, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsrlw $123, (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpsrlw $123, 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpsrlw $123, -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsrlw $123, (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpsrlw $123, 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpsrlw $123, -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsraw $123, (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpsraw $123, 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpsraw $123, -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsraw $123, (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpsraw $123, 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpsraw $123, -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsubb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsubb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsubsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsubsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsubsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsubsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsubusb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubusb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubusb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsubusb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubusb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubusb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsubusw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubusw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubusw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsubusw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubusw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubusw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubusw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsubw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsubw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsubw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsubw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsubw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhbw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhbw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhbw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhbw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhwd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhwd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhwd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpckhwd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklbw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklbw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklbw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklbw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklwd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklwd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklwd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpunpcklwd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpsllw $123, (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpsllw $123, 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpsllw $123, -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpsllw $123, (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpsllw $123, 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpsllw $123, -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
# Repeat the AVX512{BW,VL} coverage in Intel syntax to exercise the second
# parser: destination first, no % prefixes, explicit XMMWORD/YMMWORD/QWORD
# PTR size overrides.  The AT&T/Intel mix is deliberate in this test file.
# Instruction text is fixture data matched against the expected disassembly
# dump; only comments may change.
.intel_syntax noprefix
vpabsb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpabsb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpabsb xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpabsb xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpabsb ymm6{k7}, ymm5 # AVX512{BW,VL}
vpabsb ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpabsb ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpabsb ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpabsw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpabsw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpabsw xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpabsw xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpabsw ymm6{k7}, ymm5 # AVX512{BW,VL}
vpabsw ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpabsw ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpabsw ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpacksswb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpacksswb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackuswb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackuswb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpalignr adds a trailing ib immediate after the ModRM/displacement.
vpalignr xmm6{k7}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vpalignr xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, xmm4, 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vpalignr ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, ymm4, 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpavgb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpavgb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpavgw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpavgw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# Compare forms write a mask register (k5) under mask k7; no {z} variant
# exists because zero-masking does not apply to mask destinations.
vpcmpeqb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxub xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxub ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminub xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminub ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# Widening moves read a half-width memory operand, so the xmm forms use
# QWORD PTR with Disp8*8 boundaries (+/-1016|1024|1032) and the ymm forms
# use XMMWORD PTR with Disp8*16 boundaries.
vpmovsxbw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovsxbw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [ecx] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [edx+1016] # AVX512{BW,VL} Disp8
vpmovsxbw xmm6{k7}, QWORD PTR [edx+1024] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [edx-1024] # AVX512{BW,VL} Disp8
vpmovsxbw xmm6{k7}, QWORD PTR [edx-1032] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpmovsxbw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovzxbw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [ecx] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [edx+1016] # AVX512{BW,VL} Disp8
vpmovzxbw xmm6{k7}, QWORD PTR [edx+1024] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [edx-1024] # AVX512{BW,VL} Disp8
vpmovzxbw xmm6{k7}, QWORD PTR [edx-1032] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpmovzxbw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhrsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhrsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmullw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmullw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# --- extraction residue: repo stsp/binutils-ia16, file size 1,267 bytes ---
# --- next section: gas/testsuite/gas/i386/jump16.s ---
# gas/testsuite/gas/i386/jump16.s — assembler test fixture.
# Exercises every 16-bit jmp/call/ljmp/lcall operand form (near, register-
# indirect, memory-indirect, far pointer-in-memory, far immediate) first in
# .code16gcc mode, then in plain .code16 mode, then repeats far/indirect
# forms under Intel syntax, and finishes with iret operand-size variants.
# NOTE(review): instruction ORDER and spelling are matched against an
# expected-disassembly dump file by the testsuite — do not reorder or edit.
.psize 0
.text
.extern xxx
.extern yyy
# --- AT&T syntax, .code16gcc (16-bit code assembled with 32-bit register defaults) ---
.code16gcc
1: jmp 1b
jmp xxx
jmp *xxx                # indirect through memory operand xxx
jmp *%edi               # indirect through 32-bit register
jmp *(%edi)             # indirect through memory at (%edi)
ljmp *xxx(%edi)         # far jump, pointer loaded from memory
ljmp *xxx
ljmp $0x1234,$xxx       # far jump, immediate segment:offset
call 1b
call xxx
call *xxx
call *%edi
call *(%edi)
lcall *xxx(%edi)
lcall *xxx
lcall $0x1234,$xxx
# --- AT&T syntax, plain .code16 (true 16-bit mode, %di instead of %edi) ---
.code16
jmp 1b
jmp *xxx
jmp *%di
jmp *(%di)
ljmp *xxx(%di)
ljmpl *xxx(%di)         # explicit 32-bit far pointer (l suffix)
ljmp *xxx
ljmpl *xxx
ljmp $0x1234,$xxx
call 1b
call xxx
call *xxx
call *%di
call *(%di)
lcall *xxx(%di)
lcalll *xxx(%di)        # explicit 32-bit far pointer (l suffix)
lcall *xxx
lcalll *xxx
lcall $0x1234,$xxx
# --- Intel syntax: ptr-size keywords and far-pointer spellings ---
.intel_syntax noprefix
call word ptr [bx]
call dword ptr [bx]
call fword ptr [bx]     # fword = 16:32 far pointer in memory
jmp word ptr [bx]
jmp dword ptr [bx]
jmp fword ptr [bx]
jmp $+2                 # $ = current location counter (Intel form)
nop
jmp .+2                 # . = current location counter (GAS form)
nop
# Far immediates: both "seg,off" and "seg:off" spellings must assemble.
lcall 0x9090,0x1010
lcall 0x9090:0x1010
lcall 0x9090,xxx
lcall 0x9090:xxx
call 0x9090,0x1010      # plain call/jmp with two operands is implicitly far
call 0x9090:0x1010
call 0x9090,xxx
call 0x9090:xxx
ljmp 0x9090,0x1010
ljmp 0x9090:0x1010
ljmp 0x9090,xxx
ljmp 0x9090:xxx
jmp 0x9090,0x1010
jmp 0x9090:0x1010
jmp 0x9090,xxx
jmp 0x9090:xxx
# Symbolic segment selector (yyy) with numeric and symbolic offsets.
ljmp yyy,0x1010
ljmp yyy:0x1010
ljmp yyy,xxx
ljmp yyy:xxx
jmp yyy,0x1010
jmp yyy:0x1010
jmp yyy,xxx
jmp yyy:xxx
# --- iret operand-size suffixes across syntax/mode combinations ---
.att_syntax
.code16gcc
iret
iretw
iretl
.code16
iret
iretw
iretl
.intel_syntax noprefix
.code16gcc
iret
iretw
iretd                   # Intel-syntax spelling of 32-bit iret
.code16
iret
iretw
iretd
# --- extraction residue: repo stsp/binutils-ia16, file size 2,881 bytes ---
# --- next section: gas/testsuite/gas/i386/avx2-wig.s ---
# Check AVX2 WIG instructions
# Test fixture: one representative register-form encoding of every AVX2
# instruction whose VEX.W bit is ignored (WIG), 256-bit (ymm) operands,
# AT&T syntax.  Matched against an expected-disassembly dump — do not
# reorder or change operands.
.allow_index_reg
.text
_start:
# Non-temporal load and multi-byte SAD
vmovntdqa (%ecx),%ymm4
vmpsadbw $7,%ymm4,%ymm6,%ymm2
# Packed absolute value
vpabsb %ymm4,%ymm6
vpabsd %ymm4,%ymm6
vpabsw %ymm4,%ymm6
# Pack with saturation
vpackssdw %ymm4,%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
# Packed add (wrapping / saturating)
vpaddb %ymm4,%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
# Byte align, logical ops, averages, blend
vpalignr $7,%ymm4,%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
# Packed compares (equal / greater-than)
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
# Horizontal add/subtract
vphaddd %ymm4,%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
# Multiply-add and packed min/max
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
# Mask extraction and sign/zero extension (xmm source, ymm destination)
vpmovmskb %ymm4,%ecx
vpmovsxbd %xmm4,%ymm6
vpmovsxbq %xmm4,%ymm4
vpmovsxbw %xmm4,%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovzxbd %xmm4,%ymm6
vpmovzxbq %xmm4,%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwq %xmm4,%ymm6
# Packed multiplies
vpmuldq %ymm4,%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
# OR, SAD, shuffles, sign ops
vpor %ymm4,%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufd $7,%ymm6,%ymm2
vpshufhw $7,%ymm6,%ymm2
vpshuflw $7,%ymm6,%ymm2
vpsignb %ymm4,%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
# Immediate-count shifts
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
# Packed subtract (wrapping / saturating)
vpsubb %ymm4,%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
# Unpack/interleave high and low halves
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
# --- extraction residue: repo stsp/binutils-ia16, file size 191,023 bytes ---
# --- next section: gas/testsuite/gas/i386/x86-64-evex-lig.s ---
# Check EVEX LIG instructions
.allow_index_reg
.text
_start:
vaddsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vaddsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddsd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vaddsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vaddsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vaddsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vaddsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vaddsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vaddss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vaddss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vaddss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vaddss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vaddss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vaddss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vaddss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vaddss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vcmpsd $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpsd $0xab, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpsd $123, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpsd $123, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpsd $123, (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpsd $123, 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpsd $123, 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpsd $123, 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpsd $123, -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpsd $123, -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmplt_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmplt_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpltsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpltsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpltsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpltsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpltsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpltsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpltsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpltsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmple_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmple_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplesd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplesd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmplesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmplesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_qsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_qsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_qsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpunord_qsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpunord_qsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_qsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_qsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_qsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunordsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunordsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunordsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpunordsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpunordsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunordsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunordsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunordsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnlt_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnltsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnltsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnltsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnltsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnltsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnltsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnltsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnltsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnle_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnle_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlesd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlesd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnlesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnlesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_qsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_qsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_qsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpord_qsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpord_qsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_qsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_qsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_qsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpordsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpordsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpordsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpordsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpordsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpordsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpordsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpordsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnge_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnge_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngesd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngesd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngt_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngt_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngtsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngtsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngtsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngtsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngtsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngtsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngtsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngtsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalsesd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalsesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalsesd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpfalsesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpfalsesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalsesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalsesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalsesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpge_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpge_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgesd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgesd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgt_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgt_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgtsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgtsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgtsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgtsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgtsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgtsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgtsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgtsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptruesd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptruesd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptruesd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmptruesd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmptruesd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptruesd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptruesd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptruesd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmplt_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmplt_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmple_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmple_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_ssd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_ssd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_ssd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpunord_ssd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpunord_ssd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_ssd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_ssd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_ssd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_ssd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_ssd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_ssd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpord_ssd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpord_ssd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_ssd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_ssd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_ssd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_uqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_uqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_uqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_uqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpfalse_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_ossd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_ossd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_ossd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_ossd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_ossd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_ossd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_ossd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_ossd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpge_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpge_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqsd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_oqsd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_oqsd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqsd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqsd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_oqsd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqsd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_oqsd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_ussd %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_ussd {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_ussd (%rcx), %xmm29, %k5{%k7} # AVX512
vcmptrue_ussd 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmptrue_ussd 1016(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_ussd 1024(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_ussd -1024(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_ussd -1032(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpss $0xab, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpss $0xab, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpss $123, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpss $123, {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpss $123, (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpss $123, 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpss $123, 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpss $123, 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpss $123, -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpss $123, -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmplt_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmplt_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpltss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpltss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpltss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpltss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpltss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpltss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpltss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpltss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmple_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmple_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpless %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpless {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpless (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpless 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpless 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpless 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpless -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpless -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_qss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_qss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_qss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpunord_qss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpunord_qss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_qss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_qss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_qss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunordss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunordss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunordss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpunordss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpunordss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunordss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunordss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunordss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnlt_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnltss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnltss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnltss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnltss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnltss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnltss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnltss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnltss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnle_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnle_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnless %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnless {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnless (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnless 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnless 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnless 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnless -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnless -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_qss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_qss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_qss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpord_qss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpord_qss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_qss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_qss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_qss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpordss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpordss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpordss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpordss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpordss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpordss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpordss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpordss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnge_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnge_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngess %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngess (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngess 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngess 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngess -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngess -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngt_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngt_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngtss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngtss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngtss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngtss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngtss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngtss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngtss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngtss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalsess %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalsess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalsess (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpfalsess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpfalsess 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalsess 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalsess -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalsess -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpge_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpge_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgess %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgess (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgess 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgess 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgess -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgess -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgt_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgt_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgtss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgtss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgtss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgtss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgtss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgtss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgtss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgtss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptruess %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptruess {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
# EVEX scalar-single compare pseudo-ops (vcmp<pred>ss): result written to mask k5
# under write-mask k7. Each predicate group tests: reg form, {sae} form, plain and
# indexed memory, then Disp8*N compression boundaries (508/-512 fit a compressed
# 8-bit displacement with N=4; 512/-516 force a full disp32).
# NOTE(review): the reg/{sae} forms of vcmptruess precede this chunk.
vcmptruess (%rcx), %xmm29, %k5{%k7} # AVX512
vcmptruess 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmptruess 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptruess 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptruess -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptruess -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmplt_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmplt_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmplt_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmplt_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmplt_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmple_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmple_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmple_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmple_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmple_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_sss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_sss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpunord_sss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpunord_sss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpunord_sss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_sss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpunord_sss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpunord_sss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnlt_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnlt_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnle_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnle_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnle_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_sss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_sss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpord_sss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpord_sss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpord_sss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_sss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpord_sss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpord_sss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpeq_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpeq_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpeq_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpeq_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpeq_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpnge_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpnge_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpnge_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_uqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpngt_uqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_uqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpngt_uqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpngt_uqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpfalse_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpfalse_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpfalse_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpfalse_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_osss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_osss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpneq_osss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpneq_osss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpneq_osss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_osss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpneq_osss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpneq_osss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpge_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpge_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpge_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpge_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpge_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_oqss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmpgt_oqss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_oqss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmpgt_oqss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmpgt_oqss -516(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_usss %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_usss {sae}, %xmm28, %xmm29, %k5{%k7} # AVX512
vcmptrue_usss (%rcx), %xmm29, %k5{%k7} # AVX512
vcmptrue_usss 0x123(%rax,%r14,8), %xmm29, %k5{%k7} # AVX512
vcmptrue_usss 508(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_usss 512(%rdx), %xmm29, %k5{%k7} # AVX512
vcmptrue_usss -512(%rdx), %xmm29, %k5{%k7} # AVX512 Disp8
vcmptrue_usss -516(%rdx), %xmm29, %k5{%k7} # AVX512
# EVEX vcomisd/vcomiss (ordered scalar compare, sets EFLAGS): reg, {sae},
# and memory forms with Disp8*N boundaries (N=8 for sd, N=4 for ss).
vcomisd %xmm29, %xmm30 # AVX512
vcomisd {sae}, %xmm29, %xmm30 # AVX512
vcomisd (%rcx), %xmm30 # AVX512
vcomisd 0x123(%rax,%r14,8), %xmm30 # AVX512
vcomisd 1016(%rdx), %xmm30 # AVX512 Disp8
vcomisd 1024(%rdx), %xmm30 # AVX512
vcomisd -1024(%rdx), %xmm30 # AVX512 Disp8
vcomisd -1032(%rdx), %xmm30 # AVX512
vcomiss %xmm29, %xmm30 # AVX512
vcomiss {sae}, %xmm29, %xmm30 # AVX512
vcomiss (%rcx), %xmm30 # AVX512
vcomiss 0x123(%rax,%r14,8), %xmm30 # AVX512
vcomiss 508(%rdx), %xmm30 # AVX512 Disp8
vcomiss 512(%rdx), %xmm30 # AVX512
vcomiss -512(%rdx), %xmm30 # AVX512 Disp8
vcomiss -516(%rdx), %xmm30 # AVX512
# EVEX scalar conversions. Covers: static-rounding ({rn/ru/rd/rz-sae}) forms of
# vcvtsd2si/vcvtss2si into 32- and 64-bit GPRs (incl. REX targets r13d/r8),
# int->float vcvtsi2sd/ss (l/q suffixes select 32/64-bit source), sd<->ss
# widen/narrow with masking, and {sae} forms of the truncating vcvtt* converts.
vcvtsd2si {rn-sae}, %xmm30, %eax # AVX512
vcvtsd2si {ru-sae}, %xmm30, %eax # AVX512
vcvtsd2si {rd-sae}, %xmm30, %eax # AVX512
vcvtsd2si {rz-sae}, %xmm30, %eax # AVX512
vcvtsd2si {rn-sae}, %xmm30, %ebp # AVX512
vcvtsd2si {ru-sae}, %xmm30, %ebp # AVX512
vcvtsd2si {rd-sae}, %xmm30, %ebp # AVX512
vcvtsd2si {rz-sae}, %xmm30, %ebp # AVX512
vcvtsd2si {rn-sae}, %xmm30, %r13d # AVX512
vcvtsd2si {ru-sae}, %xmm30, %r13d # AVX512
vcvtsd2si {rd-sae}, %xmm30, %r13d # AVX512
vcvtsd2si {rz-sae}, %xmm30, %r13d # AVX512
vcvtsd2si {rn-sae}, %xmm30, %rax # AVX512
vcvtsd2si {ru-sae}, %xmm30, %rax # AVX512
vcvtsd2si {rd-sae}, %xmm30, %rax # AVX512
vcvtsd2si {rz-sae}, %xmm30, %rax # AVX512
vcvtsd2si {rn-sae}, %xmm30, %r8 # AVX512
vcvtsd2si {ru-sae}, %xmm30, %r8 # AVX512
vcvtsd2si {rd-sae}, %xmm30, %r8 # AVX512
vcvtsd2si {rz-sae}, %xmm30, %r8 # AVX512
vcvtsd2ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vcvtsd2ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vcvtsd2ss 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vcvtsd2ss -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vcvtsd2ss -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vcvtsi2sdl %eax, %xmm29, %xmm30 # AVX512
vcvtsi2sdl %ebp, %xmm29, %xmm30 # AVX512
vcvtsi2sdl %r13d, %xmm29, %xmm30 # AVX512
vcvtsi2sdl (%rcx), %xmm29, %xmm30 # AVX512
vcvtsi2sdl 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512
vcvtsi2sdl 508(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2sdl 512(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2sdl -512(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2sdl -516(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2sdq %rax, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %rax, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %rax, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %rax, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %rax, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %r8, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %r8, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %r8, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %r8, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq %r8, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2sdq (%rcx), %xmm29, %xmm30 # AVX512
vcvtsi2sdq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512
vcvtsi2sdq 1016(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2sdq 1024(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2sdq -1024(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2sdq -1032(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2ssl %eax, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %eax, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %eax, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %eax, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %eax, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %ebp, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %ebp, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %ebp, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %ebp, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %ebp, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %r13d, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %r13d, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %r13d, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %r13d, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl %r13d, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssl (%rcx), %xmm29, %xmm30 # AVX512
vcvtsi2ssl 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512
vcvtsi2ssl 508(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2ssl 512(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2ssl -512(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2ssl -516(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2ssq %rax, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %rax, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %rax, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %rax, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %rax, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %r8, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %r8, {rn-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %r8, {ru-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %r8, {rd-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq %r8, {rz-sae}, %xmm29, %xmm30 # AVX512
vcvtsi2ssq (%rcx), %xmm29, %xmm30 # AVX512
vcvtsi2ssq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512
vcvtsi2ssq 1016(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2ssq 1024(%rdx), %xmm29, %xmm30 # AVX512
vcvtsi2ssq -1024(%rdx), %xmm29, %xmm30 # AVX512 Disp8
vcvtsi2ssq -1032(%rdx), %xmm29, %xmm30 # AVX512
vcvtss2sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtss2sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vcvtss2sd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vcvtss2sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vcvtss2sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vcvtss2sd 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vcvtss2sd 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vcvtss2sd -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vcvtss2sd -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vcvtss2si {rn-sae}, %xmm30, %eax # AVX512
vcvtss2si {ru-sae}, %xmm30, %eax # AVX512
vcvtss2si {rd-sae}, %xmm30, %eax # AVX512
vcvtss2si {rz-sae}, %xmm30, %eax # AVX512
vcvtss2si {rn-sae}, %xmm30, %ebp # AVX512
vcvtss2si {ru-sae}, %xmm30, %ebp # AVX512
vcvtss2si {rd-sae}, %xmm30, %ebp # AVX512
vcvtss2si {rz-sae}, %xmm30, %ebp # AVX512
vcvtss2si {rn-sae}, %xmm30, %r13d # AVX512
vcvtss2si {ru-sae}, %xmm30, %r13d # AVX512
vcvtss2si {rd-sae}, %xmm30, %r13d # AVX512
vcvtss2si {rz-sae}, %xmm30, %r13d # AVX512
vcvtss2si {rn-sae}, %xmm30, %rax # AVX512
vcvtss2si {ru-sae}, %xmm30, %rax # AVX512
vcvtss2si {rd-sae}, %xmm30, %rax # AVX512
vcvtss2si {rz-sae}, %xmm30, %rax # AVX512
vcvtss2si {rn-sae}, %xmm30, %r8 # AVX512
vcvtss2si {ru-sae}, %xmm30, %r8 # AVX512
vcvtss2si {rd-sae}, %xmm30, %r8 # AVX512
vcvtss2si {rz-sae}, %xmm30, %r8 # AVX512
vcvttsd2si {sae}, %xmm30, %eax # AVX512
vcvttsd2si {sae}, %xmm30, %ebp # AVX512
vcvttsd2si {sae}, %xmm30, %r13d # AVX512
vcvttsd2si {sae}, %xmm30, %rax # AVX512
vcvttsd2si {sae}, %xmm30, %r8 # AVX512
vcvttss2si {sae}, %xmm30, %eax # AVX512
vcvttss2si {sae}, %xmm30, %ebp # AVX512
vcvttss2si {sae}, %xmm30, %r13d # AVX512
vcvttss2si {sae}, %xmm30, %rax # AVX512
vcvttss2si {sae}, %xmm30, %r8 # AVX512
# EVEX scalar divide: merge-masking {%k7}, zero-masking {z}, the four static
# rounding modes, and memory forms with Disp8*N boundaries.
vdivsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vdivsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivsd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vdivsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vdivsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vdivsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vdivsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vdivsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vdivss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vdivss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vdivss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vdivss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vdivss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vdivss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vdivss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vdivss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
# EVEX scalar FMA family: vf{,n}m{add,sub}{132,213,231}{sd,ss}. Each opcode is
# exercised with the same 12-line pattern: reg+mask, reg+zero-mask, the four
# static rounding modes, then memory forms probing Disp8*N compression
# (N=8 for sd: 1016/-1024 compress; N=4 for ss: 508/-512 compress).
vfmadd132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmadd132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmadd132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmadd213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmadd213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmadd231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmadd231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmadd231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmadd231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmsub132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmsub132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmsub213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmsub213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmsub231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfmsub231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfmsub231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfmsub231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmadd132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmadd132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmadd213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmadd213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmadd231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmadd231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmadd231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmadd231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmsub132sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub132sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub132sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmsub132ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub132ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub132ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub132ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmsub213sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub213sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub213sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmsub213ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub213ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub213ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub213ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmsub231sd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub231sd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231sd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub231sd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vfnmsub231ss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub231ss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vfnmsub231ss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vfnmsub231ss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
# EVEX vgetexpsd/ss (extract exponent) and vgetmantsd/ss (extract mantissa,
# imm8 selects normalization interval/sign control): masking, {sae}, both an
# arbitrary imm8 (0xab) and a representative one (123), plus Disp8*N memory forms.
vgetexpsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetexpsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vgetexpsd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetexpsd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vgetexpsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vgetexpsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetexpsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetexpsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetexpsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetexpss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetexpss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vgetexpss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetexpss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vgetexpss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vgetexpss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetexpss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetexpss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetexpss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vgetmantsd $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $123, 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetmantsd $123, 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetmantsd $123, -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetmantsd $123, -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetmantss $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantss $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vgetmantss $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantss $123, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantss $123, {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vgetmantss $123, (%rcx), %xmm29, %xmm30{%k7} # AVX512
vgetmantss $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vgetmantss $123, 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetmantss $123, 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vgetmantss $123, -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vgetmantss $123, -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
# EVEX vmaxsd/ss and vminsd/ss: masking, zero-masking, {sae}
# (max/min take suppress-all-exceptions, not rounding), and Disp8*N memory forms.
vmaxsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmaxsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vmaxsd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmaxsd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vmaxsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vmaxsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmaxsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmaxsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmaxsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmaxss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmaxss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vmaxss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmaxss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vmaxss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vmaxss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmaxss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmaxss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmaxss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vminsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vminsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vminsd {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vminsd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vminsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vminsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vminsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vminsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vminsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vminss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vminss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vminss {sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vminss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vminss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vminss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vminss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vminss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vminss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmovsd (%rcx), %xmm30{%k7} # AVX512
vmovsd (%rcx), %xmm30{%k7}{z} # AVX512
vmovsd 0x123(%rax,%r14,8), %xmm30{%k7} # AVX512
vmovsd 1016(%rdx), %xmm30{%k7} # AVX512 Disp8
vmovsd 1024(%rdx), %xmm30{%k7} # AVX512
vmovsd -1024(%rdx), %xmm30{%k7} # AVX512 Disp8
vmovsd -1032(%rdx), %xmm30{%k7} # AVX512
vmovsd %xmm30, (%rcx){%k7} # AVX512
vmovsd %xmm30, 0x123(%rax,%r14,8){%k7} # AVX512
vmovsd %xmm30, 1016(%rdx){%k7} # AVX512 Disp8
vmovsd %xmm30, 1024(%rdx){%k7} # AVX512
vmovsd %xmm30, -1024(%rdx){%k7} # AVX512 Disp8
vmovsd %xmm30, -1032(%rdx){%k7} # AVX512
vmovsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmovsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vmovss (%rcx), %xmm30{%k7} # AVX512
vmovss (%rcx), %xmm30{%k7}{z} # AVX512
vmovss 0x123(%rax,%r14,8), %xmm30{%k7} # AVX512
vmovss 508(%rdx), %xmm30{%k7} # AVX512 Disp8
vmovss 512(%rdx), %xmm30{%k7} # AVX512
vmovss -512(%rdx), %xmm30{%k7} # AVX512 Disp8
vmovss -516(%rdx), %xmm30{%k7} # AVX512
vmovss %xmm30, (%rcx){%k7} # AVX512
vmovss %xmm30, 0x123(%rax,%r14,8){%k7} # AVX512
vmovss %xmm30, 508(%rdx){%k7} # AVX512 Disp8
vmovss %xmm30, 512(%rdx){%k7} # AVX512
vmovss %xmm30, -512(%rdx){%k7} # AVX512 Disp8
vmovss %xmm30, -516(%rdx){%k7} # AVX512
vmovss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmovss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vmulsd %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulsd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vmulsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulsd (%rcx), %xmm29, %xmm30{%k7} # AVX512
vmulsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vmulsd 1016(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmulsd 1024(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmulsd -1024(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmulsd -1032(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmulss %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulss %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512
vmulss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7} # AVX512
vmulss (%rcx), %xmm29, %xmm30{%k7} # AVX512
vmulss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7} # AVX512
vmulss 508(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmulss 512(%rdx), %xmm29, %xmm30{%k7} # AVX512
vmulss -512(%rdx), %xmm29, %xmm30{%k7} # AVX512 Disp8
vmulss -516(%rdx), %xmm29, %xmm30{%k7} # AVX512
# Scalar reciprocal / reciprocal-sqrt approximations.  vrcp14* / vrsqrt14*
# are AVX-512F (2^-14 precision, no rounding operand); vrcp28* / vrsqrt28*
# are AVX-512ER (2^-28 precision, {sae} allowed) — the "AVX512EMI" tags
# mark the ER-extension lines.  Same masking/zeroing/Disp8-boundary
# matrix as the other scalar families above.
	vrcp14sd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14sd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vrcp14sd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14sd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrcp14sd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14sd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrcp14sd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14ss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14ss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vrcp14ss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14ss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrcp14ss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp14ss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrcp14ss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrcp28ss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28ss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512EMI
	vrcp28ss {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28ss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28ss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrcp28ss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28ss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrcp28ss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28sd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28sd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512EMI
	vrcp28sd {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28sd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28sd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrcp28sd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrcp28sd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrcp28sd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt14sd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14sd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vrsqrt14sd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14sd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrsqrt14sd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14sd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrsqrt14sd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14ss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14ss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vrsqrt14ss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14ss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrsqrt14ss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt14ss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrsqrt14ss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrsqrt28ss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28ss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512EMI
	vrsqrt28ss {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28ss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28ss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28ss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrsqrt28ss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28ss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrsqrt28ss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28sd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28sd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512EMI
	vrsqrt28sd {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28sd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28sd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28sd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrsqrt28sd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
	vrsqrt28sd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI Disp8
	vrsqrt28sd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512EMI
# vsqrt/vsub scalar forms with the full static-rounding set
# ({rn,ru,rd,rz}-sae) plus masking/zeroing and Disp8 boundaries, then
# vucomisd/vucomiss ordered-compare forms (EFLAGS-writing; no mask
# register operand, {sae} allowed).
	vsqrtsd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vsqrtsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsqrtsd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtsd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsqrtsd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vsqrtss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsqrtss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsqrtss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsqrtss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vsubsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsubsd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsubsd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsubsd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsubss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vsubss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vsubss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vsubss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vsubss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsubss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vsubss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vsubss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vucomisd %xmm29, %xmm30	 # AVX512
	vucomisd {sae}, %xmm29, %xmm30	 # AVX512
	vucomisd (%rcx), %xmm30	 # AVX512
	vucomisd 0x123(%rax,%r14,8), %xmm30	 # AVX512
	vucomisd 1016(%rdx), %xmm30	 # AVX512 Disp8
	vucomisd 1024(%rdx), %xmm30	 # AVX512
	vucomisd -1024(%rdx), %xmm30	 # AVX512 Disp8
	vucomisd -1032(%rdx), %xmm30	 # AVX512
	vucomiss %xmm29, %xmm30	 # AVX512
	vucomiss {sae}, %xmm29, %xmm30	 # AVX512
	vucomiss (%rcx), %xmm30	 # AVX512
	vucomiss 0x123(%rax,%r14,8), %xmm30	 # AVX512
	vucomiss 508(%rdx), %xmm30	 # AVX512 Disp8
	vucomiss 512(%rdx), %xmm30	 # AVX512
	vucomiss -512(%rdx), %xmm30	 # AVX512 Disp8
	vucomiss -516(%rdx), %xmm30	 # AVX512
# Unsigned-integer conversion forms.  vcvtsd2usi/vcvtss2usi are run for
# several destination GPRs (eax, ebp needing ModRM disp quirks, r13d
# needing REX.B, and 64-bit rax/r8 needing REX.W) with the rounding set
# and Disp8 boundaries.  vcvtusi2sd/vcvtusi2ss use explicit l/q operand
# size suffixes; note vcvtusi2sdl has no rounding forms (32-bit uint ->
# double is exact, so no rounding operand is encodable).
	vcvtsd2usi %xmm30, %eax	 # AVX512
	vcvtsd2usi {rn-sae}, %xmm30, %eax	 # AVX512
	vcvtsd2usi {ru-sae}, %xmm30, %eax	 # AVX512
	vcvtsd2usi {rd-sae}, %xmm30, %eax	 # AVX512
	vcvtsd2usi {rz-sae}, %xmm30, %eax	 # AVX512
	vcvtsd2usi (%rcx), %eax	 # AVX512
	vcvtsd2usi 0x123(%rax,%r14,8), %eax	 # AVX512
	vcvtsd2usi 1016(%rdx), %eax	 # AVX512 Disp8
	vcvtsd2usi 1024(%rdx), %eax	 # AVX512
	vcvtsd2usi -1024(%rdx), %eax	 # AVX512 Disp8
	vcvtsd2usi -1032(%rdx), %eax	 # AVX512
	vcvtsd2usi %xmm30, %ebp	 # AVX512
	vcvtsd2usi {rn-sae}, %xmm30, %ebp	 # AVX512
	vcvtsd2usi {ru-sae}, %xmm30, %ebp	 # AVX512
	vcvtsd2usi {rd-sae}, %xmm30, %ebp	 # AVX512
	vcvtsd2usi {rz-sae}, %xmm30, %ebp	 # AVX512
	vcvtsd2usi (%rcx), %ebp	 # AVX512
	vcvtsd2usi 0x123(%rax,%r14,8), %ebp	 # AVX512
	vcvtsd2usi 1016(%rdx), %ebp	 # AVX512 Disp8
	vcvtsd2usi 1024(%rdx), %ebp	 # AVX512
	vcvtsd2usi -1024(%rdx), %ebp	 # AVX512 Disp8
	vcvtsd2usi -1032(%rdx), %ebp	 # AVX512
	vcvtsd2usi %xmm30, %r13d	 # AVX512
	vcvtsd2usi {rn-sae}, %xmm30, %r13d	 # AVX512
	vcvtsd2usi {ru-sae}, %xmm30, %r13d	 # AVX512
	vcvtsd2usi {rd-sae}, %xmm30, %r13d	 # AVX512
	vcvtsd2usi {rz-sae}, %xmm30, %r13d	 # AVX512
	vcvtsd2usi (%rcx), %r13d	 # AVX512
	vcvtsd2usi 0x123(%rax,%r14,8), %r13d	 # AVX512
	vcvtsd2usi 1016(%rdx), %r13d	 # AVX512 Disp8
	vcvtsd2usi 1024(%rdx), %r13d	 # AVX512
	vcvtsd2usi -1024(%rdx), %r13d	 # AVX512 Disp8
	vcvtsd2usi -1032(%rdx), %r13d	 # AVX512
	vcvtsd2usi %xmm30, %rax	 # AVX512
	vcvtsd2usi {rn-sae}, %xmm30, %rax	 # AVX512
	vcvtsd2usi {ru-sae}, %xmm30, %rax	 # AVX512
	vcvtsd2usi {rd-sae}, %xmm30, %rax	 # AVX512
	vcvtsd2usi {rz-sae}, %xmm30, %rax	 # AVX512
	vcvtsd2usi (%rcx), %rax	 # AVX512
	vcvtsd2usi 0x123(%rax,%r14,8), %rax	 # AVX512
	vcvtsd2usi 1016(%rdx), %rax	 # AVX512 Disp8
	vcvtsd2usi 1024(%rdx), %rax	 # AVX512
	vcvtsd2usi -1024(%rdx), %rax	 # AVX512 Disp8
	vcvtsd2usi -1032(%rdx), %rax	 # AVX512
	vcvtsd2usi %xmm30, %r8	 # AVX512
	vcvtsd2usi {rn-sae}, %xmm30, %r8	 # AVX512
	vcvtsd2usi {ru-sae}, %xmm30, %r8	 # AVX512
	vcvtsd2usi {rd-sae}, %xmm30, %r8	 # AVX512
	vcvtsd2usi {rz-sae}, %xmm30, %r8	 # AVX512
	vcvtsd2usi (%rcx), %r8	 # AVX512
	vcvtsd2usi 0x123(%rax,%r14,8), %r8	 # AVX512
	vcvtsd2usi 1016(%rdx), %r8	 # AVX512 Disp8
	vcvtsd2usi 1024(%rdx), %r8	 # AVX512
	vcvtsd2usi -1024(%rdx), %r8	 # AVX512 Disp8
	vcvtsd2usi -1032(%rdx), %r8	 # AVX512
	vcvtss2usi %xmm30, %eax	 # AVX512
	vcvtss2usi {rn-sae}, %xmm30, %eax	 # AVX512
	vcvtss2usi {ru-sae}, %xmm30, %eax	 # AVX512
	vcvtss2usi {rd-sae}, %xmm30, %eax	 # AVX512
	vcvtss2usi {rz-sae}, %xmm30, %eax	 # AVX512
	vcvtss2usi (%rcx), %eax	 # AVX512
	vcvtss2usi 0x123(%rax,%r14,8), %eax	 # AVX512
	vcvtss2usi 508(%rdx), %eax	 # AVX512 Disp8
	vcvtss2usi 512(%rdx), %eax	 # AVX512
	vcvtss2usi -512(%rdx), %eax	 # AVX512 Disp8
	vcvtss2usi -516(%rdx), %eax	 # AVX512
	vcvtss2usi %xmm30, %ebp	 # AVX512
	vcvtss2usi {rn-sae}, %xmm30, %ebp	 # AVX512
	vcvtss2usi {ru-sae}, %xmm30, %ebp	 # AVX512
	vcvtss2usi {rd-sae}, %xmm30, %ebp	 # AVX512
	vcvtss2usi {rz-sae}, %xmm30, %ebp	 # AVX512
	vcvtss2usi (%rcx), %ebp	 # AVX512
	vcvtss2usi 0x123(%rax,%r14,8), %ebp	 # AVX512
	vcvtss2usi 508(%rdx), %ebp	 # AVX512 Disp8
	vcvtss2usi 512(%rdx), %ebp	 # AVX512
	vcvtss2usi -512(%rdx), %ebp	 # AVX512 Disp8
	vcvtss2usi -516(%rdx), %ebp	 # AVX512
	vcvtss2usi %xmm30, %r13d	 # AVX512
	vcvtss2usi {rn-sae}, %xmm30, %r13d	 # AVX512
	vcvtss2usi {ru-sae}, %xmm30, %r13d	 # AVX512
	vcvtss2usi {rd-sae}, %xmm30, %r13d	 # AVX512
	vcvtss2usi {rz-sae}, %xmm30, %r13d	 # AVX512
	vcvtss2usi (%rcx), %r13d	 # AVX512
	vcvtss2usi 0x123(%rax,%r14,8), %r13d	 # AVX512
	vcvtss2usi 508(%rdx), %r13d	 # AVX512 Disp8
	vcvtss2usi 512(%rdx), %r13d	 # AVX512
	vcvtss2usi -512(%rdx), %r13d	 # AVX512 Disp8
	vcvtss2usi -516(%rdx), %r13d	 # AVX512
	vcvtss2usi %xmm30, %rax	 # AVX512
	vcvtss2usi {rn-sae}, %xmm30, %rax	 # AVX512
	vcvtss2usi {ru-sae}, %xmm30, %rax	 # AVX512
	vcvtss2usi {rd-sae}, %xmm30, %rax	 # AVX512
	vcvtss2usi {rz-sae}, %xmm30, %rax	 # AVX512
	vcvtss2usi (%rcx), %rax	 # AVX512
	vcvtss2usi 0x123(%rax,%r14,8), %rax	 # AVX512
	vcvtss2usi 508(%rdx), %rax	 # AVX512 Disp8
	vcvtss2usi 512(%rdx), %rax	 # AVX512
	vcvtss2usi -512(%rdx), %rax	 # AVX512 Disp8
	vcvtss2usi -516(%rdx), %rax	 # AVX512
	vcvtss2usi %xmm30, %r8	 # AVX512
	vcvtss2usi {rn-sae}, %xmm30, %r8	 # AVX512
	vcvtss2usi {ru-sae}, %xmm30, %r8	 # AVX512
	vcvtss2usi {rd-sae}, %xmm30, %r8	 # AVX512
	vcvtss2usi {rz-sae}, %xmm30, %r8	 # AVX512
	vcvtss2usi (%rcx), %r8	 # AVX512
	vcvtss2usi 0x123(%rax,%r14,8), %r8	 # AVX512
	vcvtss2usi 508(%rdx), %r8	 # AVX512 Disp8
	vcvtss2usi 512(%rdx), %r8	 # AVX512
	vcvtss2usi -512(%rdx), %r8	 # AVX512 Disp8
	vcvtss2usi -516(%rdx), %r8	 # AVX512
	vcvtusi2sdl %eax, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdl %ebp, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdl %r13d, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdl (%rcx), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdl 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdl 508(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2sdl 512(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdl -512(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2sdl -516(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %rax, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %rax, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %rax, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %rax, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %rax, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %r8, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %r8, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %r8, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %r8, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq %r8, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq (%rcx), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq 1016(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2sdq 1024(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2sdq -1024(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2sdq -1032(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %eax, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %eax, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %eax, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %eax, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %eax, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %ebp, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %ebp, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %ebp, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %ebp, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %ebp, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %r13d, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %r13d, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %r13d, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %r13d, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl %r13d, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl (%rcx), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl 508(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2ssl 512(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssl -512(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2ssl -516(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %rax, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %rax, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %rax, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %rax, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %rax, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %r8, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %r8, {rn-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %r8, {ru-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %r8, {rd-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq %r8, {rz-sae}, %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq (%rcx), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq 1016(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2ssq 1024(%rdx), %xmm29, %xmm30	 # AVX512
	vcvtusi2ssq -1024(%rdx), %xmm29, %xmm30	 # AVX512 Disp8
	vcvtusi2ssq -1032(%rdx), %xmm29, %xmm30	 # AVX512
# vscalefsd/ss (full rounding set), vfixupimmss/sd and vrndscalesd/ss
# (imm8 + {sae}) — both an 0xab and a decimal-123 immediate are assembled
# to check immediate byte emission, alongside the usual masking and
# Disp8*N boundary operands.
	vscalefsd %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vscalefsd {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd {ru-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd {rd-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd {rz-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vscalefsd 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefsd -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vscalefsd -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vscalefss {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss {ru-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss {rd-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss {rz-sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vscalefss 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vscalefss -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vscalefss -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vfixupimmss $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $123, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $123, (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $123, 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vfixupimmss $123, 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmss $123, -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vfixupimmss $123, -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vfixupimmsd $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $123, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $123, (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $123, 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vfixupimmsd $123, 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vfixupimmsd $123, -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vfixupimmsd $123, -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vrndscalesd $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $123, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $123, (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $123, 1016(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrndscalesd $123, 1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscalesd $123, -1024(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrndscalesd $123, -1032(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512
	vrndscaless $0xab, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $123, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $123, (%rcx), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $123, 0x123(%rax,%r14,8), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $123, 508(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrndscaless $123, 512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
	vrndscaless $123, -512(%rdx), %xmm29, %xmm30{%k7}	 # AVX512 Disp8
	vrndscaless $123, -516(%rdx), %xmm29, %xmm30{%k7}	 # AVX512
# Re-assemble the same instruction shapes in Intel syntax (dst-first,
# size keywords, trailing rounding/{sae} operands, mask attached to the
# destination).  vcmpsd is tested both with an explicit imm8 predicate
# and via every pseudo-op mnemonic (vcmpeq_oqsd, vcmpltsd, ...), whose
# predicate is implied by the mnemonic; results go to a mask register
# (k5) under a second mask (k7).  The vcmpfalsesd group continues past
# this point in the file.
	.intel_syntax noprefix
	vaddsd xmm30{k7}, xmm29, xmm28	 # AVX512
	vaddsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
	vaddsd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
	vaddsd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
	vaddsd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
	vaddsd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
	vaddsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vaddsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vaddsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vaddss xmm30{k7}, xmm29, xmm28	 # AVX512
	vaddss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
	vaddss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
	vaddss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
	vaddss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
	vaddss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
	vaddss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
	vaddss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vaddss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
	vaddss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
	vaddss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
	vaddss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
	vcmpsd k5{k7}, xmm29, xmm28, 0xab	 # AVX512
	vcmpsd k5{k7}, xmm29, xmm28, {sae}, 0xab	 # AVX512
	vcmpsd k5{k7}, xmm29, xmm28, 123	 # AVX512
	vcmpsd k5{k7}, xmm29, xmm28, {sae}, 123	 # AVX512
	vcmpsd k5{k7}, xmm29, QWORD PTR [rcx], 123	 # AVX512
	vcmpsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123	 # AVX512
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx+1016], 123	 # AVX512 Disp8
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx+1024], 123	 # AVX512
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx-1024], 123	 # AVX512 Disp8
	vcmpsd k5{k7}, xmm29, QWORD PTR [rdx-1032], 123	 # AVX512
	vcmpeq_oqsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpeq_oqsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpeq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpeqsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpeqsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpeqsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmplt_ossd k5{k7}, xmm29, xmm28	 # AVX512
	vcmplt_ossd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmplt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpltsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpltsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpltsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmple_ossd k5{k7}, xmm29, xmm28	 # AVX512
	vcmple_ossd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmple_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmplesd k5{k7}, xmm29, xmm28	 # AVX512
	vcmplesd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmplesd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmplesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmplesd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpunord_qsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpunord_qsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpunord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpunordsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpunordsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpunordsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpneq_uqsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpneq_uqsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpneq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpneqsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpneqsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpneqsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpnlt_ussd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpnlt_ussd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpnlt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpnltsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpnltsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpnltsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpnle_ussd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpnle_ussd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpnle_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpnlesd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpnlesd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpnlesd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpord_qsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpord_qsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpord_qsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpordsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpordsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpordsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpeq_uqsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpeq_uqsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpeq_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpnge_ussd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpnge_ussd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpnge_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpngesd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpngesd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpngesd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpngt_ussd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpngt_ussd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpngt_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpngtsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpngtsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpngtsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpngtsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpngtsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpfalse_oqsd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpfalse_oqsd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
	vcmpfalse_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
	vcmpfalsesd k5{k7}, xmm29, xmm28	 # AVX512
	vcmpfalsesd k5{k7}, xmm29, xmm28, {sae}	 # AVX512
	vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
	vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
	vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
	vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
	vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vcmpfalsesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpneq_oqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpneq_oqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpneq_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpge_ossd k5{k7}, xmm29, xmm28 # AVX512
vcmpge_ossd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpge_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpgesd k5{k7}, xmm29, xmm28 # AVX512
vcmpgesd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpgesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpgesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpgt_ossd k5{k7}, xmm29, xmm28 # AVX512
vcmpgt_ossd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpgt_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpgtsd k5{k7}, xmm29, xmm28 # AVX512
vcmpgtsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpgtsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmptrue_uqsd k5{k7}, xmm29, xmm28 # AVX512
vcmptrue_uqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmptrue_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmptruesd k5{k7}, xmm29, xmm28 # AVX512
vcmptruesd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmptruesd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmptruesd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmptruesd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpeq_ossd k5{k7}, xmm29, xmm28 # AVX512
vcmpeq_ossd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpeq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmplt_oqsd k5{k7}, xmm29, xmm28 # AVX512
vcmplt_oqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmplt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmple_oqsd k5{k7}, xmm29, xmm28 # AVX512
vcmple_oqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmple_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpunord_ssd k5{k7}, xmm29, xmm28 # AVX512
vcmpunord_ssd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpunord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpneq_ussd k5{k7}, xmm29, xmm28 # AVX512
vcmpneq_ussd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpneq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpnlt_uqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpnlt_uqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpnlt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpnle_uqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpnle_uqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpnle_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpord_ssd k5{k7}, xmm29, xmm28 # AVX512
vcmpord_ssd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpord_ssd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpeq_ussd k5{k7}, xmm29, xmm28 # AVX512
vcmpeq_ussd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpeq_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpnge_uqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpnge_uqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpnge_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpngt_uqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpngt_uqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpngt_uqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpfalse_ossd k5{k7}, xmm29, xmm28 # AVX512
vcmpfalse_ossd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpfalse_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpneq_ossd k5{k7}, xmm29, xmm28 # AVX512
vcmpneq_ossd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpneq_ossd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpge_oqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpge_oqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpge_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpgt_oqsd k5{k7}, xmm29, xmm28 # AVX512
vcmpgt_oqsd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmpgt_oqsd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmptrue_ussd k5{k7}, xmm29, xmm28 # AVX512
vcmptrue_ussd k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rcx] # AVX512
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcmptrue_ussd k5{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vcmpss k5{k7}, xmm29, xmm28, 0xab # AVX512
vcmpss k5{k7}, xmm29, xmm28, {sae}, 0xab # AVX512
vcmpss k5{k7}, xmm29, xmm28, 123 # AVX512
vcmpss k5{k7}, xmm29, xmm28, {sae}, 123 # AVX512
vcmpss k5{k7}, xmm29, DWORD PTR [rcx], 123 # AVX512
vcmpss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512
vcmpss k5{k7}, xmm29, DWORD PTR [rdx+508], 123 # AVX512 Disp8
vcmpss k5{k7}, xmm29, DWORD PTR [rdx+512], 123 # AVX512
vcmpss k5{k7}, xmm29, DWORD PTR [rdx-512], 123 # AVX512 Disp8
vcmpss k5{k7}, xmm29, DWORD PTR [rdx-516], 123 # AVX512
vcmpeq_oqss k5{k7}, xmm29, xmm28 # AVX512
vcmpeq_oqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpeq_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpeqss k5{k7}, xmm29, xmm28 # AVX512
vcmpeqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpeqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpeqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmplt_osss k5{k7}, xmm29, xmm28 # AVX512
vcmplt_osss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmplt_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpltss k5{k7}, xmm29, xmm28 # AVX512
vcmpltss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpltss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpltss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpltss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmple_osss k5{k7}, xmm29, xmm28 # AVX512
vcmple_osss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmple_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmple_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmple_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpless k5{k7}, xmm29, xmm28 # AVX512
vcmpless k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpless k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpless k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpless k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpless k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpless k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpless k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpunord_qss k5{k7}, xmm29, xmm28 # AVX512
vcmpunord_qss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpunord_qss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpunordss k5{k7}, xmm29, xmm28 # AVX512
vcmpunordss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpunordss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpunordss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpunordss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpneq_uqss k5{k7}, xmm29, xmm28 # AVX512
vcmpneq_uqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpneq_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpneqss k5{k7}, xmm29, xmm28 # AVX512
vcmpneqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpneqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpneqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnlt_usss k5{k7}, xmm29, xmm28 # AVX512
vcmpnlt_usss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnlt_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnltss k5{k7}, xmm29, xmm28 # AVX512
vcmpnltss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnltss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnltss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnltss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnle_usss k5{k7}, xmm29, xmm28 # AVX512
vcmpnle_usss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnle_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnless k5{k7}, xmm29, xmm28 # AVX512
vcmpnless k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnless k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnless k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnless k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpord_qss k5{k7}, xmm29, xmm28 # AVX512
vcmpord_qss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpord_qss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpordss k5{k7}, xmm29, xmm28 # AVX512
vcmpordss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpordss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpordss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpordss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpeq_uqss k5{k7}, xmm29, xmm28 # AVX512
vcmpeq_uqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpeq_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnge_usss k5{k7}, xmm29, xmm28 # AVX512
vcmpnge_usss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnge_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpngess k5{k7}, xmm29, xmm28 # AVX512
vcmpngess k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpngess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpngess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpngess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpngt_usss k5{k7}, xmm29, xmm28 # AVX512
vcmpngt_usss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpngt_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpngtss k5{k7}, xmm29, xmm28 # AVX512
vcmpngtss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpngtss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpngtss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpngtss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpfalse_oqss k5{k7}, xmm29, xmm28 # AVX512
vcmpfalse_oqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpfalse_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpfalsess k5{k7}, xmm29, xmm28 # AVX512
vcmpfalsess k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpfalsess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpneq_oqss k5{k7}, xmm29, xmm28 # AVX512
vcmpneq_oqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpneq_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpge_osss k5{k7}, xmm29, xmm28 # AVX512
vcmpge_osss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpge_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpgess k5{k7}, xmm29, xmm28 # AVX512
vcmpgess k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpgess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpgess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpgt_osss k5{k7}, xmm29, xmm28 # AVX512
vcmpgt_osss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpgt_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpgtss k5{k7}, xmm29, xmm28 # AVX512
vcmpgtss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpgtss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpgtss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpgtss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmptrue_uqss k5{k7}, xmm29, xmm28 # AVX512
vcmptrue_uqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmptrue_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmptruess k5{k7}, xmm29, xmm28 # AVX512
vcmptruess k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmptruess k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmptruess k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmptruess k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpeq_osss k5{k7}, xmm29, xmm28 # AVX512
vcmpeq_osss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpeq_osss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmplt_oqss k5{k7}, xmm29, xmm28 # AVX512
vcmplt_oqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmplt_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmple_oqss k5{k7}, xmm29, xmm28 # AVX512
vcmple_oqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmple_oqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpunord_sss k5{k7}, xmm29, xmm28 # AVX512
vcmpunord_sss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpunord_sss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpneq_usss k5{k7}, xmm29, xmm28 # AVX512
vcmpneq_usss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpneq_usss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnlt_uqss k5{k7}, xmm29, xmm28 # AVX512
vcmpnlt_uqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnlt_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpnle_uqss k5{k7}, xmm29, xmm28 # AVX512
vcmpnle_uqss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpnle_uqss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpord_sss k5{k7}, xmm29, xmm28 # AVX512
vcmpord_sss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcmpord_sss k5{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vcmpeq_usss k5{k7}, xmm29, xmm28 # AVX512
vcmpeq_usss k5{k7}, xmm29, xmm28, {sae} # AVX512
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rcx] # AVX512
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
# --- AVX512F scalar-operation encoding coverage (Intel syntax) ---------------
# Each instruction below is a test pattern for the assembler, not executable
# program logic: variants exercise opmask {k7}, zeroing {z}, SAE / static
# rounding ({sae}, {rn-sae}, {ru-sae}, {rd-sae}, {rz-sae}), and memory forms.
# The displacement pairs (+508/+512, -512/-516 for DWORD; +1016/+1024,
# -1024/-1032 for QWORD) deliberately straddle the EVEX compressed-Disp8
# boundary: "Disp8" lines must encode with an 8-bit scaled displacement,
# their neighbours must fall back to a full 32-bit displacement.
# Do not alter operands or spacing-sensitive text: the companion dump file
# matches the disassembly of these exact encodings.
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpeq_usss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmpnge_uqss k5{k7}, xmm29, xmm28	 # AVX512
vcmpnge_uqss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpnge_uqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmpngt_uqss k5{k7}, xmm29, xmm28	 # AVX512
vcmpngt_uqss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpngt_uqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmpfalse_osss k5{k7}, xmm29, xmm28	 # AVX512
vcmpfalse_osss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpfalse_osss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmpneq_osss k5{k7}, xmm29, xmm28	 # AVX512
vcmpneq_osss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpneq_osss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmpge_oqss k5{k7}, xmm29, xmm28	 # AVX512
vcmpge_oqss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpge_oqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmpgt_oqss k5{k7}, xmm29, xmm28	 # AVX512
vcmpgt_oqss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmpgt_oqss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcmptrue_usss k5{k7}, xmm29, xmm28	 # AVX512
vcmptrue_usss k5{k7}, xmm29, xmm28, {sae}	 # AVX512
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcmptrue_usss k5{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# vcomisd/vcomiss: ordered compares setting EFLAGS; no opmask destination,
# {sae} form suppresses exceptions.
vcomisd xmm30, xmm29	 # AVX512
vcomisd xmm30, xmm29, {sae}	 # AVX512
vcomisd xmm30, QWORD PTR [rcx]	 # AVX512
vcomisd xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcomisd xmm30, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vcomisd xmm30, QWORD PTR [rdx+1024]	 # AVX512
vcomisd xmm30, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vcomisd xmm30, QWORD PTR [rdx-1032]	 # AVX512
vcomiss xmm30, xmm29	 # AVX512
vcomiss xmm30, xmm29, {sae}	 # AVX512
vcomiss xmm30, DWORD PTR [rcx]	 # AVX512
vcomiss xmm30, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcomiss xmm30, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcomiss xmm30, DWORD PTR [rdx+512]	 # AVX512
vcomiss xmm30, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcomiss xmm30, DWORD PTR [rdx-516]	 # AVX512
# Scalar conversions to/from GPRs; 32- and 64-bit destinations cover both
# EVEX.W forms, and eax/ebp/r13d/rax/r8 cover ModRM/REX register encodings.
vcvtsd2si eax, xmm30, {rn-sae}	 # AVX512
vcvtsd2si eax, xmm30, {ru-sae}	 # AVX512
vcvtsd2si eax, xmm30, {rd-sae}	 # AVX512
vcvtsd2si eax, xmm30, {rz-sae}	 # AVX512
vcvtsd2si ebp, xmm30, {rn-sae}	 # AVX512
vcvtsd2si ebp, xmm30, {ru-sae}	 # AVX512
vcvtsd2si ebp, xmm30, {rd-sae}	 # AVX512
vcvtsd2si ebp, xmm30, {rz-sae}	 # AVX512
vcvtsd2si r13d, xmm30, {rn-sae}	 # AVX512
vcvtsd2si r13d, xmm30, {ru-sae}	 # AVX512
vcvtsd2si r13d, xmm30, {rd-sae}	 # AVX512
vcvtsd2si r13d, xmm30, {rz-sae}	 # AVX512
vcvtsd2si rax, xmm30, {rn-sae}	 # AVX512
vcvtsd2si rax, xmm30, {ru-sae}	 # AVX512
vcvtsd2si rax, xmm30, {rd-sae}	 # AVX512
vcvtsd2si rax, xmm30, {rz-sae}	 # AVX512
vcvtsd2si r8, xmm30, {rn-sae}	 # AVX512
vcvtsd2si r8, xmm30, {ru-sae}	 # AVX512
vcvtsd2si r8, xmm30, {rd-sae}	 # AVX512
vcvtsd2si r8, xmm30, {rz-sae}	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, xmm28	 # AVX512
vcvtsd2ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vcvtsd2ss xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vcvtsd2ss xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vcvtsd2ss xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
# vcvtsi2sd from 32-bit sources has no rounding variants (the int32->double
# conversion is exact); the 64-bit source forms below do take rounding.
vcvtsi2sd xmm30, xmm29, eax	 # AVX512
vcvtsi2sd xmm30, xmm29, ebp	 # AVX512
vcvtsi2sd xmm30, xmm29, r13d	 # AVX512
vcvtsi2sd xmm30, xmm29, DWORD PTR [rcx]	 # AVX512
vcvtsi2sd xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcvtsi2sd xmm30, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcvtsi2sd xmm30, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcvtsi2sd xmm30, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcvtsi2sd xmm30, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcvtsi2sd xmm30, xmm29, rax	 # AVX512
vcvtsi2sd xmm30, xmm29, {rn-sae}, rax	 # AVX512
vcvtsi2sd xmm30, xmm29, {ru-sae}, rax	 # AVX512
vcvtsi2sd xmm30, xmm29, {rd-sae}, rax	 # AVX512
vcvtsi2sd xmm30, xmm29, {rz-sae}, rax	 # AVX512
vcvtsi2sd xmm30, xmm29, r8	 # AVX512
vcvtsi2sd xmm30, xmm29, {rn-sae}, r8	 # AVX512
vcvtsi2sd xmm30, xmm29, {ru-sae}, r8	 # AVX512
vcvtsi2sd xmm30, xmm29, {rd-sae}, r8	 # AVX512
vcvtsi2sd xmm30, xmm29, {rz-sae}, r8	 # AVX512
vcvtsi2sd xmm30, xmm29, QWORD PTR [rcx]	 # AVX512
vcvtsi2sd xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcvtsi2sd xmm30, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vcvtsi2sd xmm30, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vcvtsi2sd xmm30, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vcvtsi2sd xmm30, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vcvtsi2ss xmm30, xmm29, eax	 # AVX512
vcvtsi2ss xmm30, xmm29, {rn-sae}, eax	 # AVX512
vcvtsi2ss xmm30, xmm29, {ru-sae}, eax	 # AVX512
vcvtsi2ss xmm30, xmm29, {rd-sae}, eax	 # AVX512
vcvtsi2ss xmm30, xmm29, {rz-sae}, eax	 # AVX512
vcvtsi2ss xmm30, xmm29, ebp	 # AVX512
vcvtsi2ss xmm30, xmm29, {rn-sae}, ebp	 # AVX512
vcvtsi2ss xmm30, xmm29, {ru-sae}, ebp	 # AVX512
vcvtsi2ss xmm30, xmm29, {rd-sae}, ebp	 # AVX512
vcvtsi2ss xmm30, xmm29, {rz-sae}, ebp	 # AVX512
vcvtsi2ss xmm30, xmm29, r13d	 # AVX512
vcvtsi2ss xmm30, xmm29, {rn-sae}, r13d	 # AVX512
vcvtsi2ss xmm30, xmm29, {ru-sae}, r13d	 # AVX512
vcvtsi2ss xmm30, xmm29, {rd-sae}, r13d	 # AVX512
vcvtsi2ss xmm30, xmm29, {rz-sae}, r13d	 # AVX512
vcvtsi2ss xmm30, xmm29, DWORD PTR [rcx]	 # AVX512
vcvtsi2ss xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcvtsi2ss xmm30, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcvtsi2ss xmm30, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcvtsi2ss xmm30, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcvtsi2ss xmm30, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcvtsi2ss xmm30, xmm29, rax	 # AVX512
vcvtsi2ss xmm30, xmm29, {rn-sae}, rax	 # AVX512
vcvtsi2ss xmm30, xmm29, {ru-sae}, rax	 # AVX512
vcvtsi2ss xmm30, xmm29, {rd-sae}, rax	 # AVX512
vcvtsi2ss xmm30, xmm29, {rz-sae}, rax	 # AVX512
vcvtsi2ss xmm30, xmm29, r8	 # AVX512
vcvtsi2ss xmm30, xmm29, {rn-sae}, r8	 # AVX512
vcvtsi2ss xmm30, xmm29, {ru-sae}, r8	 # AVX512
vcvtsi2ss xmm30, xmm29, {rd-sae}, r8	 # AVX512
vcvtsi2ss xmm30, xmm29, {rz-sae}, r8	 # AVX512
vcvtsi2ss xmm30, xmm29, QWORD PTR [rcx]	 # AVX512
vcvtsi2ss xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcvtsi2ss xmm30, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vcvtsi2ss xmm30, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vcvtsi2ss xmm30, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vcvtsi2ss xmm30, xmm29, QWORD PTR [rdx-1032]	 # AVX512
# vcvtss2sd widens, so only {sae} (no rounding) applies.
vcvtss2sd xmm30{k7}, xmm29, xmm28	 # AVX512
vcvtss2sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vcvtss2sd xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vcvtss2sd xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vcvtss2sd xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vcvtss2sd xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vcvtss2sd xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vcvtss2sd xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vcvtss2sd xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vcvtss2si eax, xmm30, {rn-sae}	 # AVX512
vcvtss2si eax, xmm30, {ru-sae}	 # AVX512
vcvtss2si eax, xmm30, {rd-sae}	 # AVX512
vcvtss2si eax, xmm30, {rz-sae}	 # AVX512
vcvtss2si ebp, xmm30, {rn-sae}	 # AVX512
vcvtss2si ebp, xmm30, {ru-sae}	 # AVX512
vcvtss2si ebp, xmm30, {rd-sae}	 # AVX512
vcvtss2si ebp, xmm30, {rz-sae}	 # AVX512
vcvtss2si r13d, xmm30, {rn-sae}	 # AVX512
vcvtss2si r13d, xmm30, {ru-sae}	 # AVX512
vcvtss2si r13d, xmm30, {rd-sae}	 # AVX512
vcvtss2si r13d, xmm30, {rz-sae}	 # AVX512
vcvtss2si rax, xmm30, {rn-sae}	 # AVX512
vcvtss2si rax, xmm30, {ru-sae}	 # AVX512
vcvtss2si rax, xmm30, {rd-sae}	 # AVX512
vcvtss2si rax, xmm30, {rz-sae}	 # AVX512
vcvtss2si r8, xmm30, {rn-sae}	 # AVX512
vcvtss2si r8, xmm30, {ru-sae}	 # AVX512
vcvtss2si r8, xmm30, {rd-sae}	 # AVX512
vcvtss2si r8, xmm30, {rz-sae}	 # AVX512
# Truncating conversions always round toward zero, hence only {sae}.
vcvttsd2si eax, xmm30, {sae}	 # AVX512
vcvttsd2si ebp, xmm30, {sae}	 # AVX512
vcvttsd2si r13d, xmm30, {sae}	 # AVX512
vcvttsd2si rax, xmm30, {sae}	 # AVX512
vcvttsd2si r8, xmm30, {sae}	 # AVX512
vcvttss2si eax, xmm30, {sae}	 # AVX512
vcvttss2si ebp, xmm30, {sae}	 # AVX512
vcvttss2si r13d, xmm30, {sae}	 # AVX512
vcvttss2si rax, xmm30, {sae}	 # AVX512
vcvttss2si r8, xmm30, {sae}	 # AVX512
# Scalar arithmetic with masking/zeroing/rounding and both displacement widths.
vdivsd xmm30{k7}, xmm29, xmm28	 # AVX512
vdivsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vdivsd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vdivsd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vdivsd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vdivsd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vdivsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vdivsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vdivsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vdivsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vdivsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vdivsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vdivss xmm30{k7}, xmm29, xmm28	 # AVX512
vdivss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vdivss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vdivss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vdivss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vdivss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vdivss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vdivss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vdivss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vdivss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vdivss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vdivss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# Scalar FMA family: 132/213/231 operand orders, sd/ss widths, each with
# the full mask/zeroing/rounding/memory pattern set.
vfmadd132sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfmadd132sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfmadd132ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfmadd213sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfmadd213ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfmadd231sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfmadd231ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfmsub132sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfmsub132ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfmsub213sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfmsub213ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfmsub231sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfmsub231ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmadd132sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfnmadd132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmadd132ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfnmadd132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmadd213sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfnmadd213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmadd213ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfnmadd213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmadd231sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfnmadd231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmadd231ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfnmadd231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmsub132sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfnmsub132sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmsub132ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfnmsub132ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmsub213sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfnmsub213sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmsub213ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfnmsub213ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmsub231sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vfnmsub231sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, xmm28	 # AVX512
vfnmsub231ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vfnmsub231ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# Exponent/mantissa extraction and min/max/move scalar forms.
vgetexpsd xmm30{k7}, xmm29, xmm28	 # AVX512
vgetexpsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vgetexpsd xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vgetexpsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vgetexpss xmm30{k7}, xmm29, xmm28	 # AVX512
vgetexpss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vgetexpss xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vgetexpss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# Immediate operand tested both as hex (0xab) and decimal (123).
vgetmantsd xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512
vgetmantsd xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512
vgetmantsd xmm30{k7}, xmm29, xmm28, {sae}, 0xab	 # AVX512
vgetmantsd xmm30{k7}, xmm29, xmm28, 123	 # AVX512
vgetmantsd xmm30{k7}, xmm29, xmm28, {sae}, 123	 # AVX512
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rcx], 123	 # AVX512
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123	 # AVX512
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016], 123	 # AVX512 Disp8
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024], 123	 # AVX512
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024], 123	 # AVX512 Disp8
vgetmantsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032], 123	 # AVX512
vgetmantss xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512
vgetmantss xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512
vgetmantss xmm30{k7}, xmm29, xmm28, {sae}, 0xab	 # AVX512
vgetmantss xmm30{k7}, xmm29, xmm28, 123	 # AVX512
vgetmantss xmm30{k7}, xmm29, xmm28, {sae}, 123	 # AVX512
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rcx], 123	 # AVX512
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123	 # AVX512
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx+508], 123	 # AVX512 Disp8
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx+512], 123	 # AVX512
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx-512], 123	 # AVX512 Disp8
vgetmantss xmm30{k7}, xmm29, DWORD PTR [rdx-516], 123	 # AVX512
vmaxsd xmm30{k7}, xmm29, xmm28	 # AVX512
vmaxsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vmaxsd xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vmaxsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vmaxss xmm30{k7}, xmm29, xmm28	 # AVX512
vmaxss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vmaxss xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vmaxss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vmaxss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vmaxss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
vminsd xmm30{k7}, xmm29, xmm28	 # AVX512
vminsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vminsd xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vminsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vminsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vminsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vminss xmm30{k7}, xmm29, xmm28	 # AVX512
vminss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vminss xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512
vminss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vminss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vminss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vminss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vminss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vminss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# vmovsd/vmovss: load, masked store, and register-register merge forms.
vmovsd xmm30{k7}, QWORD PTR [rcx]	 # AVX512
vmovsd xmm30{k7}{z}, QWORD PTR [rcx]	 # AVX512
vmovsd xmm30{k7}, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vmovsd xmm30{k7}, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vmovsd xmm30{k7}, QWORD PTR [rdx+1024]	 # AVX512
vmovsd xmm30{k7}, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vmovsd xmm30{k7}, QWORD PTR [rdx-1032]	 # AVX512
vmovsd QWORD PTR [rcx]{k7}, xmm30	 # AVX512
vmovsd QWORD PTR [rax+r14*8+0x1234]{k7}, xmm30	 # AVX512
vmovsd QWORD PTR [rdx+1016]{k7}, xmm30	 # AVX512 Disp8
vmovsd QWORD PTR [rdx+1024]{k7}, xmm30	 # AVX512
vmovsd QWORD PTR [rdx-1024]{k7}, xmm30	 # AVX512 Disp8
vmovsd QWORD PTR [rdx-1032]{k7}, xmm30	 # AVX512
vmovsd xmm30{k7}, xmm29, xmm28	 # AVX512
vmovsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vmovss xmm30{k7}, DWORD PTR [rcx]	 # AVX512
vmovss xmm30{k7}{z}, DWORD PTR [rcx]	 # AVX512
vmovss xmm30{k7}, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vmovss xmm30{k7}, DWORD PTR [rdx+508]	 # AVX512 Disp8
vmovss xmm30{k7}, DWORD PTR [rdx+512]	 # AVX512
vmovss xmm30{k7}, DWORD PTR [rdx-512]	 # AVX512 Disp8
vmovss xmm30{k7}, DWORD PTR [rdx-516]	 # AVX512
vmovss DWORD PTR [rcx]{k7}, xmm30	 # AVX512
vmovss DWORD PTR [rax+r14*8+0x1234]{k7}, xmm30	 # AVX512
vmovss DWORD PTR [rdx+508]{k7}, xmm30	 # AVX512 Disp8
vmovss DWORD PTR [rdx+512]{k7}, xmm30	 # AVX512
vmovss DWORD PTR [rdx-512]{k7}, xmm30	 # AVX512 Disp8
vmovss DWORD PTR [rdx-516]{k7}, xmm30	 # AVX512
vmovss xmm30{k7}, xmm29, xmm28	 # AVX512
vmovss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vmulsd xmm30{k7}, xmm29, xmm28	 # AVX512
vmulsd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vmulsd xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vmulsd xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vmulsd xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vmulsd xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vmulsd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vmulsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vmulsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vmulss xmm30{k7}, xmm29, xmm28	 # AVX512
vmulss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vmulss xmm30{k7}, xmm29, xmm28, {rn-sae}	 # AVX512
vmulss xmm30{k7}, xmm29, xmm28, {ru-sae}	 # AVX512
vmulss xmm30{k7}, xmm29, xmm28, {rd-sae}	 # AVX512
vmulss xmm30{k7}, xmm29, xmm28, {rz-sae}	 # AVX512
vmulss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vmulss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vmulss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# 14-bit reciprocal approximations (no SAE/rounding variants defined).
vrcp14sd xmm30{k7}, xmm29, xmm28	 # AVX512
vrcp14sd xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rcx]	 # AVX512
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234]	 # AVX512
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016]	 # AVX512 Disp8
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024]	 # AVX512
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024]	 # AVX512 Disp8
vrcp14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032]	 # AVX512
vrcp14ss xmm30{k7}, xmm29, xmm28	 # AVX512
vrcp14ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rcx]	 # AVX512
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234]	 # AVX512
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx+508]	 # AVX512 Disp8
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx+512]	 # AVX512
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx-512]	 # AVX512 Disp8
vrcp14ss xmm30{k7}, xmm29, DWORD PTR [rdx-516]	 # AVX512
# NOTE(review): vrcp28ss is documented by Intel as part of AVX512ER
# (28-bit reciprocal); confirm the "AVX512EMI" tag below is the marker
# intended by the companion dump/driver files before changing it.
vrcp28ss xmm30{k7}, xmm29, xmm28	 # AVX512EMI
vrcp28ss xmm30{k7}{z}, xmm29, xmm28	 # AVX512EMI
vrcp28ss xmm30{k7}, xmm29, xmm28, {sae}	 # AVX512EMI
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512EMI
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512EMI
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512EMI Disp8
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512EMI
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512EMI Disp8
vrcp28ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512EMI
vrcp28sd xmm30{k7}, xmm29, xmm28 # AVX512EMI
vrcp28sd xmm30{k7}{z}, xmm29, xmm28 # AVX512EMI
vrcp28sd xmm30{k7}, xmm29, xmm28, {sae} # AVX512EMI
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512EMI
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512EMI
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512EMI Disp8
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512EMI
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512EMI Disp8
vrcp28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512EMI
vrsqrt14sd xmm30{k7}, xmm29, xmm28 # AVX512
vrsqrt14sd xmm30{k7}{z}, xmm29, xmm28 # AVX512
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vrsqrt14sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vrsqrt14ss xmm30{k7}, xmm29, xmm28 # AVX512
vrsqrt14ss xmm30{k7}{z}, xmm29, xmm28 # AVX512
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vrsqrt14ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vrsqrt28ss xmm30{k7}, xmm29, xmm28 # AVX512EMI
vrsqrt28ss xmm30{k7}{z}, xmm29, xmm28 # AVX512EMI
vrsqrt28ss xmm30{k7}, xmm29, xmm28, {sae} # AVX512EMI
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512EMI
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512EMI
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512EMI Disp8
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512EMI
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512EMI Disp8
vrsqrt28ss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512EMI
vrsqrt28sd xmm30{k7}, xmm29, xmm28 # AVX512EMI
vrsqrt28sd xmm30{k7}{z}, xmm29, xmm28 # AVX512EMI
vrsqrt28sd xmm30{k7}, xmm29, xmm28, {sae} # AVX512EMI
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512EMI
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512EMI
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512EMI Disp8
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512EMI
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512EMI Disp8
vrsqrt28sd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512EMI
vsqrtsd xmm30{k7}, xmm29, xmm28 # AVX512
vsqrtsd xmm30{k7}{z}, xmm29, xmm28 # AVX512
vsqrtsd xmm30{k7}, xmm29, xmm28, {rn-sae} # AVX512
vsqrtsd xmm30{k7}, xmm29, xmm28, {ru-sae} # AVX512
vsqrtsd xmm30{k7}, xmm29, xmm28, {rd-sae} # AVX512
vsqrtsd xmm30{k7}, xmm29, xmm28, {rz-sae} # AVX512
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vsqrtsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vsqrtss xmm30{k7}, xmm29, xmm28 # AVX512
vsqrtss xmm30{k7}{z}, xmm29, xmm28 # AVX512
vsqrtss xmm30{k7}, xmm29, xmm28, {rn-sae} # AVX512
vsqrtss xmm30{k7}, xmm29, xmm28, {ru-sae} # AVX512
vsqrtss xmm30{k7}, xmm29, xmm28, {rd-sae} # AVX512
vsqrtss xmm30{k7}, xmm29, xmm28, {rz-sae} # AVX512
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vsqrtss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vsubsd xmm30{k7}, xmm29, xmm28 # AVX512
vsubsd xmm30{k7}{z}, xmm29, xmm28 # AVX512
vsubsd xmm30{k7}, xmm29, xmm28, {rn-sae} # AVX512
vsubsd xmm30{k7}, xmm29, xmm28, {ru-sae} # AVX512
vsubsd xmm30{k7}, xmm29, xmm28, {rd-sae} # AVX512
vsubsd xmm30{k7}, xmm29, xmm28, {rz-sae} # AVX512
vsubsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512
vsubsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vsubsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vsubss xmm30{k7}, xmm29, xmm28 # AVX512
vsubss xmm30{k7}{z}, xmm29, xmm28 # AVX512
vsubss xmm30{k7}, xmm29, xmm28, {rn-sae} # AVX512
vsubss xmm30{k7}, xmm29, xmm28, {ru-sae} # AVX512
vsubss xmm30{k7}, xmm29, xmm28, {rd-sae} # AVX512
vsubss xmm30{k7}, xmm29, xmm28, {rz-sae} # AVX512
vsubss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512
vsubss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vsubss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vucomisd xmm30, xmm29 # AVX512
vucomisd xmm30, xmm29, {sae} # AVX512
vucomisd xmm30, QWORD PTR [rcx] # AVX512
vucomisd xmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512
vucomisd xmm30, QWORD PTR [rdx+1016] # AVX512 Disp8
vucomisd xmm30, QWORD PTR [rdx+1024] # AVX512
vucomisd xmm30, QWORD PTR [rdx-1024] # AVX512 Disp8
vucomisd xmm30, QWORD PTR [rdx-1032] # AVX512
vucomiss xmm30, xmm29 # AVX512
vucomiss xmm30, xmm29, {sae} # AVX512
vucomiss xmm30, DWORD PTR [rcx] # AVX512
vucomiss xmm30, DWORD PTR [rax+r14*8+0x1234] # AVX512
vucomiss xmm30, DWORD PTR [rdx+508] # AVX512 Disp8
vucomiss xmm30, DWORD PTR [rdx+512] # AVX512
vucomiss xmm30, DWORD PTR [rdx-512] # AVX512 Disp8
vucomiss xmm30, DWORD PTR [rdx-516] # AVX512
vcvtsd2usi eax, xmm30 # AVX512
vcvtsd2usi eax, xmm30, {rn-sae} # AVX512
vcvtsd2usi eax, xmm30, {ru-sae} # AVX512
vcvtsd2usi eax, xmm30, {rd-sae} # AVX512
vcvtsd2usi eax, xmm30, {rz-sae} # AVX512
vcvtsd2usi eax, QWORD PTR [rcx] # AVX512
vcvtsd2usi eax, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtsd2usi eax, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtsd2usi eax, QWORD PTR [rdx+1024] # AVX512
vcvtsd2usi eax, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtsd2usi eax, QWORD PTR [rdx-1032] # AVX512
vcvtsd2usi ebp, xmm30 # AVX512
vcvtsd2usi ebp, xmm30, {rn-sae} # AVX512
vcvtsd2usi ebp, xmm30, {ru-sae} # AVX512
vcvtsd2usi ebp, xmm30, {rd-sae} # AVX512
vcvtsd2usi ebp, xmm30, {rz-sae} # AVX512
vcvtsd2usi ebp, QWORD PTR [rcx] # AVX512
vcvtsd2usi ebp, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtsd2usi ebp, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtsd2usi ebp, QWORD PTR [rdx+1024] # AVX512
vcvtsd2usi ebp, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtsd2usi ebp, QWORD PTR [rdx-1032] # AVX512
vcvtsd2usi r13d, xmm30 # AVX512
vcvtsd2usi r13d, xmm30, {rn-sae} # AVX512
vcvtsd2usi r13d, xmm30, {ru-sae} # AVX512
vcvtsd2usi r13d, xmm30, {rd-sae} # AVX512
vcvtsd2usi r13d, xmm30, {rz-sae} # AVX512
vcvtsd2usi r13d, QWORD PTR [rcx] # AVX512
vcvtsd2usi r13d, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtsd2usi r13d, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtsd2usi r13d, QWORD PTR [rdx+1024] # AVX512
vcvtsd2usi r13d, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtsd2usi r13d, QWORD PTR [rdx-1032] # AVX512
vcvtsd2usi rax, xmm30 # AVX512
vcvtsd2usi rax, xmm30, {rn-sae} # AVX512
vcvtsd2usi rax, xmm30, {ru-sae} # AVX512
vcvtsd2usi rax, xmm30, {rd-sae} # AVX512
vcvtsd2usi rax, xmm30, {rz-sae} # AVX512
vcvtsd2usi rax, QWORD PTR [rcx] # AVX512
vcvtsd2usi rax, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtsd2usi rax, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtsd2usi rax, QWORD PTR [rdx+1024] # AVX512
vcvtsd2usi rax, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtsd2usi rax, QWORD PTR [rdx-1032] # AVX512
vcvtsd2usi r8, xmm30 # AVX512
vcvtsd2usi r8, xmm30, {rn-sae} # AVX512
vcvtsd2usi r8, xmm30, {ru-sae} # AVX512
vcvtsd2usi r8, xmm30, {rd-sae} # AVX512
vcvtsd2usi r8, xmm30, {rz-sae} # AVX512
vcvtsd2usi r8, QWORD PTR [rcx] # AVX512
vcvtsd2usi r8, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtsd2usi r8, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtsd2usi r8, QWORD PTR [rdx+1024] # AVX512
vcvtsd2usi r8, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtsd2usi r8, QWORD PTR [rdx-1032] # AVX512
vcvtss2usi eax, xmm30 # AVX512
vcvtss2usi eax, xmm30, {rn-sae} # AVX512
vcvtss2usi eax, xmm30, {ru-sae} # AVX512
vcvtss2usi eax, xmm30, {rd-sae} # AVX512
vcvtss2usi eax, xmm30, {rz-sae} # AVX512
vcvtss2usi eax, DWORD PTR [rcx] # AVX512
vcvtss2usi eax, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtss2usi eax, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtss2usi eax, DWORD PTR [rdx+512] # AVX512
vcvtss2usi eax, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtss2usi eax, DWORD PTR [rdx-516] # AVX512
vcvtss2usi ebp, xmm30 # AVX512
vcvtss2usi ebp, xmm30, {rn-sae} # AVX512
vcvtss2usi ebp, xmm30, {ru-sae} # AVX512
vcvtss2usi ebp, xmm30, {rd-sae} # AVX512
vcvtss2usi ebp, xmm30, {rz-sae} # AVX512
vcvtss2usi ebp, DWORD PTR [rcx] # AVX512
vcvtss2usi ebp, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtss2usi ebp, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtss2usi ebp, DWORD PTR [rdx+512] # AVX512
vcvtss2usi ebp, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtss2usi ebp, DWORD PTR [rdx-516] # AVX512
vcvtss2usi r13d, xmm30 # AVX512
vcvtss2usi r13d, xmm30, {rn-sae} # AVX512
vcvtss2usi r13d, xmm30, {ru-sae} # AVX512
vcvtss2usi r13d, xmm30, {rd-sae} # AVX512
vcvtss2usi r13d, xmm30, {rz-sae} # AVX512
vcvtss2usi r13d, DWORD PTR [rcx] # AVX512
vcvtss2usi r13d, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtss2usi r13d, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtss2usi r13d, DWORD PTR [rdx+512] # AVX512
vcvtss2usi r13d, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtss2usi r13d, DWORD PTR [rdx-516] # AVX512
vcvtss2usi rax, xmm30 # AVX512
vcvtss2usi rax, xmm30, {rn-sae} # AVX512
vcvtss2usi rax, xmm30, {ru-sae} # AVX512
vcvtss2usi rax, xmm30, {rd-sae} # AVX512
vcvtss2usi rax, xmm30, {rz-sae} # AVX512
vcvtss2usi rax, DWORD PTR [rcx] # AVX512
vcvtss2usi rax, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtss2usi rax, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtss2usi rax, DWORD PTR [rdx+512] # AVX512
vcvtss2usi rax, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtss2usi rax, DWORD PTR [rdx-516] # AVX512
vcvtss2usi r8, xmm30 # AVX512
vcvtss2usi r8, xmm30, {rn-sae} # AVX512
vcvtss2usi r8, xmm30, {ru-sae} # AVX512
vcvtss2usi r8, xmm30, {rd-sae} # AVX512
vcvtss2usi r8, xmm30, {rz-sae} # AVX512
vcvtss2usi r8, DWORD PTR [rcx] # AVX512
vcvtss2usi r8, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtss2usi r8, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtss2usi r8, DWORD PTR [rdx+512] # AVX512
vcvtss2usi r8, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtss2usi r8, DWORD PTR [rdx-516] # AVX512
vcvtusi2sd xmm30, xmm29, eax # AVX512
vcvtusi2sd xmm30, xmm29, ebp # AVX512
vcvtusi2sd xmm30, xmm29, r13d # AVX512
vcvtusi2sd xmm30, xmm29, DWORD PTR [rcx] # AVX512
vcvtusi2sd xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx+512] # AVX512
vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtusi2sd xmm30, xmm29, DWORD PTR [rdx-516] # AVX512
vcvtusi2sd xmm30, xmm29, rax # AVX512
vcvtusi2sd xmm30, xmm29, {rn-sae}, rax # AVX512
vcvtusi2sd xmm30, xmm29, {ru-sae}, rax # AVX512
vcvtusi2sd xmm30, xmm29, {rd-sae}, rax # AVX512
vcvtusi2sd xmm30, xmm29, {rz-sae}, rax # AVX512
vcvtusi2sd xmm30, xmm29, r8 # AVX512
vcvtusi2sd xmm30, xmm29, {rn-sae}, r8 # AVX512
vcvtusi2sd xmm30, xmm29, {ru-sae}, r8 # AVX512
vcvtusi2sd xmm30, xmm29, {rd-sae}, r8 # AVX512
vcvtusi2sd xmm30, xmm29, {rz-sae}, r8 # AVX512
vcvtusi2sd xmm30, xmm29, QWORD PTR [rcx] # AVX512
vcvtusi2sd xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx+1024] # AVX512
vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtusi2sd xmm30, xmm29, QWORD PTR [rdx-1032] # AVX512
vcvtusi2ss xmm30, xmm29, eax # AVX512
vcvtusi2ss xmm30, xmm29, {rn-sae}, eax # AVX512
vcvtusi2ss xmm30, xmm29, {ru-sae}, eax # AVX512
vcvtusi2ss xmm30, xmm29, {rd-sae}, eax # AVX512
vcvtusi2ss xmm30, xmm29, {rz-sae}, eax # AVX512
vcvtusi2ss xmm30, xmm29, ebp # AVX512
vcvtusi2ss xmm30, xmm29, {rn-sae}, ebp # AVX512
vcvtusi2ss xmm30, xmm29, {ru-sae}, ebp # AVX512
vcvtusi2ss xmm30, xmm29, {rd-sae}, ebp # AVX512
vcvtusi2ss xmm30, xmm29, {rz-sae}, ebp # AVX512
vcvtusi2ss xmm30, xmm29, r13d # AVX512
vcvtusi2ss xmm30, xmm29, {rn-sae}, r13d # AVX512
vcvtusi2ss xmm30, xmm29, {ru-sae}, r13d # AVX512
vcvtusi2ss xmm30, xmm29, {rd-sae}, r13d # AVX512
vcvtusi2ss xmm30, xmm29, {rz-sae}, r13d # AVX512
vcvtusi2ss xmm30, xmm29, DWORD PTR [rcx] # AVX512
vcvtusi2ss xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx+512] # AVX512
vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vcvtusi2ss xmm30, xmm29, DWORD PTR [rdx-516] # AVX512
vcvtusi2ss xmm30, xmm29, rax # AVX512
vcvtusi2ss xmm30, xmm29, {rn-sae}, rax # AVX512
vcvtusi2ss xmm30, xmm29, {ru-sae}, rax # AVX512
vcvtusi2ss xmm30, xmm29, {rd-sae}, rax # AVX512
vcvtusi2ss xmm30, xmm29, {rz-sae}, rax # AVX512
vcvtusi2ss xmm30, xmm29, r8 # AVX512
vcvtusi2ss xmm30, xmm29, {rn-sae}, r8 # AVX512
vcvtusi2ss xmm30, xmm29, {ru-sae}, r8 # AVX512
vcvtusi2ss xmm30, xmm29, {rd-sae}, r8 # AVX512
vcvtusi2ss xmm30, xmm29, {rz-sae}, r8 # AVX512
vcvtusi2ss xmm30, xmm29, QWORD PTR [rcx] # AVX512
vcvtusi2ss xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx+1024] # AVX512
vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vcvtusi2ss xmm30, xmm29, QWORD PTR [rdx-1032] # AVX512
vscalefsd xmm30{k7}, xmm29, xmm28 # AVX512
vscalefsd xmm30{k7}{z}, xmm29, xmm28 # AVX512
vscalefsd xmm30{k7}, xmm29, xmm28, {rn-sae} # AVX512
vscalefsd xmm30{k7}, xmm29, xmm28, {ru-sae} # AVX512
vscalefsd xmm30{k7}, xmm29, xmm28, {rd-sae} # AVX512
vscalefsd xmm30{k7}, xmm29, xmm28, {rz-sae} # AVX512
vscalefsd xmm30{k7}, xmm29, QWORD PTR [rcx] # AVX512
vscalefsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234] # AVX512
vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016] # AVX512 Disp8
vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024] # AVX512
vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024] # AVX512 Disp8
vscalefsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032] # AVX512
vscalefss xmm30{k7}, xmm29, xmm28 # AVX512
vscalefss xmm30{k7}{z}, xmm29, xmm28 # AVX512
vscalefss xmm30{k7}, xmm29, xmm28, {rn-sae} # AVX512
vscalefss xmm30{k7}, xmm29, xmm28, {ru-sae} # AVX512
vscalefss xmm30{k7}, xmm29, xmm28, {rd-sae} # AVX512
vscalefss xmm30{k7}, xmm29, xmm28, {rz-sae} # AVX512
vscalefss xmm30{k7}, xmm29, DWORD PTR [rcx] # AVX512
vscalefss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234] # AVX512
vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx+508] # AVX512 Disp8
vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx+512] # AVX512
vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx-512] # AVX512 Disp8
vscalefss xmm30{k7}, xmm29, DWORD PTR [rdx-516] # AVX512
vfixupimmss xmm30{k7}, xmm29, xmm28, 0xab # AVX512
vfixupimmss xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512
vfixupimmss xmm30{k7}, xmm29, xmm28, {sae}, 0xab # AVX512
vfixupimmss xmm30{k7}, xmm29, xmm28, 123 # AVX512
vfixupimmss xmm30{k7}, xmm29, xmm28, {sae}, 123 # AVX512
vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rcx], 123 # AVX512
vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512
vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx+508], 123 # AVX512 Disp8
vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx+512], 123 # AVX512
vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx-512], 123 # AVX512 Disp8
vfixupimmss xmm30{k7}, xmm29, DWORD PTR [rdx-516], 123 # AVX512
vfixupimmsd xmm30{k7}, xmm29, xmm28, 0xab # AVX512
vfixupimmsd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512
vfixupimmsd xmm30{k7}, xmm29, xmm28, {sae}, 0xab # AVX512
vfixupimmsd xmm30{k7}, xmm29, xmm28, 123 # AVX512
vfixupimmsd xmm30{k7}, xmm29, xmm28, {sae}, 123 # AVX512
vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rcx], 123 # AVX512
vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512
vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx+1016], 123 # AVX512 Disp8
vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx+1024], 123 # AVX512
vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx-1024], 123 # AVX512 Disp8
vfixupimmsd xmm30{k7}, xmm29, QWORD PTR [rdx-1032], 123 # AVX512
vrndscalesd xmm30{k7}, xmm29, xmm28, 0xab # AVX512
vrndscalesd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512
vrndscalesd xmm30{k7}, xmm29, xmm28, {sae}, 0xab # AVX512
vrndscalesd xmm30{k7}, xmm29, xmm28, 123 # AVX512
vrndscalesd xmm30{k7}, xmm29, xmm28, {sae}, 123 # AVX512
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rcx], 123 # AVX512
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx+1016], 123 # AVX512 Disp8
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx+1024], 123 # AVX512
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx-1024], 123 # AVX512 Disp8
vrndscalesd xmm30{k7}, xmm29, QWORD PTR [rdx-1032], 123 # AVX512
vrndscaless xmm30{k7}, xmm29, xmm28, 0xab # AVX512
vrndscaless xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512
vrndscaless xmm30{k7}, xmm29, xmm28, {sae}, 0xab # AVX512
vrndscaless xmm30{k7}, xmm29, xmm28, 123 # AVX512
vrndscaless xmm30{k7}, xmm29, xmm28, {sae}, 123 # AVX512
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rcx], 123 # AVX512
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx+508], 123 # AVX512 Disp8
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx+512], 123 # AVX512
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx-512], 123 # AVX512 Disp8
vrndscaless xmm30{k7}, xmm29, DWORD PTR [rdx-516], 123 # AVX512
|
stsp/binutils-ia16
| 1,620
|
gas/testsuite/gas/i386/x86-64-lockbad-1.s
|
# 64bit unlockable instructions: gas must reject the LOCK prefix on every line below (diagnostics in the companion .l file are keyed to these line numbers -- do not insert or delete lines)
.text
foo:
lock mov %ecx, %eax	# mov is never lockable
lock mov (%rbx), %eax
lock add %ebx, %eax	# register destination: LOCK requires a memory destination
lock add $0x64, %ebx
lock adc %ebx, %eax
lock adc $0x64, %ebx
lock and %ebx, %eax
lock and $0x64, %ebx
lock btc %eax, %ebx	# bit-test group: lockable only with a memory operand
lock btc $0x64, %ebx
lock btr %eax, %ebx
lock btr $0x64, %ebx
lock bts %eax, %ebx
lock bts $0x64, %ebx
lock cmpxchg %eax,%ebx	# cmpxchg lockable only with a memory destination
lock decl %ebx
lock incl %ebx
lock negl %ebx
lock notl %ebx
lock or %ebx, %eax
lock or $0x64, %ebx
lock sbb %ebx, %eax
lock sbb $0x64, %ebx
lock sub %ebx, %eax
lock sub $0x64, %ebx
lock xadd %eax, %ebx
lock xchg %ebx, %eax	# xchg reg,reg: implicit locking applies only to memory forms
lock xchg %eax, %ebx
lock xor %ebx, %eax
lock xor $0x64, %ebx
lock add (%rbx), %eax	# memory is the source here; destination is a register, so still not lockable
lock adc (%rbx), %eax
lock and (%rbx), %eax
lock or (%rbx), %eax
lock sbb (%rbx), %eax
lock sub (%rbx), %eax
lock xor (%rbx), %eax
.intel_syntax noprefix	# repeat the same rejection cases in Intel syntax
lock mov eax,ebx
lock mov eax,DWORD PTR [rbx]
lock add eax,ebx
lock add ebx,0x64
lock adc eax,ebx
lock adc ebx,0x64
lock and eax,ebx
lock and ebx,0x64
lock btc ebx,eax
lock btc ebx,0x64
lock btr ebx,eax
lock btr ebx,0x64
lock bts ebx,eax
lock bts ebx,0x64
lock cmpxchg ebx,eax
lock dec ebx
lock inc ebx
lock neg ebx
lock not ebx
lock or eax,ebx
lock or ebx,0x64
lock sbb eax,ebx
lock sbb ebx,0x64
lock sub eax,ebx
lock sub ebx,0x64
lock xadd ebx,eax
lock xchg ebx,eax
lock xchg ebx,eax
lock xor eax,ebx
lock xor ebx,0x64
lock add eax,DWORD PTR [rbx]	# Intel order: [rbx] is the source operand, register destination
lock adc eax,DWORD PTR [rbx]
lock and eax,DWORD PTR [rbx]
lock or eax,DWORD PTR [rbx]
lock sbb eax,DWORD PTR [rbx]
lock sub eax,DWORD PTR [rbx]
lock xor eax,DWORD PTR [rbx]
|
stsp/binutils-ia16
| 110,199
|
gas/testsuite/gas/i386/x86-64-avx512_fp16.s
|
# Check 64bit AVX512-FP16 instructions
.allow_index_reg
.text
_start:
vaddph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vaddph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vaddph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vaddph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vaddph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vaddph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vaddsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vaddsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vaddsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vaddsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vaddsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcmpph $123, %zmm28, %zmm29, %k5 #AVX512-FP16
vcmpph $123, {sae}, %zmm28, %zmm29, %k5 #AVX512-FP16 HAS_SAE
vcmpph $123, {sae}, %zmm28, %zmm29, %k5{%k7} #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpph $123, 0x10000000(%rbp, %r14, 8), %zmm29, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vcmpph $123, (%r9){1to32}, %zmm29, %k5 #AVX512-FP16 BROADCAST_EN
vcmpph $123, 8128(%rcx), %zmm29, %k5 #AVX512-FP16 Disp8(7f)
vcmpph $123, -256(%rdx){1to32}, %zmm29, %k5{%k7} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpsh $123, %xmm28, %xmm29, %k5 #AVX512-FP16
vcmpsh $123, {sae}, %xmm28, %xmm29, %k5 #AVX512-FP16 HAS_SAE
vcmpsh $123, {sae}, %xmm28, %xmm29, %k5{%k7} #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpsh $123, 0x10000000(%rbp, %r14, 8), %xmm29, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vcmpsh $123, (%r9), %xmm29, %k5 #AVX512-FP16
vcmpsh $123, 254(%rcx), %xmm29, %k5 #AVX512-FP16 Disp8(7f)
vcmpsh $123, -256(%rdx), %xmm29, %k5{%k7} #AVX512-FP16 Disp8(80) MASK_ENABLING
vcomish %xmm29, %xmm30 #AVX512-FP16
vcomish {sae}, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vcomish 0x10000000(%rbp, %r14, 8), %xmm30 #AVX512-FP16
vcomish (%r9), %xmm30 #AVX512-FP16
vcomish 254(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vcomish -256(%rdx), %xmm30 #AVX512-FP16 Disp8(80)
vcvtdq2ph %zmm29, %ymm30 #AVX512-FP16
vcvtdq2ph {rn-sae}, %zmm29, %ymm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtdq2ph {rn-sae}, %zmm29, %ymm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtdq2ph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtdq2ph (%r9){1to16}, %ymm30 #AVX512-FP16 BROADCAST_EN
vcvtdq2ph 8128(%rcx), %ymm30 #AVX512-FP16 Disp8(7f)
vcvtdq2ph -512(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph %zmm29, %xmm30 #AVX512-FP16
vcvtpd2ph {rn-sae}, %zmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtpd2ph {rn-sae}, %zmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtpd2phz 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtpd2ph (%r9){1to8}, %xmm30 #AVX512-FP16 BROADCAST_EN
vcvtpd2phz 8128(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vcvtpd2ph -1024(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq %ymm29, %zmm30 #AVX512-FP16
vcvtph2dq {rn-sae}, %ymm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2dq {rn-sae}, %ymm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2dq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2dq (%r9){1to16}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2dq 4064(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2dq -256(%rdx){1to16}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd %xmm29, %zmm30 #AVX512-FP16
vcvtph2pd {sae}, %xmm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvtph2pd {sae}, %xmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2pd 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2pd (%r9){1to8}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2pd 2032(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2pd -256(%rdx){1to8}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx %ymm29, %zmm30 #AVX512-FP16
vcvtph2psx {sae}, %ymm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvtph2psx {sae}, %ymm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2psx 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2psx (%r9){1to16}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2psx 4064(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2psx -256(%rdx){1to16}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq %xmm29, %zmm30 #AVX512-FP16
vcvtph2qq {rn-sae}, %xmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2qq {rn-sae}, %xmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2qq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2qq (%r9){1to8}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2qq 2032(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2qq -256(%rdx){1to8}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq %ymm29, %zmm30 #AVX512-FP16
vcvtph2udq {rn-sae}, %ymm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2udq {rn-sae}, %ymm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2udq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2udq (%r9){1to16}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2udq 4064(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2udq -256(%rdx){1to16}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm29, %zmm30 #AVX512-FP16
vcvtph2uqq {rn-sae}, %xmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uqq {rn-sae}, %xmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uqq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2uqq (%r9){1to8}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2uqq 2032(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2uqq -256(%rdx){1to8}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw %zmm29, %zmm30 #AVX512-FP16
vcvtph2uw {rn-sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uw {rn-sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uw 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2uw (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2uw 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2uw -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w %zmm29, %zmm30 #AVX512-FP16
vcvtph2w {rn-sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2w {rn-sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2w 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtph2w (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtph2w 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtph2w -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx %zmm29, %ymm30 #AVX512-FP16
vcvtps2phx {rn-sae}, %zmm29, %ymm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtps2phx {rn-sae}, %zmm29, %ymm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtps2phx 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtps2phx (%r9){1to16}, %ymm30 #AVX512-FP16 BROADCAST_EN
vcvtps2phx 8128(%rcx), %ymm30 #AVX512-FP16 Disp8(7f)
vcvtps2phx -512(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph %zmm29, %xmm30 #AVX512-FP16
vcvtqq2ph {rn-sae}, %zmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtqq2ph {rn-sae}, %zmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtqq2phz 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtqq2ph (%r9){1to8}, %xmm30 #AVX512-FP16 BROADCAST_EN
vcvtqq2phz 8128(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vcvtqq2ph -1024(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtsd2sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vcvtsd2sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
# ---------------------------------------------------------------------------
# AVX512-FP16 instruction-encoding test fixture (GAS, AT&T syntax, x86-64).
# Each line assembles exactly one instruction; the resulting encoding is
# matched against an expected-disassembly dump file.  The trailing "#..."
# annotation on each line records which EVEX features the line exercises:
#   MASK_ENABLING = opmask {%k7}, ZEROCTL = zeroing {z}, HAS_SAE = {sae},
#   RC_CTRL = static rounding {rn-sae}, BROADCAST_EN = {1toN} broadcast,
#   Disp8(7f)/Disp8(80) = displacement chosen to hit the compressed-disp8
#   limits (+127 / -128 scaled by the memory-operand element size N).
# NOTE(review): do not edit instruction text, operand order, displacements,
# or the z/l/q disambiguation suffixes — the test checks byte-exact
# encodings, so every token here is significant.
# ---------------------------------------------------------------------------
# Scalar conversions between FP16 and FP64/FP32/integers
# (vcvtsd2sh, vcvtsh2sd, vcvtsh2si, vcvtsh2ss, vcvtsh2usi, vcvtsi2sh,
#  vcvtss2sh).  Disp8 scale: 2 bytes for an FP16 scalar source, 4/8 for
# dword/qword integer and FP32/FP64 sources.
vcvtsd2sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtsd2sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsd2sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vcvtsd2sh 1016(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtsd2sh -1024(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2sd %xmm28, %xmm29, %xmm30 #AVX512-FP16
vcvtsh2sd {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vcvtsh2sd {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2sd 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsh2sd (%r9), %xmm29, %xmm30 #AVX512-FP16
vcvtsh2sd 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtsh2sd -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2si %xmm30, %edx #AVX512-FP16
vcvtsh2si {rn-sae}, %xmm30, %edx #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si %xmm30, %r12 #AVX512-FP16
vcvtsh2si {rn-sae}, %xmm30, %r12 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si 0x10000000(%rbp, %r14, 8), %edx #AVX512-FP16
vcvtsh2si (%r9), %edx #AVX512-FP16
vcvtsh2si 254(%rcx), %edx #AVX512-FP16 Disp8(7f)
vcvtsh2si -256(%rdx), %edx #AVX512-FP16 Disp8(80)
vcvtsh2si 0x10000000(%rbp, %r14, 8), %r12 #AVX512-FP16
vcvtsh2si (%r9), %r12 #AVX512-FP16
vcvtsh2si 254(%rcx), %r12 #AVX512-FP16 Disp8(7f)
vcvtsh2si -256(%rdx), %r12 #AVX512-FP16 Disp8(80)
vcvtsh2ss %xmm28, %xmm29, %xmm30 #AVX512-FP16
vcvtsh2ss {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vcvtsh2ss {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2ss 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtsh2ss (%r9), %xmm29, %xmm30 #AVX512-FP16
vcvtsh2ss 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtsh2ss -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2usi %xmm30, %edx #AVX512-FP16
vcvtsh2usi {rn-sae}, %xmm30, %edx #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi %xmm30, %r12 #AVX512-FP16
vcvtsh2usi {rn-sae}, %xmm30, %r12 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi 0x10000000(%rbp, %r14, 8), %edx #AVX512-FP16
vcvtsh2usi (%r9), %edx #AVX512-FP16
vcvtsh2usi 254(%rcx), %edx #AVX512-FP16 Disp8(7f)
vcvtsh2usi -256(%rdx), %edx #AVX512-FP16 Disp8(80)
vcvtsh2usi 0x10000000(%rbp, %r14, 8), %r12 #AVX512-FP16
vcvtsh2usi (%r9), %r12 #AVX512-FP16
vcvtsh2usi 254(%rcx), %r12 #AVX512-FP16 Disp8(7f)
vcvtsh2usi -256(%rdx), %r12 #AVX512-FP16 Disp8(80)
# vcvtsi2sh memory forms need an explicit l/q suffix to pick the integer
# source width (Disp8 scale 4 vs 8).
vcvtsi2sh %r12, %xmm29, %xmm30 #AVX512-FP16
vcvtsi2sh %r12, {rn-sae}, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2sh %edx, %xmm29, %xmm30 #AVX512-FP16
vcvtsi2sh %edx, {rn-sae}, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2shl 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30 #AVX512-FP16
vcvtsi2shl (%r9), %xmm29, %xmm30 #AVX512-FP16
vcvtsi2shl 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtsi2shl -512(%rdx), %xmm29, %xmm30 #AVX512-FP16 Disp8(80)
vcvtsi2shq 1016(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtsi2shq -1024(%rdx), %xmm29, %xmm30 #AVX512-FP16 Disp8(80)
vcvtss2sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vcvtss2sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtss2sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtss2sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtss2sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vcvtss2sh 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtss2sh -512(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# Packed truncating FP16 -> integer conversions (vcvttph2*).  Source vector
# width follows the destination element width: ymm for 16 dwords, xmm for
# 8 qwords, zmm for 32 words; Disp8 scales match (32/16/64 bytes).
vcvttph2dq %ymm29, %zmm30 #AVX512-FP16
vcvttph2dq {sae}, %ymm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvttph2dq {sae}, %ymm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2dq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2dq (%r9){1to16}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvttph2dq 4064(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvttph2dq -256(%rdx){1to16}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq %xmm29, %zmm30 #AVX512-FP16
vcvttph2qq {sae}, %xmm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvttph2qq {sae}, %xmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2qq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2qq (%r9){1to8}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvttph2qq 2032(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvttph2qq -256(%rdx){1to8}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq %ymm29, %zmm30 #AVX512-FP16
vcvttph2udq {sae}, %ymm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvttph2udq {sae}, %ymm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2udq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2udq (%r9){1to16}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvttph2udq 4064(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvttph2udq -256(%rdx){1to16}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm29, %zmm30 #AVX512-FP16
vcvttph2uqq {sae}, %xmm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvttph2uqq {sae}, %xmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uqq 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2uqq (%r9){1to8}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvttph2uqq 2032(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvttph2uqq -256(%rdx){1to8}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw %zmm29, %zmm30 #AVX512-FP16
vcvttph2uw {sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvttph2uw {sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uw 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2uw (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvttph2uw 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvttph2uw -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w %zmm29, %zmm30 #AVX512-FP16
vcvttph2w {sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vcvttph2w {sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2w 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvttph2w (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvttph2w 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvttph2w -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Scalar truncating FP16 -> signed/unsigned integer conversions.
vcvttsh2si %xmm30, %edx #AVX512-FP16
vcvttsh2si {sae}, %xmm30, %edx #AVX512-FP16 HAS_SAE
vcvttsh2si %xmm30, %r12 #AVX512-FP16
vcvttsh2si {sae}, %xmm30, %r12 #AVX512-FP16 HAS_SAE
vcvttsh2si 0x10000000(%rbp, %r14, 8), %edx #AVX512-FP16
vcvttsh2si (%r9), %edx #AVX512-FP16
vcvttsh2si 254(%rcx), %edx #AVX512-FP16 Disp8(7f)
vcvttsh2si -256(%rdx), %edx #AVX512-FP16 Disp8(80)
vcvttsh2si 0x10000000(%rbp, %r14, 8), %r12 #AVX512-FP16
vcvttsh2si (%r9), %r12 #AVX512-FP16
vcvttsh2si 254(%rcx), %r12 #AVX512-FP16 Disp8(7f)
vcvttsh2si -256(%rdx), %r12 #AVX512-FP16 Disp8(80)
vcvttsh2usi %xmm30, %edx #AVX512-FP16
vcvttsh2usi {sae}, %xmm30, %edx #AVX512-FP16 HAS_SAE
vcvttsh2usi %xmm30, %r12 #AVX512-FP16
vcvttsh2usi {sae}, %xmm30, %r12 #AVX512-FP16 HAS_SAE
vcvttsh2usi 0x10000000(%rbp, %r14, 8), %edx #AVX512-FP16
vcvttsh2usi (%r9), %edx #AVX512-FP16
vcvttsh2usi 254(%rcx), %edx #AVX512-FP16 Disp8(7f)
vcvttsh2usi -256(%rdx), %edx #AVX512-FP16 Disp8(80)
vcvttsh2usi 0x10000000(%rbp, %r14, 8), %r12 #AVX512-FP16
vcvttsh2usi (%r9), %r12 #AVX512-FP16
vcvttsh2usi 254(%rcx), %r12 #AVX512-FP16 Disp8(7f)
vcvttsh2usi -256(%rdx), %r12 #AVX512-FP16 Disp8(80)
# Packed unsigned-integer -> FP16 conversions.  vcvtuqq2ph needs the "z"
# length suffix on non-broadcast memory forms because the zmm/ymm/xmm
# source width is otherwise ambiguous (destination is always xmm).
vcvtudq2ph %zmm29, %ymm30 #AVX512-FP16
vcvtudq2ph {rn-sae}, %zmm29, %ymm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtudq2ph {rn-sae}, %zmm29, %ymm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtudq2ph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtudq2ph (%r9){1to16}, %ymm30 #AVX512-FP16 BROADCAST_EN
vcvtudq2ph 8128(%rcx), %ymm30 #AVX512-FP16 Disp8(7f)
vcvtudq2ph -512(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph %zmm29, %xmm30 #AVX512-FP16
vcvtuqq2ph {rn-sae}, %zmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuqq2ph {rn-sae}, %zmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuqq2phz 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtuqq2ph (%r9){1to8}, %xmm30 #AVX512-FP16 BROADCAST_EN
vcvtuqq2phz 8128(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vcvtuqq2ph -1024(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Scalar unsigned-integer -> FP16; l/q suffixes select dword/qword source.
vcvtusi2sh %r12, %xmm29, %xmm30 #AVX512-FP16
vcvtusi2sh %r12, {rn-sae}, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2sh %edx, %xmm29, %xmm30 #AVX512-FP16
vcvtusi2sh %edx, {rn-sae}, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2shl 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30 #AVX512-FP16
vcvtusi2shl (%r9), %xmm29, %xmm30 #AVX512-FP16
vcvtusi2shl 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtusi2shl -512(%rdx), %xmm29, %xmm30 #AVX512-FP16 Disp8(80)
vcvtusi2shq 1016(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vcvtusi2shq -1024(%rdx), %xmm29, %xmm30 #AVX512-FP16 Disp8(80)
# Packed word -> FP16 conversions (unsigned and signed).
vcvtuw2ph %zmm29, %zmm30 #AVX512-FP16
vcvtuw2ph {rn-sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuw2ph {rn-sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuw2ph 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtuw2ph (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtuw2ph 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtuw2ph -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph %zmm29, %zmm30 #AVX512-FP16
vcvtw2ph {rn-sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtw2ph {rn-sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtw2ph 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vcvtw2ph (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vcvtw2ph 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vcvtw2ph -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# FP16 division, packed and scalar.
vdivph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vdivph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vdivph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vdivph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vdivph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vdivph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vdivsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vdivsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vdivsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vdivsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vdivsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# Complex FP16 multiply/multiply-add (vf[c]maddcph/csh, vf[c]mulcph/csh).
# These operate on complex pairs, so broadcast is {1to16} for zmm and the
# scalar element is 4 bytes (one FP16 complex value) for Disp8 scaling.
vfcmaddcph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfcmaddcph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfcmaddcph (%r9){1to16}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfcmaddcph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfcmaddcph -512(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfcmaddcsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfcmaddcsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfcmaddcsh 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfcmaddcsh -512(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfcmulcph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfcmulcph (%r9){1to16}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfcmulcph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfcmulcph -512(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfcmulcsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfcmulcsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfcmulcsh 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfcmulcsh -512(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# FP16 fused multiply-add family (132/213/231 operand orders; packed "ph"
# and scalar "sh" forms, plus addsub/subadd variants).
vfmadd132ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmadd132ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd132ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmadd132ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmadd132ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmadd132sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd132sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmadd132sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmadd132sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmadd213ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd213ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmadd213ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmadd213ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmadd213sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd213sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmadd213sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmadd213sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmadd231ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd231ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmadd231ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmadd231ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmadd231sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmadd231sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmadd231sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmadd231sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmaddcph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddcph (%r9){1to16}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmaddcph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmaddcph -512(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmaddcsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddcsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmaddcsh 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmaddcsh -512(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmaddsub132ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub132ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub132ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub132ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmaddsub132ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmaddsub132ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmaddsub213ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub213ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub213ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub213ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmaddsub213ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmaddsub213ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmaddsub231ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub231ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub231ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmaddsub231ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmaddsub231ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmaddsub231ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmsub132ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub132ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmsub132ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmsub132ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmsub132sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub132sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmsub132sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmsub132sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmsub213ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub213ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmsub213ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmsub213ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmsub213sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub213sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmsub213sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmsub213sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmsub231ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub231ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmsub231ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmsub231ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmsub231sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsub231sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmsub231sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmsub231sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmsubadd132ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd132ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd132ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd132ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmsubadd132ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmsubadd132ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmsubadd213ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd213ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd213ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd213ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmsubadd213ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmsubadd213ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmsubadd231ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd231ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd231ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmsubadd231ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmsubadd231ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmsubadd231ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfmulcph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmulcph (%r9){1to16}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfmulcph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfmulcph -512(%rdx){1to16}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfmulcsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfmulcsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfmulcsh 508(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfmulcsh -512(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfnmadd132ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd132ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfnmadd132ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfnmadd132ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfnmadd132sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd132sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfnmadd132sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfnmadd132sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfnmadd213ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd213ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfnmadd213ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfnmadd213ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfnmadd213sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd213sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfnmadd213sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfnmadd213sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfnmadd231ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd231ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfnmadd231ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfnmadd231ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfnmadd231sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmadd231sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfnmadd231sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfnmadd231sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfnmsub132ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub132ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfnmsub132ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfnmsub132ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfnmsub132sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub132sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfnmsub132sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfnmsub132sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfnmsub213ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub213ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfnmsub213ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfnmsub213ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfnmsub213sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub213sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfnmsub213sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfnmsub213sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vfnmsub231ph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231ph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231ph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub231ph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vfnmsub231ph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vfnmsub231ph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231sh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vfnmsub231sh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231sh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231sh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vfnmsub231sh (%r9), %xmm29, %xmm30 #AVX512-FP16
vfnmsub231sh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vfnmsub231sh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# FP-class tests writing opmask results.  vfpclassph needs the "z" length
# suffix on non-broadcast memory forms (vector width is ambiguous).
vfpclassph $123, %zmm30, %k5 #AVX512-FP16
vfpclassph $123, %zmm30, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclassphz $123, 0x10000000(%rbp, %r14, 8), %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclassph $123, (%r9){1to32}, %k5 #AVX512-FP16 BROADCAST_EN
vfpclassphz $123, 8128(%rcx), %k5 #AVX512-FP16 Disp8(7f)
vfpclassph $123, -256(%rdx){1to32}, %k5{%k7} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclasssh $123, %xmm30, %k5 #AVX512-FP16
vfpclasssh $123, %xmm30, %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclasssh $123, 0x10000000(%rbp, %r14, 8), %k5{%k7} #AVX512-FP16 MASK_ENABLING
vfpclasssh $123, (%r9), %k5 #AVX512-FP16
vfpclasssh $123, 254(%rcx), %k5 #AVX512-FP16 Disp8(7f)
vfpclasssh $123, -256(%rdx), %k5{%k7} #AVX512-FP16 Disp8(80) MASK_ENABLING
# Exponent/mantissa extraction (vgetexp*, vgetmant* with imm8 control).
vgetexpph %zmm29, %zmm30 #AVX512-FP16
vgetexpph {sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vgetexpph {sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpph 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vgetexpph (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vgetexpph 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vgetexpph -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vgetexpsh {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vgetexpsh {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vgetexpsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vgetexpsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vgetexpsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, %zmm29, %zmm30 #AVX512-FP16
vgetmantph $123, {sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vgetmantph $123, {sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantph $123, 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vgetmantph $123, (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vgetmantph $123, 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vgetmantph $123, -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantsh $123, %xmm28, %xmm29, %xmm30 #AVX512-FP16
vgetmantsh $123, {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vgetmantsh $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantsh $123, 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vgetmantsh $123, (%r9), %xmm29, %xmm30 #AVX512-FP16
vgetmantsh $123, 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vgetmantsh $123, -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# FP16 max/min, packed and scalar (SAE-capable, no rounding control).
vmaxph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vmaxph {sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vmaxph {sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vmaxph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vmaxph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vmaxph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vmaxsh {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vmaxsh {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vmaxsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vmaxsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vmaxsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vminph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vminph {sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vminph {sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vminph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vminph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vminph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vminsh {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vminsh {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vminsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vminsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vminsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vmovsh %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vmovsh 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vmovsh (%r9), %xmm30 #AVX512-FP16
vmovsh 254(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vmovsh -256(%rdx), %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh %xmm30, 0x10000000(%rbp, %r14, 8){%k7} #AVX512-FP16 MASK_ENABLING
vmovsh %xmm30, (%r9) #AVX512-FP16
vmovsh %xmm30, 254(%rcx) #AVX512-FP16 Disp8(7f)
vmovsh %xmm30, -256(%rdx){%k7} #AVX512-FP16 Disp8(80) MASK_ENABLING
vmovw 0x10000000(%rbp, %r14, 8), %xmm30 #AVX512-FP16
vmovw (%r9), %xmm30 #AVX512-FP16
vmovw 254(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vmovw -256(%rdx), %xmm30 #AVX512-FP16 Disp8(80)
vmovw %xmm30, 0x10000000(%rbp, %r14, 8) #AVX512-FP16
vmovw %xmm30, (%r9) #AVX512-FP16
vmovw %xmm30, 254(%rcx) #AVX512-FP16 Disp8(7f)
vmovw %xmm30, -256(%rdx) #AVX512-FP16 Disp8(80)
vmulph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vmulph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vmulph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vmulph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vmulph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vmulph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vmulsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vmulsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vmulsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vmulsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vmulsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrcpph %zmm29, %zmm30 #AVX512-FP16
vrcpph %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vrcpph (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vrcpph 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vrcpph -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vrcpsh %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vrcpsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vrcpsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vrcpsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, %zmm29, %zmm30 #AVX512-FP16
vreduceph $123, {sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vreduceph $123, {sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreduceph $123, 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vreduceph $123, (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vreduceph $123, 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vreduceph $123, -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreducesh $123, %xmm28, %xmm29, %xmm30 #AVX512-FP16
vreducesh $123, {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vreducesh $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreducesh $123, 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vreducesh $123, (%r9), %xmm29, %xmm30 #AVX512-FP16
vreducesh $123, 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vreducesh $123, -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, %zmm29, %zmm30 #AVX512-FP16
vrndscaleph $123, {sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE
vrndscaleph $123, {sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscaleph $123, 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vrndscaleph $123, (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vrndscaleph $123, 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vrndscaleph $123, -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscalesh $123, %xmm28, %xmm29, %xmm30 #AVX512-FP16
vrndscalesh $123, {sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vrndscalesh $123, {sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscalesh $123, 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vrndscalesh $123, (%r9), %xmm29, %xmm30 #AVX512-FP16
vrndscalesh $123, 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vrndscalesh $123, -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph %zmm29, %zmm30 #AVX512-FP16
vrsqrtph %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vrsqrtph (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vrsqrtph 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vrsqrtph -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vrsqrtsh %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vrsqrtsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vrsqrtsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vrsqrtsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vscalefph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vscalefph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vscalefph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vscalefph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vscalefph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vscalefph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vscalefsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vscalefsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vscalefsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vscalefsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vscalefsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph %zmm29, %zmm30 #AVX512-FP16
vsqrtph {rn-sae}, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtph {rn-sae}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtph 0x10000000(%rbp, %r14, 8), %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vsqrtph (%r9){1to32}, %zmm30 #AVX512-FP16 BROADCAST_EN
vsqrtph 8128(%rcx), %zmm30 #AVX512-FP16 Disp8(7f)
vsqrtph -256(%rdx){1to32}, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vsqrtsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vsqrtsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vsqrtsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vsqrtsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsubph %zmm28, %zmm29, %zmm30 #AVX512-FP16
vsubph {rn-sae}, %zmm28, %zmm29, %zmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vsubph {rn-sae}, %zmm28, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubph 0x10000000(%rbp, %r14, 8), %zmm29, %zmm30{%k7} #AVX512-FP16 MASK_ENABLING
vsubph (%r9){1to32}, %zmm29, %zmm30 #AVX512-FP16 BROADCAST_EN
vsubph 8128(%rcx), %zmm29, %zmm30 #AVX512-FP16 Disp8(7f)
vsubph -256(%rdx){1to32}, %zmm29, %zmm30{%k7}{z} #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubsh %xmm28, %xmm29, %xmm30 #AVX512-FP16
vsubsh {rn-sae}, %xmm28, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE RC_CTRL
vsubsh {rn-sae}, %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubsh 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16 MASK_ENABLING
vsubsh (%r9), %xmm29, %xmm30 #AVX512-FP16
vsubsh 254(%rcx), %xmm29, %xmm30 #AVX512-FP16 Disp8(7f)
vsubsh -256(%rdx), %xmm29, %xmm30{%k7}{z} #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vucomish %xmm29, %xmm30 #AVX512-FP16
vucomish {sae}, %xmm29, %xmm30 #AVX512-FP16 HAS_SAE
vucomish 0x10000000(%rbp, %r14, 8), %xmm30 #AVX512-FP16
vucomish (%r9), %xmm30 #AVX512-FP16
vucomish 254(%rcx), %xmm30 #AVX512-FP16 Disp8(7f)
vucomish -256(%rdx), %xmm30 #AVX512-FP16 Disp8(80)
# Re-run the same AVX512-FP16 forms in Intel syntax (dst first, no register
# prefixes, sizes via PTR/BCST keywords).  Rounding/SAE modifiers attach to
# the last source operand ({rn-sae}/{sae}), masks attach to the destination
# ({k7}{z}).  "WORD/DWORD/QWORD BCST" marks an embedded-broadcast memory
# source; the explicit {1toN} appears only where the element count is
# ambiguous.  Displacements again probe the Disp8*N compression limits.
.intel_syntax noprefix
vaddph	zmm30, zmm29, zmm28	 #AVX512-FP16
vaddph	zmm30, zmm29, zmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vaddph	zmm30{k7}{z}, zmm29, zmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddph	zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vaddph	zmm30, zmm29, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vaddph	zmm30, zmm29, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vaddph	zmm30{k7}{z}, zmm29, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddsh	xmm30, xmm29, xmm28	 #AVX512-FP16
vaddsh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vaddsh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vaddsh	xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vaddsh	xmm30, xmm29, WORD PTR [r9]	 #AVX512-FP16
vaddsh	xmm30, xmm29, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vaddsh	xmm30{k7}{z}, xmm29, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# Compares write a mask register, so only merging masks (no {z}) apply.
vcmpph	k5, zmm29, zmm28, 123	 #AVX512-FP16
vcmpph	k5, zmm29, zmm28{sae}, 123	 #AVX512-FP16 HAS_SAE
vcmpph	k5{k7}, zmm29, zmm28{sae}, 123	 #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpph	k5{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
vcmpph	k5, zmm29, WORD BCST [r9], 123	 #AVX512-FP16 BROADCAST_EN
vcmpph	k5, zmm29, ZMMWORD PTR [rcx+8128], 123	 #AVX512-FP16 Disp8(7f)
vcmpph	k5{k7}, zmm29, WORD BCST [rdx-256], 123	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpsh	k5, xmm29, xmm28, 123	 #AVX512-FP16
vcmpsh	k5, xmm29, xmm28{sae}, 123	 #AVX512-FP16 HAS_SAE
vcmpsh	k5{k7}, xmm29, xmm28{sae}, 123	 #AVX512-FP16 MASK_ENABLING HAS_SAE
vcmpsh	k5{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000], 123	 #AVX512-FP16 MASK_ENABLING
vcmpsh	k5, xmm29, WORD PTR [r9], 123	 #AVX512-FP16
vcmpsh	k5, xmm29, WORD PTR [rcx+254], 123	 #AVX512-FP16 Disp8(7f)
vcmpsh	k5{k7}, xmm29, WORD PTR [rdx-256], 123	 #AVX512-FP16 Disp8(80) MASK_ENABLING
vcomish	xmm30, xmm29	 #AVX512-FP16
vcomish	xmm30, xmm29{sae}	 #AVX512-FP16 HAS_SAE
vcomish	xmm30, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcomish	xmm30, WORD PTR [r9]	 #AVX512-FP16
vcomish	xmm30, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcomish	xmm30, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
# Conversions: note the narrowing forms (e.g. 32x fp32 -> 32x fp16) pair a
# zmm source with a ymm/xmm destination, and the element size in BCST/Disp8
# scaling follows the *source* element width.
vcvtdq2ph	ymm30, zmm29	 #AVX512-FP16
vcvtdq2ph	ymm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtdq2ph	ymm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtdq2ph	ymm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtdq2ph	ymm30, DWORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtdq2ph	ymm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtdq2ph	ymm30{k7}{z}, DWORD BCST [rdx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph	xmm30, zmm29	 #AVX512-FP16
vcvtpd2ph	xmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtpd2ph	xmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtpd2ph	xmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtpd2ph	xmm30, QWORD BCST [r9]{1to8}	 #AVX512-FP16 BROADCAST_EN
vcvtpd2ph	xmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtpd2ph	xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to8}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq	zmm30, ymm29	 #AVX512-FP16
vcvtph2dq	zmm30, ymm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2dq	zmm30{k7}{z}, ymm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2dq	zmm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2dq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2dq	zmm30, YMMWORD PTR [rcx+4064]	 #AVX512-FP16 Disp8(7f)
vcvtph2dq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd	zmm30, xmm29	 #AVX512-FP16
vcvtph2pd	zmm30, xmm29{sae}	 #AVX512-FP16 HAS_SAE
vcvtph2pd	zmm30{k7}{z}, xmm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2pd	zmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2pd	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2pd	zmm30, XMMWORD PTR [rcx+2032]	 #AVX512-FP16 Disp8(7f)
vcvtph2pd	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx	zmm30, ymm29	 #AVX512-FP16
vcvtph2psx	zmm30, ymm29{sae}	 #AVX512-FP16 HAS_SAE
vcvtph2psx	zmm30{k7}{z}, ymm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtph2psx	zmm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2psx	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2psx	zmm30, YMMWORD PTR [rcx+4064]	 #AVX512-FP16 Disp8(7f)
vcvtph2psx	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq	zmm30, xmm29	 #AVX512-FP16
vcvtph2qq	zmm30, xmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2qq	zmm30{k7}{z}, xmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2qq	zmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2qq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2qq	zmm30, XMMWORD PTR [rcx+2032]	 #AVX512-FP16 Disp8(7f)
vcvtph2qq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq	zmm30, ymm29	 #AVX512-FP16
vcvtph2udq	zmm30, ymm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2udq	zmm30{k7}{z}, ymm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2udq	zmm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2udq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2udq	zmm30, YMMWORD PTR [rcx+4064]	 #AVX512-FP16 Disp8(7f)
vcvtph2udq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq	zmm30, xmm29	 #AVX512-FP16
vcvtph2uqq	zmm30, xmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uqq	zmm30{k7}{z}, xmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uqq	zmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2uqq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2uqq	zmm30, XMMWORD PTR [rcx+2032]	 #AVX512-FP16 Disp8(7f)
vcvtph2uqq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw	zmm30, zmm29	 #AVX512-FP16
vcvtph2uw	zmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2uw	zmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2uw	zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2uw	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2uw	zmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtph2uw	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w	zmm30, zmm29	 #AVX512-FP16
vcvtph2w	zmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtph2w	zmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtph2w	zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtph2w	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtph2w	zmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtph2w	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx	ymm30, zmm29	 #AVX512-FP16
vcvtps2phx	ymm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtps2phx	ymm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtps2phx	ymm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtps2phx	ymm30, DWORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtps2phx	ymm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtps2phx	ymm30{k7}{z}, DWORD BCST [rdx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph	xmm30, zmm29	 #AVX512-FP16
vcvtqq2ph	xmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtqq2ph	xmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtqq2ph	xmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtqq2ph	xmm30, QWORD BCST [r9]{1to8}	 #AVX512-FP16 BROADCAST_EN
vcvtqq2ph	xmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtqq2ph	xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to8}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Scalar conversions: memory operand width follows the scalar source type
# (QWORD for sd, DWORD for ss/si32, WORD for sh).
vcvtsd2sh	xmm30, xmm29, xmm28	 #AVX512-FP16
vcvtsd2sh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsd2sh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtsd2sh	xmm30{k7}, xmm29, QWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtsd2sh	xmm30, xmm29, QWORD PTR [r9]	 #AVX512-FP16
vcvtsd2sh	xmm30, xmm29, QWORD PTR [rcx+1016]	 #AVX512-FP16 Disp8(7f)
vcvtsd2sh	xmm30{k7}{z}, xmm29, QWORD PTR [rdx-1024]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2sd	xmm30, xmm29, xmm28	 #AVX512-FP16
vcvtsh2sd	xmm30, xmm29, xmm28{sae}	 #AVX512-FP16 HAS_SAE
vcvtsh2sd	xmm30{k7}{z}, xmm29, xmm28{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2sd	xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtsh2sd	xmm30, xmm29, WORD PTR [r9]	 #AVX512-FP16
vcvtsh2sd	xmm30, xmm29, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvtsh2sd	xmm30{k7}{z}, xmm29, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# GPR destinations: 32-bit (edx) and 64-bit (r12, needs REX.W) forms.
vcvtsh2si	edx, xmm30	 #AVX512-FP16
vcvtsh2si	edx, xmm30{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si	r12, xmm30	 #AVX512-FP16
vcvtsh2si	r12, xmm30{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2si	edx, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvtsh2si	edx, WORD PTR [r9]	 #AVX512-FP16
vcvtsh2si	edx, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvtsh2si	edx, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvtsh2si	r12, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvtsh2si	r12, WORD PTR [r9]	 #AVX512-FP16
vcvtsh2si	r12, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvtsh2si	r12, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvtsh2ss	xmm30, xmm29, xmm28	 #AVX512-FP16
vcvtsh2ss	xmm30, xmm29, xmm28{sae}	 #AVX512-FP16 HAS_SAE
vcvtsh2ss	xmm30{k7}{z}, xmm29, xmm28{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvtsh2ss	xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtsh2ss	xmm30, xmm29, WORD PTR [r9]	 #AVX512-FP16
vcvtsh2ss	xmm30, xmm29, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvtsh2ss	xmm30{k7}{z}, xmm29, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vcvtsh2usi	edx, xmm30	 #AVX512-FP16
vcvtsh2usi	edx, xmm30{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi	r12, xmm30	 #AVX512-FP16
vcvtsh2usi	r12, xmm30{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsh2usi	edx, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvtsh2usi	edx, WORD PTR [r9]	 #AVX512-FP16
vcvtsh2usi	edx, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvtsh2usi	edx, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvtsh2usi	r12, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvtsh2usi	r12, WORD PTR [r9]	 #AVX512-FP16
vcvtsh2usi	r12, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvtsh2usi	r12, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvtsi2sh	xmm30, xmm29, r12	 #AVX512-FP16
vcvtsi2sh	xmm30, xmm29, r12{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2sh	xmm30, xmm29, edx	 #AVX512-FP16
vcvtsi2sh	xmm30, xmm29, edx{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtsi2sh	xmm30, xmm29, DWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvtsi2sh	xmm30, xmm29, DWORD PTR [r9]	 #AVX512-FP16
vcvtsi2sh	xmm30, xmm29, DWORD PTR [rcx+508]	 #AVX512-FP16 Disp8(7f)
vcvtsi2sh	xmm30, xmm29, DWORD PTR [rdx-512]	 #AVX512-FP16 Disp8(80)
vcvtsi2sh	xmm30, xmm29, QWORD PTR [rcx+1016]	 #AVX512-FP16 Disp8(7f)
vcvtsi2sh	xmm30, xmm29, QWORD PTR [rdx-1024]	 #AVX512-FP16 Disp8(80)
vcvtss2sh	xmm30, xmm29, xmm28	 #AVX512-FP16
vcvtss2sh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtss2sh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtss2sh	xmm30{k7}, xmm29, DWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtss2sh	xmm30, xmm29, DWORD PTR [r9]	 #AVX512-FP16
vcvtss2sh	xmm30, xmm29, DWORD PTR [rcx+508]	 #AVX512-FP16 Disp8(7f)
vcvtss2sh	xmm30{k7}{z}, xmm29, DWORD PTR [rdx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# Truncating conversions (vcvtt*) support {sae} but not rounding control.
vcvttph2dq	zmm30, ymm29	 #AVX512-FP16
vcvttph2dq	zmm30, ymm29{sae}	 #AVX512-FP16 HAS_SAE
vcvttph2dq	zmm30{k7}{z}, ymm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2dq	zmm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvttph2dq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvttph2dq	zmm30, YMMWORD PTR [rcx+4064]	 #AVX512-FP16 Disp8(7f)
vcvttph2dq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq	zmm30, xmm29	 #AVX512-FP16
vcvttph2qq	zmm30, xmm29{sae}	 #AVX512-FP16 HAS_SAE
vcvttph2qq	zmm30{k7}{z}, xmm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2qq	zmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvttph2qq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvttph2qq	zmm30, XMMWORD PTR [rcx+2032]	 #AVX512-FP16 Disp8(7f)
vcvttph2qq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq	zmm30, ymm29	 #AVX512-FP16
vcvttph2udq	zmm30, ymm29{sae}	 #AVX512-FP16 HAS_SAE
vcvttph2udq	zmm30{k7}{z}, ymm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2udq	zmm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvttph2udq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvttph2udq	zmm30, YMMWORD PTR [rcx+4064]	 #AVX512-FP16 Disp8(7f)
vcvttph2udq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq	zmm30, xmm29	 #AVX512-FP16
vcvttph2uqq	zmm30, xmm29{sae}	 #AVX512-FP16 HAS_SAE
vcvttph2uqq	zmm30{k7}{z}, xmm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uqq	zmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvttph2uqq	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvttph2uqq	zmm30, XMMWORD PTR [rcx+2032]	 #AVX512-FP16 Disp8(7f)
vcvttph2uqq	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw	zmm30, zmm29	 #AVX512-FP16
vcvttph2uw	zmm30, zmm29{sae}	 #AVX512-FP16 HAS_SAE
vcvttph2uw	zmm30{k7}{z}, zmm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2uw	zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvttph2uw	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvttph2uw	zmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvttph2uw	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w	zmm30, zmm29	 #AVX512-FP16
vcvttph2w	zmm30, zmm29{sae}	 #AVX512-FP16 HAS_SAE
vcvttph2w	zmm30{k7}{z}, zmm29{sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vcvttph2w	zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvttph2w	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvttph2w	zmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvttph2w	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttsh2si	edx, xmm30	 #AVX512-FP16
vcvttsh2si	edx, xmm30{sae}	 #AVX512-FP16 HAS_SAE
vcvttsh2si	r12, xmm30	 #AVX512-FP16
vcvttsh2si	r12, xmm30{sae}	 #AVX512-FP16 HAS_SAE
vcvttsh2si	edx, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvttsh2si	edx, WORD PTR [r9]	 #AVX512-FP16
vcvttsh2si	edx, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvttsh2si	edx, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvttsh2si	r12, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvttsh2si	r12, WORD PTR [r9]	 #AVX512-FP16
vcvttsh2si	r12, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvttsh2si	r12, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvttsh2usi	edx, xmm30	 #AVX512-FP16
vcvttsh2usi	edx, xmm30{sae}	 #AVX512-FP16 HAS_SAE
vcvttsh2usi	r12, xmm30	 #AVX512-FP16
vcvttsh2usi	r12, xmm30{sae}	 #AVX512-FP16 HAS_SAE
vcvttsh2usi	edx, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvttsh2usi	edx, WORD PTR [r9]	 #AVX512-FP16
vcvttsh2usi	edx, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvttsh2usi	edx, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvttsh2usi	r12, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvttsh2usi	r12, WORD PTR [r9]	 #AVX512-FP16
vcvttsh2usi	r12, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vcvttsh2usi	r12, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80)
vcvtudq2ph	ymm30, zmm29	 #AVX512-FP16
vcvtudq2ph	ymm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtudq2ph	ymm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtudq2ph	ymm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtudq2ph	ymm30, DWORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtudq2ph	ymm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtudq2ph	ymm30{k7}{z}, DWORD BCST [rdx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph	xmm30, zmm29	 #AVX512-FP16
vcvtuqq2ph	xmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuqq2ph	xmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuqq2ph	xmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtuqq2ph	xmm30, QWORD BCST [r9]{1to8}	 #AVX512-FP16 BROADCAST_EN
vcvtuqq2ph	xmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtuqq2ph	xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to8}	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtusi2sh	xmm30, xmm29, r12	 #AVX512-FP16
vcvtusi2sh	xmm30, xmm29, r12{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2sh	xmm30, xmm29, edx	 #AVX512-FP16
vcvtusi2sh	xmm30, xmm29, edx{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtusi2sh	xmm30, xmm29, DWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16
vcvtusi2sh	xmm30, xmm29, DWORD PTR [r9]	 #AVX512-FP16
vcvtusi2sh	xmm30, xmm29, DWORD PTR [rcx+508]	 #AVX512-FP16 Disp8(7f)
vcvtusi2sh	xmm30, xmm29, DWORD PTR [rdx-512]	 #AVX512-FP16 Disp8(80)
vcvtusi2sh	xmm30, xmm29, QWORD PTR [rcx+1016]	 #AVX512-FP16 Disp8(7f)
vcvtusi2sh	xmm30, xmm29, QWORD PTR [rdx-1024]	 #AVX512-FP16 Disp8(80)
vcvtuw2ph	zmm30, zmm29	 #AVX512-FP16
vcvtuw2ph	zmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtuw2ph	zmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtuw2ph	zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtuw2ph	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtuw2ph	zmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtuw2ph	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph	zmm30, zmm29	 #AVX512-FP16
vcvtw2ph	zmm30, zmm29{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vcvtw2ph	zmm30{k7}{z}, zmm29{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vcvtw2ph	zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vcvtw2ph	zmm30, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vcvtw2ph	zmm30, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vcvtw2ph	zmm30{k7}{z}, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph	zmm30, zmm29, zmm28	 #AVX512-FP16
vdivph	zmm30, zmm29, zmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vdivph	zmm30{k7}{z}, zmm29, zmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivph	zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vdivph	zmm30, zmm29, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vdivph	zmm30, zmm29, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vdivph	zmm30{k7}{z}, zmm29, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivsh	xmm30, xmm29, xmm28	 #AVX512-FP16
vdivsh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vdivsh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vdivsh	xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vdivsh	xmm30, xmm29, WORD PTR [r9]	 #AVX512-FP16
vdivsh	xmm30, xmm29, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vdivsh	xmm30{k7}{z}, xmm29, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
# Complex-FP16 FMA/multiply operate on fp16 pairs, hence DWORD broadcast.
vfcmaddcph	zmm30, zmm29, zmm28	 #AVX512-FP16
vfcmaddcph	zmm30, zmm29, zmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcph	zmm30{k7}{z}, zmm29, zmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcph	zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfcmaddcph	zmm30, zmm29, DWORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vfcmaddcph	zmm30, zmm29, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vfcmaddcph	zmm30{k7}{z}, zmm29, DWORD BCST [rdx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcsh	xmm30, xmm29, xmm28	 #AVX512-FP16
vfcmaddcsh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmaddcsh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmaddcsh	xmm30{k7}, xmm29, DWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfcmaddcsh	xmm30, xmm29, DWORD PTR [r9]	 #AVX512-FP16
vfcmaddcsh	xmm30, xmm29, DWORD PTR [rcx+508]	 #AVX512-FP16 Disp8(7f)
vfcmaddcsh	xmm30{k7}{z}, xmm29, DWORD PTR [rdx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph	zmm30, zmm29, zmm28	 #AVX512-FP16
vfcmulcph	zmm30, zmm29, zmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcph	zmm30{k7}{z}, zmm29, zmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcph	zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfcmulcph	zmm30, zmm29, DWORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vfcmulcph	zmm30, zmm29, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vfcmulcph	zmm30{k7}{z}, zmm29, DWORD BCST [rdx-512]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcsh	xmm30, xmm29, xmm28	 #AVX512-FP16
vfcmulcsh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfcmulcsh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfcmulcsh	xmm30{k7}, xmm29, DWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfcmulcsh	xmm30, xmm29, DWORD PTR [r9]	 #AVX512-FP16
vfcmulcsh	xmm30, xmm29, DWORD PTR [rcx+508]	 #AVX512-FP16 Disp8(7f)
vfcmulcsh	xmm30{k7}{z}, xmm29, DWORD PTR [rdx-512]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph	zmm30, zmm29, zmm28	 #AVX512-FP16
vfmadd132ph	zmm30, zmm29, zmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132ph	zmm30{k7}{z}, zmm29, zmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132ph	zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfmadd132ph	zmm30, zmm29, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vfmadd132ph	zmm30, zmm29, ZMMWORD PTR [rcx+8128]	 #AVX512-FP16 Disp8(7f)
vfmadd132ph	zmm30{k7}{z}, zmm29, WORD BCST [rdx-256]	 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132sh	xmm30, xmm29, xmm28	 #AVX512-FP16
vfmadd132sh	xmm30, xmm29, xmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd132sh	xmm30{k7}{z}, xmm29, xmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd132sh	xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfmadd132sh	xmm30, xmm29, WORD PTR [r9]	 #AVX512-FP16
vfmadd132sh	xmm30, xmm29, WORD PTR [rcx+254]	 #AVX512-FP16 Disp8(7f)
vfmadd132sh	xmm30{k7}{z}, xmm29, WORD PTR [rdx-256]	 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph	zmm30, zmm29, zmm28	 #AVX512-FP16
vfmadd213ph	zmm30, zmm29, zmm28{rn-sae}	 #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213ph	zmm30{k7}{z}, zmm29, zmm28{rn-sae}	 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213ph	zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000]	 #AVX512-FP16 MASK_ENABLING
vfmadd213ph	zmm30, zmm29, WORD BCST [r9]	 #AVX512-FP16 BROADCAST_EN
vfmadd213ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmadd213ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213sh xmm30, xmm29, xmm28 #AVX512-FP16
vfmadd213sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd213sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd213sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd213sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfmadd213sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfmadd213sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmadd231ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd231ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmadd231ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmadd231ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231sh xmm30, xmm29, xmm28 #AVX512-FP16
vfmadd231sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmadd231sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmadd231sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmadd231sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfmadd231sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfmadd231sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph zmm30, zmm29, zmm28 #AVX512-FP16
vfmaddcph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddcph zmm30, zmm29, DWORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmaddcph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmaddcph zmm30{k7}{z}, zmm29, DWORD BCST [rdx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcsh xmm30, xmm29, xmm28 #AVX512-FP16
vfmaddcsh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddcsh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddcsh xmm30{k7}, xmm29, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddcsh xmm30, xmm29, DWORD PTR [r9] #AVX512-FP16
vfmaddcsh xmm30, xmm29, DWORD PTR [rcx+508] #AVX512-FP16 Disp8(7f)
vfmaddcsh xmm30{k7}{z}, xmm29, DWORD PTR [rdx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmaddsub132ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub132ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub132ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddsub132ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmaddsub132ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmaddsub132ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmaddsub213ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub213ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub213ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddsub213ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmaddsub213ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmaddsub213ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmaddsub231ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmaddsub231ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmaddsub231ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmaddsub231ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmaddsub231ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmaddsub231ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmsub132ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub132ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmsub132ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmsub132ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132sh xmm30, xmm29, xmm28 #AVX512-FP16
vfmsub132sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub132sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub132sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub132sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfmsub132sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfmsub132sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmsub213ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub213ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmsub213ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmsub213ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213sh xmm30, xmm29, xmm28 #AVX512-FP16
vfmsub213sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub213sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub213sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub213sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfmsub213sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfmsub213sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmsub231ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub231ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmsub231ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmsub231ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231sh xmm30, xmm29, xmm28 #AVX512-FP16
vfmsub231sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsub231sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsub231sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsub231sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfmsub231sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfmsub231sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmsubadd132ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd132ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd132ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsubadd132ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmsubadd132ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmsubadd132ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmsubadd213ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd213ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd213ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsubadd213ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmsubadd213ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmsubadd213ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph zmm30, zmm29, zmm28 #AVX512-FP16
vfmsubadd231ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmsubadd231ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmsubadd231ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmsubadd231ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmsubadd231ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmsubadd231ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph zmm30, zmm29, zmm28 #AVX512-FP16
vfmulcph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmulcph zmm30, zmm29, DWORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfmulcph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfmulcph zmm30{k7}{z}, zmm29, DWORD BCST [rdx-512] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcsh xmm30, xmm29, xmm28 #AVX512-FP16
vfmulcsh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfmulcsh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfmulcsh xmm30{k7}, xmm29, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfmulcsh xmm30, xmm29, DWORD PTR [r9] #AVX512-FP16
vfmulcsh xmm30, xmm29, DWORD PTR [rcx+508] #AVX512-FP16 Disp8(7f)
vfmulcsh xmm30{k7}{z}, xmm29, DWORD PTR [rdx-512] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph zmm30, zmm29, zmm28 #AVX512-FP16
vfnmadd132ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd132ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfnmadd132ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfnmadd132ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132sh xmm30, xmm29, xmm28 #AVX512-FP16
vfnmadd132sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd132sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd132sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd132sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfnmadd132sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfnmadd132sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph zmm30, zmm29, zmm28 #AVX512-FP16
vfnmadd213ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd213ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfnmadd213ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfnmadd213ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213sh xmm30, xmm29, xmm28 #AVX512-FP16
vfnmadd213sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd213sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd213sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd213sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfnmadd213sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfnmadd213sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph zmm30, zmm29, zmm28 #AVX512-FP16
vfnmadd231ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd231ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfnmadd231ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfnmadd231ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231sh xmm30, xmm29, xmm28 #AVX512-FP16
vfnmadd231sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmadd231sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmadd231sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmadd231sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfnmadd231sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfnmadd231sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph zmm30, zmm29, zmm28 #AVX512-FP16
vfnmsub132ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub132ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfnmsub132ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfnmsub132ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132sh xmm30, xmm29, xmm28 #AVX512-FP16
vfnmsub132sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub132sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub132sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub132sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfnmsub132sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfnmsub132sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph zmm30, zmm29, zmm28 #AVX512-FP16
vfnmsub213ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub213ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfnmsub213ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfnmsub213ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213sh xmm30, xmm29, xmm28 #AVX512-FP16
vfnmsub213sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub213sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub213sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub213sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfnmsub213sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfnmsub213sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph zmm30, zmm29, zmm28 #AVX512-FP16
vfnmsub231ph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231ph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231ph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub231ph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vfnmsub231ph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vfnmsub231ph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231sh xmm30, xmm29, xmm28 #AVX512-FP16
vfnmsub231sh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vfnmsub231sh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vfnmsub231sh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vfnmsub231sh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vfnmsub231sh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vfnmsub231sh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vfpclassph k5, zmm30, 123 #AVX512-FP16
vfpclassph k5{k7}, zmm30, 123 #AVX512-FP16 MASK_ENABLING
vfpclassph k5{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vfpclassph k5, WORD BCST [r9]{1to32}, 123 #AVX512-FP16 BROADCAST_EN
vfpclassph k5, ZMMWORD PTR [rcx+8128], 123 #AVX512-FP16 Disp8(7f)
vfpclassph k5{k7}, WORD BCST [rdx-256]{1to32}, 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclasssh k5, xmm30, 123 #AVX512-FP16
vfpclasssh k5{k7}, xmm30, 123 #AVX512-FP16 MASK_ENABLING
vfpclasssh k5{k7}, WORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vfpclasssh k5, WORD PTR [r9], 123 #AVX512-FP16
vfpclasssh k5, WORD PTR [rcx+254], 123 #AVX512-FP16 Disp8(7f)
vfpclasssh k5{k7}, WORD PTR [rdx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING
vgetexpph zmm30, zmm29 #AVX512-FP16
vgetexpph zmm30, zmm29{sae} #AVX512-FP16 HAS_SAE
vgetexpph zmm30{k7}{z}, zmm29{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vgetexpph zmm30, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vgetexpph zmm30, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vgetexpph zmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpsh xmm30, xmm29, xmm28 #AVX512-FP16
vgetexpsh xmm30, xmm29, xmm28{sae} #AVX512-FP16 HAS_SAE
vgetexpsh xmm30{k7}{z}, xmm29, xmm28{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetexpsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vgetexpsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vgetexpsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vgetexpsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph zmm30, zmm29, 123 #AVX512-FP16
vgetmantph zmm30, zmm29{sae}, 123 #AVX512-FP16 HAS_SAE
vgetmantph zmm30{k7}{z}, zmm29{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vgetmantph zmm30, WORD BCST [r9], 123 #AVX512-FP16 BROADCAST_EN
vgetmantph zmm30, ZMMWORD PTR [rcx+8128], 123 #AVX512-FP16 Disp8(7f)
vgetmantph zmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantsh xmm30, xmm29, xmm28, 123 #AVX512-FP16
vgetmantsh xmm30, xmm29, xmm28{sae}, 123 #AVX512-FP16 HAS_SAE
vgetmantsh xmm30{k7}{z}, xmm29, xmm28{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vgetmantsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vgetmantsh xmm30, xmm29, WORD PTR [r9], 123 #AVX512-FP16
vgetmantsh xmm30, xmm29, WORD PTR [rcx+254], 123 #AVX512-FP16 Disp8(7f)
vgetmantsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmaxph zmm30, zmm29, zmm28 #AVX512-FP16
vmaxph zmm30, zmm29, zmm28{sae} #AVX512-FP16 HAS_SAE
vmaxph zmm30{k7}{z}, zmm29, zmm28{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmaxph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vmaxph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vmaxph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxsh xmm30, xmm29, xmm28 #AVX512-FP16
vmaxsh xmm30, xmm29, xmm28{sae} #AVX512-FP16 HAS_SAE
vmaxsh xmm30{k7}{z}, xmm29, xmm28{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vmaxsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmaxsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vmaxsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vmaxsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vminph zmm30, zmm29, zmm28 #AVX512-FP16
vminph zmm30, zmm29, zmm28{sae} #AVX512-FP16 HAS_SAE
vminph zmm30{k7}{z}, zmm29, zmm28{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vminph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vminph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vminph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminsh xmm30, xmm29, xmm28 #AVX512-FP16
vminsh xmm30, xmm29, xmm28{sae} #AVX512-FP16 HAS_SAE
vminsh xmm30{k7}{z}, xmm29, xmm28{sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vminsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vminsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vminsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vminsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh xmm30, xmm29, xmm28 #AVX512-FP16
vmovsh xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16 MASK_ENABLING ZEROCTL
vmovsh xmm30{k7}, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmovsh xmm30, WORD PTR [r9] #AVX512-FP16
vmovsh xmm30, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vmovsh xmm30{k7}{z}, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vmovsh WORD PTR [rbp+r14*8+0x10000000]{k7}, xmm30 #AVX512-FP16 MASK_ENABLING
vmovsh WORD PTR [r9], xmm30 #AVX512-FP16
vmovsh WORD PTR [rcx+254], xmm30 #AVX512-FP16 Disp8(7f)
vmovsh WORD PTR [rdx-256]{k7}, xmm30 #AVX512-FP16 Disp8(80) MASK_ENABLING
vmovw xmm30, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16
vmovw xmm30, WORD PTR [r9] #AVX512-FP16
vmovw xmm30, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vmovw xmm30, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80)
vmovw WORD PTR [rbp+r14*8+0x10000000], xmm30 #AVX512-FP16
vmovw WORD PTR [r9], xmm30 #AVX512-FP16
vmovw WORD PTR [rcx+254], xmm30 #AVX512-FP16 Disp8(7f)
vmovw WORD PTR [rdx-256], xmm30 #AVX512-FP16 Disp8(80)
vmulph zmm30, zmm29, zmm28 #AVX512-FP16
vmulph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vmulph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmulph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vmulph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vmulph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulsh xmm30, xmm29, xmm28 #AVX512-FP16
vmulsh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vmulsh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vmulsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vmulsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vmulsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vmulsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrcpph zmm30, zmm29 #AVX512-FP16
vrcpph zmm30{k7}{z}, zmm29 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrcpph zmm30, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vrcpph zmm30, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vrcpph zmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpsh xmm30, xmm29, xmm28 #AVX512-FP16
vrcpsh xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrcpsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrcpsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vrcpsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vrcpsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vreduceph zmm30, zmm29, 123 #AVX512-FP16
vreduceph zmm30, zmm29{sae}, 123 #AVX512-FP16 HAS_SAE
vreduceph zmm30{k7}{z}, zmm29{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreduceph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vreduceph zmm30, WORD BCST [r9], 123 #AVX512-FP16 BROADCAST_EN
vreduceph zmm30, ZMMWORD PTR [rcx+8128], 123 #AVX512-FP16 Disp8(7f)
vreduceph zmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreducesh xmm30, xmm29, xmm28, 123 #AVX512-FP16
vreducesh xmm30, xmm29, xmm28{sae}, 123 #AVX512-FP16 HAS_SAE
vreducesh xmm30{k7}{z}, xmm29, xmm28{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vreducesh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vreducesh xmm30, xmm29, WORD PTR [r9], 123 #AVX512-FP16
vreducesh xmm30, xmm29, WORD PTR [rcx+254], 123 #AVX512-FP16 Disp8(7f)
vreducesh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph zmm30, zmm29, 123 #AVX512-FP16
vrndscaleph zmm30, zmm29{sae}, 123 #AVX512-FP16 HAS_SAE
vrndscaleph zmm30{k7}{z}, zmm29{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscaleph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vrndscaleph zmm30, WORD BCST [r9], 123 #AVX512-FP16 BROADCAST_EN
vrndscaleph zmm30, ZMMWORD PTR [rcx+8128], 123 #AVX512-FP16 Disp8(7f)
vrndscaleph zmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscalesh xmm30, xmm29, xmm28, 123 #AVX512-FP16
vrndscalesh xmm30, xmm29, xmm28{sae}, 123 #AVX512-FP16 HAS_SAE
vrndscalesh xmm30{k7}{z}, xmm29, xmm28{sae}, 123 #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE
vrndscalesh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16 MASK_ENABLING
vrndscalesh xmm30, xmm29, WORD PTR [r9], 123 #AVX512-FP16
vrndscalesh xmm30, xmm29, WORD PTR [rcx+254], 123 #AVX512-FP16 Disp8(7f)
vrndscalesh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256], 123 #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph zmm30, zmm29 #AVX512-FP16
vrsqrtph zmm30{k7}{z}, zmm29 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrsqrtph zmm30, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vrsqrtph zmm30, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vrsqrtph zmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtsh xmm30, xmm29, xmm28 #AVX512-FP16
vrsqrtsh xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16 MASK_ENABLING ZEROCTL
vrsqrtsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vrsqrtsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vrsqrtsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vrsqrtsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vscalefph zmm30, zmm29, zmm28 #AVX512-FP16
vscalefph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vscalefph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vscalefph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vscalefph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vscalefph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefsh xmm30, xmm29, xmm28 #AVX512-FP16
vscalefsh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vscalefsh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vscalefsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vscalefsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vscalefsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vscalefsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph zmm30, zmm29 #AVX512-FP16
vsqrtph zmm30, zmm29{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtph zmm30{k7}{z}, zmm29{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtph zmm30{k7}, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsqrtph zmm30, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vsqrtph zmm30, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vsqrtph zmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtsh xmm30, xmm29, xmm28 #AVX512-FP16
vsqrtsh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsqrtsh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsqrtsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsqrtsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vsqrtsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vsqrtsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vsubph zmm30, zmm29, zmm28 #AVX512-FP16
vsubph zmm30, zmm29, zmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsubph zmm30{k7}{z}, zmm29, zmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubph zmm30{k7}, zmm29, ZMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsubph zmm30, zmm29, WORD BCST [r9] #AVX512-FP16 BROADCAST_EN
vsubph zmm30, zmm29, ZMMWORD PTR [rcx+8128] #AVX512-FP16 Disp8(7f)
vsubph zmm30{k7}{z}, zmm29, WORD BCST [rdx-256] #AVX512-FP16 BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubsh xmm30, xmm29, xmm28 #AVX512-FP16
vsubsh xmm30, xmm29, xmm28{rn-sae} #AVX512-FP16 HAS_SAE RC_CTRL
vsubsh xmm30{k7}{z}, xmm29, xmm28{rn-sae} #AVX512-FP16 MASK_ENABLING ZEROCTL HAS_SAE RC_CTRL
vsubsh xmm30{k7}, xmm29, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16 MASK_ENABLING
vsubsh xmm30, xmm29, WORD PTR [r9] #AVX512-FP16
vsubsh xmm30, xmm29, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vsubsh xmm30{k7}{z}, xmm29, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80) MASK_ENABLING ZEROCTL
vucomish xmm30, xmm29 #AVX512-FP16
vucomish xmm30, xmm29{sae} #AVX512-FP16 HAS_SAE
vucomish xmm30, WORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16
vucomish xmm30, WORD PTR [r9] #AVX512-FP16
vucomish xmm30, WORD PTR [rcx+254] #AVX512-FP16 Disp8(7f)
vucomish xmm30, WORD PTR [rdx-256] #AVX512-FP16 Disp8(80)
# ---------------------------------------------------------------------------
# NOTE(review): the lines below this marker come from a different testsuite
# source file: gas/testsuite/gas/i386/x86-64-avx512bw.s
# (repository: stsp/binutils-ia16, original file size 86,024 bytes).
# The raw separator was extraction/join residue and has been converted to
# assembler comments so the text remains assemblable; no content was removed.
# ---------------------------------------------------------------------------
# Check 64bit AVX512BW instructions
.allow_index_reg
.text
_start:
vpabsb %zmm29, %zmm30 # AVX512BW
vpabsb %zmm29, %zmm30{%k7} # AVX512BW
vpabsb %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsb (%rcx), %zmm30 # AVX512BW
vpabsb 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsb 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsb 8192(%rdx), %zmm30 # AVX512BW
vpabsb -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsb -8256(%rdx), %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30 # AVX512BW
vpabsw %zmm29, %zmm30{%k7} # AVX512BW
vpabsw %zmm29, %zmm30{%k7}{z} # AVX512BW
vpabsw (%rcx), %zmm30 # AVX512BW
vpabsw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpabsw 8128(%rdx), %zmm30 # AVX512BW Disp8
vpabsw 8192(%rdx), %zmm30 # AVX512BW
vpabsw -8192(%rdx), %zmm30 # AVX512BW Disp8
vpabsw -8256(%rdx), %zmm30 # AVX512BW
vpackssdw %zmm28, %zmm29, %zmm30 # AVX512BW
vpackssdw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackssdw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackssdw (%rcx), %zmm29, %zmm30 # AVX512BW
vpackssdw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackssdw (%rcx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackssdw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackssdw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackssdw 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackssdw -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackssdw -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpacksswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpacksswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpacksswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpacksswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpacksswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpacksswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackusdw %zmm28, %zmm29, %zmm30 # AVX512BW
vpackusdw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackusdw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackusdw (%rcx), %zmm29, %zmm30 # AVX512BW
vpackusdw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackusdw (%rcx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackusdw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackusdw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpackusdw 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw 512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackusdw -512(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW Disp8
vpackusdw -516(%rdx){1to16}, %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30 # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpackuswb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpackuswb (%rcx), %zmm29, %zmm30 # AVX512BW
vpackuswb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpackuswb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpackuswb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpackuswb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30 # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpaddw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpaddw (%rcx), %zmm29, %zmm30 # AVX512BW
vpaddw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpaddw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpaddw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpaddw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpalignr $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpalignr $123, %zmm28, %zmm29, %zmm30 # AVX512BW
vpalignr $123, (%rcx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpalignr $123, 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpalignr $123, -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpalignr $123, -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgb (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30 # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpavgw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpavgw (%rcx), %zmm29, %zmm30 # AVX512BW
vpavgw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpavgw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpavgw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpavgw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpblendmb %zmm28, %zmm29, %zmm30 # AVX512BW
vpblendmb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpblendmb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpblendmb (%rcx), %zmm29, %zmm30 # AVX512BW
vpblendmb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpblendmb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpblendmb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpbroadcastb %xmm29, %zmm30 # AVX512BW
vpbroadcastb %xmm29, %zmm30{%k7} # AVX512BW
vpbroadcastb %xmm29, %zmm30{%k7}{z} # AVX512BW
vpbroadcastb (%rcx), %zmm30 # AVX512BW
vpbroadcastb 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpbroadcastb 127(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastb 128(%rdx), %zmm30 # AVX512BW
vpbroadcastb -128(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastb -129(%rdx), %zmm30 # AVX512BW
vpbroadcastb %eax, %zmm30 # AVX512BW
vpbroadcastb %eax, %zmm30{%k7} # AVX512BW
vpbroadcastb %eax, %zmm30{%k7}{z} # AVX512BW
vpbroadcastw %xmm29, %zmm30 # AVX512BW
vpbroadcastw %xmm29, %zmm30{%k7} # AVX512BW
vpbroadcastw %xmm29, %zmm30{%k7}{z} # AVX512BW
vpbroadcastw (%rcx), %zmm30 # AVX512BW
vpbroadcastw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpbroadcastw 254(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastw 256(%rdx), %zmm30 # AVX512BW
vpbroadcastw -256(%rdx), %zmm30 # AVX512BW Disp8
vpbroadcastw -258(%rdx), %zmm30 # AVX512BW
vpbroadcastw %eax, %zmm30 # AVX512BW
vpbroadcastw %eax, %zmm30{%k7} # AVX512BW
vpbroadcastw %eax, %zmm30{%k7}{z} # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpeqw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpeqw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpeqw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpeqw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpeqw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpeqw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtb %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtb (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtb 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtb 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtb -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtb -8256(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5 # AVX512BW
vpcmpgtw %zmm29, %zmm30, %k5{%k7} # AVX512BW
vpcmpgtw (%rcx), %zmm30, %k5 # AVX512BW
vpcmpgtw 0x123(%rax,%r14,8), %zmm30, %k5 # AVX512BW
vpcmpgtw 8128(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw 8192(%rdx), %zmm30, %k5 # AVX512BW
vpcmpgtw -8192(%rdx), %zmm30, %k5 # AVX512BW Disp8
vpcmpgtw -8256(%rdx), %zmm30, %k5 # AVX512BW
vpblendmw %zmm28, %zmm29, %zmm30 # AVX512BW
vpblendmw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpblendmw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpblendmw (%rcx), %zmm29, %zmm30 # AVX512BW
vpblendmw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpblendmw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpblendmw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpblendmw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpextrb $0xab, %xmm29, %eax # AVX512BW
vpextrb $123, %xmm29, %rax # AVX512BW
vpextrb $123, %xmm29, %r8 # AVX512BW
vpextrb $123, %xmm29, (%rcx) # AVX512BW
vpextrb $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrb $123, %xmm29, 127(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, 128(%rdx) # AVX512BW
vpextrb $123, %xmm29, -128(%rdx) # AVX512BW Disp8
vpextrb $123, %xmm29, -129(%rdx) # AVX512BW
vpextrw $123, %xmm29, (%rcx) # AVX512BW
vpextrw $123, %xmm29, 0x123(%rax,%r14,8) # AVX512BW
vpextrw $123, %xmm29, 254(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, 256(%rdx) # AVX512BW
vpextrw $123, %xmm29, -256(%rdx) # AVX512BW Disp8
vpextrw $123, %xmm29, -258(%rdx) # AVX512BW
vpextrw $0xab, %xmm30, %eax # AVX512BW
vpextrw $123, %xmm30, %rax # AVX512BW
vpextrw $123, %xmm30, %r8 # AVX512BW
vpinsrb $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %rax, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, %r13, %xmm29, %xmm30 # AVX512BW
vpinsrb $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, 127(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, 128(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrb $123, -128(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrb $123, -129(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $0xab, %eax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %rax, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %ebp, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, %r13, %xmm29, %xmm30 # AVX512BW
vpinsrw $123, (%rcx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, 254(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, 256(%rdx), %xmm29, %xmm30 # AVX512BW
vpinsrw $123, -256(%rdx), %xmm29, %xmm30 # AVX512BW Disp8
vpinsrw $123, -258(%rdx), %xmm29, %xmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaddwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaddwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaddwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaddwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaddwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaddwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxub (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmaxuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmaxuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmaxuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmaxuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmaxuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmaxuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30 # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminub %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminub (%rcx), %zmm29, %zmm30 # AVX512BW
vpminub 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminub 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminub -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminub -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpminuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpminuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpminuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpminuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpminuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpminuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmovsxbw %ymm29, %zmm30 # AVX512BW
vpmovsxbw %ymm29, %zmm30{%k7} # AVX512BW
vpmovsxbw %ymm29, %zmm30{%k7}{z} # AVX512BW
vpmovsxbw (%rcx), %zmm30 # AVX512BW
vpmovsxbw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpmovsxbw 4064(%rdx), %zmm30 # AVX512BW Disp8
vpmovsxbw 4096(%rdx), %zmm30 # AVX512BW
vpmovsxbw -4096(%rdx), %zmm30 # AVX512BW Disp8
vpmovsxbw -4128(%rdx), %zmm30 # AVX512BW
vpmovzxbw %ymm29, %zmm30 # AVX512BW
vpmovzxbw %ymm29, %zmm30{%k7} # AVX512BW
vpmovzxbw %ymm29, %zmm30{%k7}{z} # AVX512BW
vpmovzxbw (%rcx), %zmm30 # AVX512BW
vpmovzxbw 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpmovzxbw 4064(%rdx), %zmm30 # AVX512BW Disp8
vpmovzxbw 4096(%rdx), %zmm30 # AVX512BW
vpmovzxbw -4096(%rdx), %zmm30 # AVX512BW Disp8
vpmovzxbw -4128(%rdx), %zmm30 # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhrsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhrsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhrsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhrsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhrsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhrsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhrsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhuw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhuw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhuw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhuw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhuw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhuw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhuw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmulhw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmulhw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmulhw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmulhw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmulhw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmulhw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30 # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpmullw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpmullw (%rcx), %zmm29, %zmm30 # AVX512BW
vpmullw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpmullw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmullw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpmullw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpmullw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsadbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsadbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsadbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsadbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsadbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsadbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsadbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30 # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpshufb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshufb (%rcx), %zmm29, %zmm30 # AVX512BW
vpshufb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpshufb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpshufb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpshufb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30 # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpshufhw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshufhw $123, %zmm29, %zmm30 # AVX512BW
vpshufhw $123, (%rcx), %zmm30 # AVX512BW
vpshufhw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpshufhw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpshufhw $123, 8192(%rdx), %zmm30 # AVX512BW
vpshufhw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpshufhw $123, -8256(%rdx), %zmm30 # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30 # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpshuflw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpshuflw $123, %zmm29, %zmm30 # AVX512BW
vpshuflw $123, (%rcx), %zmm30 # AVX512BW
vpshuflw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpshuflw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpshuflw $123, 8192(%rdx), %zmm30 # AVX512BW
vpshuflw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpshuflw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsllw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsllw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsllw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsllw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsllw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsllw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsllw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsraw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsraw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsraw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsraw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsraw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsraw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsraw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30 # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlw %xmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsrlw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsrlw 2032(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlw 2048(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlw -2048(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlw -2064(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrldq $0xab, %zmm29, %zmm30 # AVX512BW
vpsrldq $123, %zmm29, %zmm30 # AVX512BW
vpsrldq $123, (%rcx), %zmm30 # AVX512BW
vpsrldq $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsrldq $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsrldq $123, 8192(%rdx), %zmm30 # AVX512BW
vpsrldq $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsrldq $123, -8256(%rdx), %zmm30 # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30 # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlw $123, %zmm29, %zmm30 # AVX512BW
vpsrlw $123, (%rcx), %zmm30 # AVX512BW
vpsrlw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsrlw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsrlw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsrlw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsrlw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsraw $0xab, %zmm29, %zmm30 # AVX512BW
vpsraw $0xab, %zmm29, %zmm30{%k7} # AVX512BW
vpsraw $0xab, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsraw $123, %zmm29, %zmm30 # AVX512BW
vpsraw $123, (%rcx), %zmm30 # AVX512BW
vpsraw $123, 0x123(%rax,%r14,8), %zmm30 # AVX512BW
vpsraw $123, 8128(%rdx), %zmm30 # AVX512BW Disp8
vpsraw $123, 8192(%rdx), %zmm30 # AVX512BW
vpsraw $123, -8192(%rdx), %zmm30 # AVX512BW Disp8
vpsraw $123, -8256(%rdx), %zmm30 # AVX512BW
vpsrlvw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsrlvw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsrlvw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsrlvw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsrlvw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsrlvw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlvw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsrlvw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsrlvw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsravw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsravw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsravw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsravw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsravw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsravw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsravw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsravw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsravw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubsb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubsb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubsb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubsb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubsw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubsw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubsw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubsw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubsw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubsw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubusb %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubusb (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubusb 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubusb 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusb 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusb -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusb -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubusw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubusw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubusw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubusw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubusw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubusw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30 # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpsubw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpsubw (%rcx), %zmm29, %zmm30 # AVX512BW
vpsubw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpsubw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpsubw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpsubw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpckhbw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpckhbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpckhbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpckhwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpckhwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpckhwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpckhwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpckhwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpcklbw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpcklbw (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpcklbw 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklbw 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklbw -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklbw -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30 # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30{%k7} # AVX512BW
vpunpcklwd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512BW
vpunpcklwd (%rcx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512BW
vpunpcklwd 8128(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklwd 8192(%rdx), %zmm29, %zmm30 # AVX512BW
vpunpcklwd -8192(%rdx), %zmm29, %zmm30 # AVX512BW Disp8
vpunpcklwd -8256(%rdx), %zmm29, %zmm30 # AVX512BW
vpmovwb %zmm29, %ymm30 # AVX512BW
vpmovwb %zmm29, %ymm30{%k7} # AVX512BW
vpmovwb %zmm29, %ymm30{%k7}{z} # AVX512BW
vpmovswb %zmm29, %ymm30 # AVX512BW
vpmovswb %zmm29, %ymm30{%k7} # AVX512BW
vpmovswb %zmm29, %ymm30{%k7}{z} # AVX512BW
vpmovuswb %zmm29, %ymm30 # AVX512BW
vpmovuswb %zmm29, %ymm30{%k7} # AVX512BW
vpmovuswb %zmm29, %ymm30{%k7}{z} # AVX512BW
# --- AVX512BW encoding tests, AT&T syntax ---------------------------------
# Each line exercises one operand / masking / displacement form of an
# AVX-512BW instruction.  Lines tagged "Disp8" use a displacement that is an
# exact multiple of the operand size and so must assemble to the compressed
# 8-bit (EVEX disp8*N) encoding; the neighboring untagged lines fall just
# outside that range and must use a full 32-bit displacement.
# {%k7} = merge-masking, {%k7}{z} = zero-masking.
# NOTE(review): file metadata names this chunk x86-64-avx-wig.s, but the
# content is clearly AVX-512BW test coverage — metadata join looks wrong.
	vdbpsadbw $0xab, %zmm28, %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw $0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vdbpsadbw $0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vdbpsadbw $123, %zmm28, %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw $123, (%rcx), %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw $123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw $123, 8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vdbpsadbw $123, 8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vdbpsadbw $123, -8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vdbpsadbw $123, -8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpermw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpermw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpermw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpermw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpermw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermt2w %zmm28, %zmm29, %zmm30	 # AVX512BW
	vpermt2w %zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpermt2w %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpermt2w (%rcx), %zmm29, %zmm30	 # AVX512BW
	vpermt2w 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpermt2w 8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermt2w 8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermt2w -8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermt2w -8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpslldq	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpslldq	$123, %zmm29, %zmm30	 # AVX512BW
	vpslldq	$123, (%rcx), %zmm30	 # AVX512BW
	vpslldq	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpslldq	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpslldq	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpslldq	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpslldq	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpsllw	$0xab, %zmm29, %zmm30	 # AVX512BW
	vpsllw	$0xab, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsllw	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsllw	$123, %zmm29, %zmm30	 # AVX512BW
	vpsllw	$123, (%rcx), %zmm30	 # AVX512BW
	vpsllw	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vpsllw	$123, 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vpsllw	$123, 8192(%rdx), %zmm30	 # AVX512BW
	vpsllw	$123, -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vpsllw	$123, -8256(%rdx), %zmm30	 # AVX512BW
	vpsllvw	%zmm28, %zmm29, %zmm30	 # AVX512BW
	vpsllvw	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpsllvw	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpsllvw	(%rcx), %zmm29, %zmm30	 # AVX512BW
	vpsllvw	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpsllvw	8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsllvw	8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpsllvw	-8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpsllvw	-8256(%rdx), %zmm29, %zmm30	 # AVX512BW
	vmovdqu8 %zmm29, %zmm30	 # AVX512BW
	vmovdqu8 %zmm29, %zmm30{%k7}	 # AVX512BW
	vmovdqu8 %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vmovdqu8 (%rcx), %zmm30	 # AVX512BW
	vmovdqu8 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vmovdqu8 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu8 8192(%rdx), %zmm30	 # AVX512BW
	vmovdqu8 -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu8 -8256(%rdx), %zmm30	 # AVX512BW
	vmovdqu16 %zmm29, %zmm30	 # AVX512BW
	vmovdqu16 %zmm29, %zmm30{%k7}	 # AVX512BW
	vmovdqu16 %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vmovdqu16 (%rcx), %zmm30	 # AVX512BW
	vmovdqu16 0x123(%rax,%r14,8), %zmm30	 # AVX512BW
	vmovdqu16 8128(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu16 8192(%rdx), %zmm30	 # AVX512BW
	vmovdqu16 -8192(%rdx), %zmm30	 # AVX512BW Disp8
	vmovdqu16 -8256(%rdx), %zmm30	 # AVX512BW
# Opmask (k-register) logical/move/shift forms: the 32- and 64-bit widths
# (d/q suffixes) of these instructions were introduced with AVX-512BW.
	kandq	%k7, %k6, %k5	 # AVX512BW
	kandd	%k7, %k6, %k5	 # AVX512BW
	kandnq	%k7, %k6, %k5	 # AVX512BW
	kandnd	%k7, %k6, %k5	 # AVX512BW
	korq	%k7, %k6, %k5	 # AVX512BW
	kord	%k7, %k6, %k5	 # AVX512BW
	kxnorq	%k7, %k6, %k5	 # AVX512BW
	kxnord	%k7, %k6, %k5	 # AVX512BW
	kxorq	%k7, %k6, %k5	 # AVX512BW
	kxord	%k7, %k6, %k5	 # AVX512BW
	knotq	%k6, %k5	 # AVX512BW
	knotd	%k6, %k5	 # AVX512BW
	kortestq %k6, %k5	 # AVX512BW
	kortestd %k6, %k5	 # AVX512BW
	ktestq	%k6, %k5	 # AVX512BW
	ktestd	%k6, %k5	 # AVX512BW
	kshiftrq $0xab, %k6, %k5	 # AVX512BW
	kshiftrq $123, %k6, %k5	 # AVX512BW
	kshiftrd $0xab, %k6, %k5	 # AVX512BW
	kshiftrd $123, %k6, %k5	 # AVX512BW
	kshiftlq $0xab, %k6, %k5	 # AVX512BW
	kshiftlq $123, %k6, %k5	 # AVX512BW
	kshiftld $0xab, %k6, %k5	 # AVX512BW
	kshiftld $123, %k6, %k5	 # AVX512BW
	kmovq	%k6, %k5	 # AVX512BW
	kmovq	(%rcx), %k5	 # AVX512BW
	kmovq	0x123(%rax,%r14,8), %k5	 # AVX512BW
	kmovd	%k6, %k5	 # AVX512BW
	kmovd	(%rcx), %k5	 # AVX512BW
	kmovd	0x123(%rax,%r14,8), %k5	 # AVX512BW
	kmovq	%k5, (%rcx)	 # AVX512BW
	kmovq	%k5, 0x123(%rax,%r14,8)	 # AVX512BW
	kmovd	%k5, (%rcx)	 # AVX512BW
	kmovd	%k5, 0x123(%rax,%r14,8)	 # AVX512BW
	kmovq	%rax, %k5	 # AVX512BW
	kmovq	%r8, %k5	 # AVX512BW
	kmovd	%eax, %k5	 # AVX512BW
	kmovd	%ebp, %k5	 # AVX512BW
	kmovd	%r13d, %k5	 # AVX512BW
	kmovq	%k5, %rax	 # AVX512BW
	kmovq	%k5, %r8	 # AVX512BW
	kmovd	%k5, %eax	 # AVX512BW
	kmovd	%k5, %ebp	 # AVX512BW
	kmovd	%k5, %r13d	 # AVX512BW
	kaddq	%k7, %k6, %k5	 # AVX512BW
	kaddd	%k7, %k6, %k5	 # AVX512BW
	kunpckwd %k7, %k6, %k5	 # AVX512BW
	kunpckdq %k7, %k6, %k5	 # AVX512BW
# Down-converting (truncate/saturate) stores to memory, including the
# merge-masked {%k7} store forms, plus masked vmovdqu8/16 stores.
	vpmovwb	%zmm30, (%rcx)	 # AVX512BW
	vpmovwb	%zmm30, (%rcx){%k7}	 # AVX512BW
	vpmovwb	%zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vpmovwb	%zmm30, 4064(%rdx)	 # AVX512BW Disp8
	vpmovwb	%zmm30, 4096(%rdx)	 # AVX512BW
	vpmovwb	%zmm30, -4096(%rdx)	 # AVX512BW Disp8
	vpmovwb	%zmm30, -4128(%rdx)	 # AVX512BW
	vpmovswb %zmm30, (%rcx)	 # AVX512BW
	vpmovswb %zmm30, (%rcx){%k7}	 # AVX512BW
	vpmovswb %zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vpmovswb %zmm30, 4064(%rdx)	 # AVX512BW Disp8
	vpmovswb %zmm30, 4096(%rdx)	 # AVX512BW
	vpmovswb %zmm30, -4096(%rdx)	 # AVX512BW Disp8
	vpmovswb %zmm30, -4128(%rdx)	 # AVX512BW
	vpmovuswb %zmm30, (%rcx)	 # AVX512BW
	vpmovuswb %zmm30, (%rcx){%k7}	 # AVX512BW
	vpmovuswb %zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vpmovuswb %zmm30, 4064(%rdx)	 # AVX512BW Disp8
	vpmovuswb %zmm30, 4096(%rdx)	 # AVX512BW
	vpmovuswb %zmm30, -4096(%rdx)	 # AVX512BW Disp8
	vpmovuswb %zmm30, -4128(%rdx)	 # AVX512BW
	vmovdqu8 %zmm30, (%rcx)	 # AVX512BW
	vmovdqu8 %zmm30, (%rcx){%k7}	 # AVX512BW
	vmovdqu8 %zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vmovdqu8 %zmm30, 8128(%rdx)	 # AVX512BW Disp8
	vmovdqu8 %zmm30, 8192(%rdx)	 # AVX512BW
	vmovdqu8 %zmm30, -8192(%rdx)	 # AVX512BW Disp8
	vmovdqu8 %zmm30, -8256(%rdx)	 # AVX512BW
	vmovdqu16 %zmm30, (%rcx)	 # AVX512BW
	vmovdqu16 %zmm30, (%rcx){%k7}	 # AVX512BW
	vmovdqu16 %zmm30, 0x123(%rax,%r14,8)	 # AVX512BW
	vmovdqu16 %zmm30, 8128(%rdx)	 # AVX512BW Disp8
	vmovdqu16 %zmm30, 8192(%rdx)	 # AVX512BW
	vmovdqu16 %zmm30, -8192(%rdx)	 # AVX512BW Disp8
	vmovdqu16 %zmm30, -8256(%rdx)	 # AVX512BW
	vpermi2w %zmm28, %zmm29, %zmm30	 # AVX512BW
	vpermi2w %zmm28, %zmm29, %zmm30{%k7}	 # AVX512BW
	vpermi2w %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512BW
	vpermi2w (%rcx), %zmm29, %zmm30	 # AVX512BW
	vpermi2w 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512BW
	vpermi2w 8128(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermi2w 8192(%rdx), %zmm29, %zmm30	 # AVX512BW
	vpermi2w -8192(%rdx), %zmm29, %zmm30	 # AVX512BW Disp8
	vpermi2w -8256(%rdx), %zmm29, %zmm30	 # AVX512BW
# Test / compare instructions writing an opmask (k-register) result.
	vptestmb %zmm29, %zmm30, %k5	 # AVX512BW
	vptestmb %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vptestmb (%rcx), %zmm30, %k5	 # AVX512BW
	vptestmb 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vptestmb 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmb 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vptestmb -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmb -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vptestmw %zmm29, %zmm30, %k5	 # AVX512BW
	vptestmw %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vptestmw (%rcx), %zmm30, %k5	 # AVX512BW
	vptestmw 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vptestmw 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmw 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vptestmw -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vptestmw -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpmovb2m %zmm30, %k5	 # AVX512BW
	vpmovw2m %zmm30, %k5	 # AVX512BW
	vpmovm2b %k5, %zmm30	 # AVX512BW
	vpmovm2w %k5, %zmm30	 # AVX512BW
	vptestnmb %zmm28, %zmm29, %k5	 # AVX512BW
	vptestnmb %zmm28, %zmm29, %k5{%k7}	 # AVX512BW
	vptestnmb (%rcx), %zmm29, %k5	 # AVX512BW
	vptestnmb 0x123(%rax,%r14,8), %zmm29, %k5	 # AVX512BW
	vptestnmb 8128(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmb 8192(%rdx), %zmm29, %k5	 # AVX512BW
	vptestnmb -8192(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmb -8256(%rdx), %zmm29, %k5	 # AVX512BW
	vptestnmw %zmm28, %zmm29, %k5	 # AVX512BW
	vptestnmw %zmm28, %zmm29, %k5{%k7}	 # AVX512BW
	vptestnmw (%rcx), %zmm29, %k5	 # AVX512BW
	vptestnmw 0x123(%rax,%r14,8), %zmm29, %k5	 # AVX512BW
	vptestnmw 8128(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmw 8192(%rdx), %zmm29, %k5	 # AVX512BW
	vptestnmw -8192(%rdx), %zmm29, %k5	 # AVX512BW Disp8
	vptestnmw -8256(%rdx), %zmm29, %k5	 # AVX512BW
	vpcmpb	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpb	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpb	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpb	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpb	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpb	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpw	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpw	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpw	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpw	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpw	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpw	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpub	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpub	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpub	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpub	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpub	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpub	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$0xab, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpuw	$0xab, %zmm29, %zmm30, %k5{%k7}	 # AVX512BW
	vpcmpuw	$123, %zmm29, %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, (%rcx), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, 0x123(%rax,%r14,8), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, 8128(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpuw	$123, 8192(%rdx), %zmm30, %k5	 # AVX512BW
	vpcmpuw	$123, -8192(%rdx), %zmm30, %k5	 # AVX512BW Disp8
	vpcmpuw	$123, -8256(%rdx), %zmm30, %k5	 # AVX512BW
# --- AVX512BW encoding tests, Intel syntax --------------------------------
# Switch the assembler to Intel operand order (dest first, no % prefixes)
# and repeat the AVX-512BW operand/masking/displacement forms.  As above,
# "Disp8" marks displacements expected to take the compressed 8-bit EVEX
# encoding; {1to16} is embedded-broadcast of a 32-bit memory element.
	.intel_syntax noprefix
	vpabsb	zmm30, zmm29	 # AVX512BW
	vpabsb	zmm30{k7}, zmm29	 # AVX512BW
	vpabsb	zmm30{k7}{z}, zmm29	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpabsb	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpabsb	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpabsb	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpabsw	zmm30, zmm29	 # AVX512BW
	vpabsw	zmm30{k7}, zmm29	 # AVX512BW
	vpabsw	zmm30{k7}{z}, zmm29	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpabsw	zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpabsw	zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpabsw	zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackssdw zmm30, zmm29, zmm28	 # AVX512BW
	vpackssdw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpackssdw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpackssdw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpackssdw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpackssdw zmm30, zmm29, [rcx]{1to16}	 # AVX512BW
	vpackssdw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpackssdw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpackssdw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpackssdw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackssdw zmm30, zmm29, [rdx+508]{1to16}	 # AVX512BW Disp8
	vpackssdw zmm30, zmm29, [rdx+512]{1to16}	 # AVX512BW
	vpackssdw zmm30, zmm29, [rdx-512]{1to16}	 # AVX512BW Disp8
	vpackssdw zmm30, zmm29, [rdx-516]{1to16}	 # AVX512BW
	vpacksswb zmm30, zmm29, zmm28	 # AVX512BW
	vpacksswb zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpacksswb zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpacksswb zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpacksswb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpacksswb zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackusdw zmm30, zmm29, zmm28	 # AVX512BW
	vpackusdw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpackusdw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpackusdw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpackusdw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpackusdw zmm30, zmm29, [rcx]{1to16}	 # AVX512BW
	vpackusdw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpackusdw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpackusdw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpackusdw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpackusdw zmm30, zmm29, [rdx+508]{1to16}	 # AVX512BW Disp8
	vpackusdw zmm30, zmm29, [rdx+512]{1to16}	 # AVX512BW
	vpackusdw zmm30, zmm29, [rdx-512]{1to16}	 # AVX512BW Disp8
	vpackusdw zmm30, zmm29, [rdx-516]{1to16}	 # AVX512BW
	vpackuswb zmm30, zmm29, zmm28	 # AVX512BW
	vpackuswb zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpackuswb zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpackuswb zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpackuswb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpackuswb zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddb	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddsb	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddsb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddsb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddsb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddsw	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddsw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddsw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddsw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddusb zmm30, zmm29, zmm28	 # AVX512BW
	vpaddusb zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddusb zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddusb zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddusb zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddusw zmm30, zmm29, zmm28	 # AVX512BW
	vpaddusw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddusw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddusw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddusw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpaddw	zmm30, zmm29, zmm28	 # AVX512BW
	vpaddw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpaddw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpaddw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpalignr zmm30, zmm29, zmm28, 0xab	 # AVX512BW
	vpalignr zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512BW
	vpalignr zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512BW
	vpalignr zmm30, zmm29, zmm28, 123	 # AVX512BW
	vpalignr zmm30, zmm29, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpalignr zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpalignr zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpalignr zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpalignr zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpalignr zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
	vpavgb	zmm30, zmm29, zmm28	 # AVX512BW
	vpavgb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpavgb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpavgb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpavgw	zmm30, zmm29, zmm28	 # AVX512BW
	vpavgw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpavgw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpavgw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpblendmb zmm30, zmm29, zmm28	 # AVX512BW
	vpblendmb zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpblendmb zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpblendmb zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpblendmb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpblendmb zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpblendmb zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpblendmb zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpblendmb zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
# Broadcast from the low element of an xmm register, a memory scalar, or a
# general-purpose register (GPR-source forms are AVX512BW-specific).
	vpbroadcastb zmm30, xmm29	 # AVX512BW
	vpbroadcastb zmm30{k7}, xmm29	 # AVX512BW
	vpbroadcastb zmm30{k7}{z}, xmm29	 # AVX512BW
	vpbroadcastb zmm30, BYTE PTR [rcx]	 # AVX512BW
	vpbroadcastb zmm30, BYTE PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpbroadcastb zmm30, BYTE PTR [rdx+127]	 # AVX512BW Disp8
	vpbroadcastb zmm30, BYTE PTR [rdx+128]	 # AVX512BW
	vpbroadcastb zmm30, BYTE PTR [rdx-128]	 # AVX512BW Disp8
	vpbroadcastb zmm30, BYTE PTR [rdx-129]	 # AVX512BW
	vpbroadcastb zmm30, eax	 # AVX512BW
	vpbroadcastb zmm30{k7}, eax	 # AVX512BW
	vpbroadcastb zmm30{k7}{z}, eax	 # AVX512BW
	vpbroadcastw zmm30, xmm29	 # AVX512BW
	vpbroadcastw zmm30{k7}, xmm29	 # AVX512BW
	vpbroadcastw zmm30{k7}{z}, xmm29	 # AVX512BW
	vpbroadcastw zmm30, WORD PTR [rcx]	 # AVX512BW
	vpbroadcastw zmm30, WORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpbroadcastw zmm30, WORD PTR [rdx+254]	 # AVX512BW Disp8
	vpbroadcastw zmm30, WORD PTR [rdx+256]	 # AVX512BW
	vpbroadcastw zmm30, WORD PTR [rdx-256]	 # AVX512BW Disp8
	vpbroadcastw zmm30, WORD PTR [rdx-258]	 # AVX512BW
	vpbroadcastw zmm30, eax	 # AVX512BW
	vpbroadcastw zmm30{k7}, eax	 # AVX512BW
	vpbroadcastw zmm30{k7}{z}, eax	 # AVX512BW
	vpcmpeqb k5, zmm30, zmm29	 # AVX512BW
	vpcmpeqb k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpeqb k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpeqb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpeqb k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpcmpeqw k5, zmm30, zmm29	 # AVX512BW
	vpcmpeqw k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpeqw k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpeqw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpeqw k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpcmpgtb k5, zmm30, zmm29	 # AVX512BW
	vpcmpgtb k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpgtb k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpgtb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpgtb k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpcmpgtw k5, zmm30, zmm29	 # AVX512BW
	vpcmpgtw k5{k7}, zmm30, zmm29	 # AVX512BW
	vpcmpgtw k5, zmm30, ZMMWORD PTR [rcx]	 # AVX512BW
	vpcmpgtw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpcmpgtw k5, zmm30, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpblendmw zmm30, zmm29, zmm28	 # AVX512BW
	vpblendmw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpblendmw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpblendmw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpblendmw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpblendmw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpblendmw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpblendmw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpblendmw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
# Extract / insert scalar elements between xmm registers, GPRs, and memory.
	vpextrb	eax, xmm29, 0xab	 # AVX512BW
	vpextrb	rax, xmm29, 123	 # AVX512BW
	vpextrb	r8, xmm29, 123	 # AVX512BW
	vpextrb	BYTE PTR [rcx], xmm29, 123	 # AVX512BW
	vpextrb	BYTE PTR [rax+r14*8+0x1234], xmm29, 123	 # AVX512BW
	vpextrb	BYTE PTR [rdx+127], xmm29, 123	 # AVX512BW Disp8
	vpextrb	BYTE PTR [rdx+128], xmm29, 123	 # AVX512BW
	vpextrb	BYTE PTR [rdx-128], xmm29, 123	 # AVX512BW Disp8
	vpextrb	BYTE PTR [rdx-129], xmm29, 123	 # AVX512BW
	vpextrw	WORD PTR [rcx], xmm29, 123	 # AVX512BW
	vpextrw	WORD PTR [rax+r14*8+0x1234], xmm29, 123	 # AVX512BW
	vpextrw	WORD PTR [rdx+254], xmm29, 123	 # AVX512BW Disp8
	vpextrw	WORD PTR [rdx+256], xmm29, 123	 # AVX512BW
	vpextrw	WORD PTR [rdx-256], xmm29, 123	 # AVX512BW Disp8
	vpextrw	WORD PTR [rdx-258], xmm29, 123	 # AVX512BW
	vpextrw	eax, xmm30, 0xab	 # AVX512BW
	vpextrw	rax, xmm30, 123	 # AVX512BW
	vpextrw	r8, xmm30, 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, eax, 0xab	 # AVX512BW
	vpinsrb	xmm30, xmm29, rax, 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, ebp, 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, r13, 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, BYTE PTR [rcx], 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, BYTE PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, BYTE PTR [rdx+127], 123	 # AVX512BW Disp8
	vpinsrb	xmm30, xmm29, BYTE PTR [rdx+128], 123	 # AVX512BW
	vpinsrb	xmm30, xmm29, BYTE PTR [rdx-128], 123	 # AVX512BW Disp8
	vpinsrb	xmm30, xmm29, BYTE PTR [rdx-129], 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, eax, 0xab	 # AVX512BW
	vpinsrw	xmm30, xmm29, rax, 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, ebp, 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, r13, 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, WORD PTR [rcx], 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, WORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, WORD PTR [rdx+254], 123	 # AVX512BW Disp8
	vpinsrw	xmm30, xmm29, WORD PTR [rdx+256], 123	 # AVX512BW
	vpinsrw	xmm30, xmm29, WORD PTR [rdx-256], 123	 # AVX512BW Disp8
	vpinsrw	xmm30, xmm29, WORD PTR [rdx-258], 123	 # AVX512BW
	vpmaddubsw zmm30, zmm29, zmm28	 # AVX512BW
	vpmaddubsw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmaddubsw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmaddubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmaddwd zmm30, zmm29, zmm28	 # AVX512BW
	vpmaddwd zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmaddwd zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmaddwd zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmaddwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmaddwd zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmaxsb	zmm30, zmm29, zmm28	 # AVX512BW
	vpmaxsb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmaxsb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmaxsb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmaxsb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmaxsb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmaxsb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmaxsb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmaxsb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmaxsw	zmm30, zmm29, zmm28	 # AVX512BW
	vpmaxsw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmaxsw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmaxsw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmaxsw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmaxsw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmaxsw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmaxsw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmaxsw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmaxub	zmm30, zmm29, zmm28	 # AVX512BW
	vpmaxub	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmaxub	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmaxub	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmaxub	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmaxub	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmaxub	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmaxub	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmaxub	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmaxuw	zmm30, zmm29, zmm28	 # AVX512BW
	vpmaxuw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmaxuw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmaxuw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmaxuw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmaxuw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmaxuw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmaxuw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmaxuw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpminsb	zmm30, zmm29, zmm28	 # AVX512BW
	vpminsb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpminsb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpminsb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpminsb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpminsb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpminsb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpminsb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpminsb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpminsw	zmm30, zmm29, zmm28	 # AVX512BW
	vpminsw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpminsw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpminsw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpminsw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpminsw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpminsw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpminsw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpminsw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpminub	zmm30, zmm29, zmm28	 # AVX512BW
	vpminub	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpminub	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpminub	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpminub	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpminub	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpminub	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpminub	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpminub	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpminuw	zmm30, zmm29, zmm28	 # AVX512BW
	vpminuw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpminuw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpminuw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpminuw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpminuw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpminuw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpminuw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpminuw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
# Widening converts take a ymm (half-width) source or 32-byte memory operand.
	vpmovsxbw zmm30, ymm29	 # AVX512BW
	vpmovsxbw zmm30{k7}, ymm29	 # AVX512BW
	vpmovsxbw zmm30{k7}{z}, ymm29	 # AVX512BW
	vpmovsxbw zmm30, YMMWORD PTR [rcx]	 # AVX512BW
	vpmovsxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmovsxbw zmm30, YMMWORD PTR [rdx+4064]	 # AVX512BW Disp8
	vpmovsxbw zmm30, YMMWORD PTR [rdx+4096]	 # AVX512BW
	vpmovsxbw zmm30, YMMWORD PTR [rdx-4096]	 # AVX512BW Disp8
	vpmovsxbw zmm30, YMMWORD PTR [rdx-4128]	 # AVX512BW
	vpmovzxbw zmm30, ymm29	 # AVX512BW
	vpmovzxbw zmm30{k7}, ymm29	 # AVX512BW
	vpmovzxbw zmm30{k7}{z}, ymm29	 # AVX512BW
	vpmovzxbw zmm30, YMMWORD PTR [rcx]	 # AVX512BW
	vpmovzxbw zmm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmovzxbw zmm30, YMMWORD PTR [rdx+4064]	 # AVX512BW Disp8
	vpmovzxbw zmm30, YMMWORD PTR [rdx+4096]	 # AVX512BW
	vpmovzxbw zmm30, YMMWORD PTR [rdx-4096]	 # AVX512BW Disp8
	vpmovzxbw zmm30, YMMWORD PTR [rdx-4128]	 # AVX512BW
	vpmulhrsw zmm30, zmm29, zmm28	 # AVX512BW
	vpmulhrsw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmulhrsw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmulhrsw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmulhuw zmm30, zmm29, zmm28	 # AVX512BW
	vpmulhuw zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmulhuw zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmulhuw zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmulhuw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmulhuw zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmulhw	zmm30, zmm29, zmm28	 # AVX512BW
	vpmulhw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmulhw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmulhw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmulhw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmulhw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmulhw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmulhw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmulhw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpmullw	zmm30, zmm29, zmm28	 # AVX512BW
	vpmullw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpmullw	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpmullw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpmullw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpmullw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpmullw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpmullw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpmullw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpsadbw	zmm30, zmm29, zmm28	 # AVX512BW
	vpsadbw	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpsadbw	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpsadbw	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpsadbw	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpsadbw	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpsadbw	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpshufb	zmm30, zmm29, zmm28	 # AVX512BW
	vpshufb	zmm30{k7}, zmm29, zmm28	 # AVX512BW
	vpshufb	zmm30{k7}{z}, zmm29, zmm28	 # AVX512BW
	vpshufb	zmm30, zmm29, ZMMWORD PTR [rcx]	 # AVX512BW
	vpshufb	zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpshufb	zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512BW Disp8
	vpshufb	zmm30, zmm29, ZMMWORD PTR [rdx+8192]	 # AVX512BW
	vpshufb	zmm30, zmm29, ZMMWORD PTR [rdx-8192]	 # AVX512BW Disp8
	vpshufb	zmm30, zmm29, ZMMWORD PTR [rdx-8256]	 # AVX512BW
	vpshufhw zmm30, zmm29, 0xab	 # AVX512BW
	vpshufhw zmm30{k7}, zmm29, 0xab	 # AVX512BW
	vpshufhw zmm30{k7}{z}, zmm29, 0xab	 # AVX512BW
	vpshufhw zmm30, zmm29, 123	 # AVX512BW
	vpshufhw zmm30, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpshufhw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpshufhw zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpshufhw zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpshufhw zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpshufhw zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
	vpshuflw zmm30, zmm29, 0xab	 # AVX512BW
	vpshuflw zmm30{k7}, zmm29, 0xab	 # AVX512BW
	vpshuflw zmm30{k7}{z}, zmm29, 0xab	 # AVX512BW
	vpshuflw zmm30, zmm29, 123	 # AVX512BW
	vpshuflw zmm30, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpshuflw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpshuflw zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpshuflw zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpshuflw zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpshuflw zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
# Shift-by-xmm forms: the count comes from an xmm register or a 16-byte
# memory operand (hence the XMMWORD size and the 2032/2048 Disp8 boundary).
	vpsllw	zmm30, zmm29, xmm28	 # AVX512BW
	vpsllw	zmm30{k7}, zmm29, xmm28	 # AVX512BW
	vpsllw	zmm30{k7}{z}, zmm29, xmm28	 # AVX512BW
	vpsllw	zmm30, zmm29, XMMWORD PTR [rcx]	 # AVX512BW
	vpsllw	zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpsllw	zmm30, zmm29, XMMWORD PTR [rdx+2032]	 # AVX512BW Disp8
	vpsllw	zmm30, zmm29, XMMWORD PTR [rdx+2048]	 # AVX512BW
	vpsllw	zmm30, zmm29, XMMWORD PTR [rdx-2048]	 # AVX512BW Disp8
	vpsllw	zmm30, zmm29, XMMWORD PTR [rdx-2064]	 # AVX512BW
	vpsraw	zmm30, zmm29, xmm28	 # AVX512BW
	vpsraw	zmm30{k7}, zmm29, xmm28	 # AVX512BW
	vpsraw	zmm30{k7}{z}, zmm29, xmm28	 # AVX512BW
	vpsraw	zmm30, zmm29, XMMWORD PTR [rcx]	 # AVX512BW
	vpsraw	zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpsraw	zmm30, zmm29, XMMWORD PTR [rdx+2032]	 # AVX512BW Disp8
	vpsraw	zmm30, zmm29, XMMWORD PTR [rdx+2048]	 # AVX512BW
	vpsraw	zmm30, zmm29, XMMWORD PTR [rdx-2048]	 # AVX512BW Disp8
	vpsraw	zmm30, zmm29, XMMWORD PTR [rdx-2064]	 # AVX512BW
	vpsrlw	zmm30, zmm29, xmm28	 # AVX512BW
	vpsrlw	zmm30{k7}, zmm29, xmm28	 # AVX512BW
	vpsrlw	zmm30{k7}{z}, zmm29, xmm28	 # AVX512BW
	vpsrlw	zmm30, zmm29, XMMWORD PTR [rcx]	 # AVX512BW
	vpsrlw	zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512BW
	vpsrlw	zmm30, zmm29, XMMWORD PTR [rdx+2032]	 # AVX512BW Disp8
	vpsrlw	zmm30, zmm29, XMMWORD PTR [rdx+2048]	 # AVX512BW
	vpsrlw	zmm30, zmm29, XMMWORD PTR [rdx-2048]	 # AVX512BW Disp8
	vpsrlw	zmm30, zmm29, XMMWORD PTR [rdx-2064]	 # AVX512BW
	vpsrldq	zmm30, zmm29, 0xab	 # AVX512BW
	vpsrldq	zmm30, zmm29, 123	 # AVX512BW
	vpsrldq	zmm30, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpsrldq	zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpsrldq	zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpsrldq	zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpsrldq	zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpsrldq	zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
	vpsrlw	zmm30, zmm29, 0xab	 # AVX512BW
	vpsrlw	zmm30{k7}, zmm29, 0xab	 # AVX512BW
	vpsrlw	zmm30{k7}{z}, zmm29, 0xab	 # AVX512BW
	vpsrlw	zmm30, zmm29, 123	 # AVX512BW
	vpsrlw	zmm30, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpsrlw	zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpsrlw	zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpsrlw	zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpsrlw	zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpsrlw	zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
	vpsraw	zmm30, zmm29, 0xab	 # AVX512BW
	vpsraw	zmm30{k7}, zmm29, 0xab	 # AVX512BW
	vpsraw	zmm30{k7}{z}, zmm29, 0xab	 # AVX512BW
	vpsraw	zmm30, zmm29, 123	 # AVX512BW
	vpsraw	zmm30, ZMMWORD PTR [rcx], 123	 # AVX512BW
	vpsraw	zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512BW
	vpsraw	zmm30, ZMMWORD PTR [rdx+8128], 123	 # AVX512BW Disp8
	vpsraw	zmm30, ZMMWORD PTR [rdx+8192], 123	 # AVX512BW
	vpsraw	zmm30, ZMMWORD PTR [rdx-8192], 123	 # AVX512BW Disp8
	vpsraw	zmm30, ZMMWORD PTR [rdx-8256], 123	 # AVX512BW
	vpsrlvw	zmm30, zmm29, zmm28	 # AVX512BW
	vpsrlvw	zmm30{k7}, zmm29, zmm28	 # AVX512BW
vpsrlvw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsrlvw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsravw zmm30, zmm29, zmm28 # AVX512BW
vpsravw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsravw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsravw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubb zmm30, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsb zmm30, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubsw zmm30, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubsw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusb zmm30, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusb zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubusw zmm30, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubusw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpsubw zmm30, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsubw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsubw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhbw zmm30, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpckhwd zmm30, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpckhwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklbw zmm30, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklbw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpunpcklwd zmm30, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpunpcklwd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovwb ymm30, zmm29 # AVX512BW
vpmovwb ymm30{k7}, zmm29 # AVX512BW
vpmovwb ymm30{k7}{z}, zmm29 # AVX512BW
vpmovswb ymm30, zmm29 # AVX512BW
vpmovswb ymm30{k7}, zmm29 # AVX512BW
vpmovswb ymm30{k7}{z}, zmm29 # AVX512BW
vpmovuswb ymm30, zmm29 # AVX512BW
vpmovuswb ymm30{k7}, zmm29 # AVX512BW
vpmovuswb ymm30{k7}{z}, zmm29 # AVX512BW
vdbpsadbw zmm30, zmm29, zmm28, 0xab # AVX512BW
vdbpsadbw zmm30{k7}, zmm29, zmm28, 0xab # AVX512BW
vdbpsadbw zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512BW
vdbpsadbw zmm30, zmm29, zmm28, 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vdbpsadbw zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpermw zmm30, zmm29, zmm28 # AVX512BW
vpermw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpermw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpermw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpermw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpermw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpermt2w zmm30, zmm29, zmm28 # AVX512BW
vpermt2w zmm30{k7}, zmm29, zmm28 # AVX512BW
vpermt2w zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpermt2w zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpslldq zmm30, zmm29, 0xab # AVX512BW
vpslldq zmm30, zmm29, 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpslldq zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpslldq zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllw zmm30, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}, zmm29, 0xab # AVX512BW
vpsllw zmm30{k7}{z}, zmm29, 0xab # AVX512BW
vpsllw zmm30, zmm29, 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpsllw zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpsllw zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpsllvw zmm30, zmm29, zmm28 # AVX512BW
vpsllvw zmm30{k7}, zmm29, zmm28 # AVX512BW
vpsllvw zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpsllvw zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vmovdqu8 zmm30, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}, zmm29 # AVX512BW
vmovdqu8 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rcx] # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vmovdqu8 zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vmovdqu8 zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vmovdqu8 zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vmovdqu16 zmm30, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}, zmm29 # AVX512BW
vmovdqu16 zmm30{k7}{z}, zmm29 # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rcx] # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vmovdqu16 zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vmovdqu16 zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vmovdqu16 zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
kandq k5, k6, k7 # AVX512BW
kandd k5, k6, k7 # AVX512BW
kandnq k5, k6, k7 # AVX512BW
kandnd k5, k6, k7 # AVX512BW
korq k5, k6, k7 # AVX512BW
kord k5, k6, k7 # AVX512BW
kxnorq k5, k6, k7 # AVX512BW
kxnord k5, k6, k7 # AVX512BW
kxorq k5, k6, k7 # AVX512BW
kxord k5, k6, k7 # AVX512BW
knotq k5, k6 # AVX512BW
knotd k5, k6 # AVX512BW
kortestq k5, k6 # AVX512BW
kortestd k5, k6 # AVX512BW
ktestq k5, k6 # AVX512BW
ktestd k5, k6 # AVX512BW
kshiftrq k5, k6, 0xab # AVX512BW
kshiftrq k5, k6, 123 # AVX512BW
kshiftrd k5, k6, 0xab # AVX512BW
kshiftrd k5, k6, 123 # AVX512BW
kshiftlq k5, k6, 0xab # AVX512BW
kshiftlq k5, k6, 123 # AVX512BW
kshiftld k5, k6, 0xab # AVX512BW
kshiftld k5, k6, 123 # AVX512BW
kmovq k5, k6 # AVX512BW
kmovq k5, QWORD PTR [rcx] # AVX512BW
kmovq k5, QWORD PTR [rax+r14*8+0x1234] # AVX512BW
kmovd k5, k6 # AVX512BW
kmovd k5, DWORD PTR [rcx] # AVX512BW
kmovd k5, DWORD PTR [rax+r14*8+0x1234] # AVX512BW
kmovq QWORD PTR [rcx], k5 # AVX512BW
kmovq QWORD PTR [rax+r14*8+0x1234], k5 # AVX512BW
kmovd DWORD PTR [rcx], k5 # AVX512BW
kmovd DWORD PTR [rax+r14*8+0x1234], k5 # AVX512BW
kmovq k5, rax # AVX512BW
kmovq k5, r8 # AVX512BW
kmovd k5, eax # AVX512BW
kmovd k5, ebp # AVX512BW
kmovd k5, r13d # AVX512BW
kmovq rax, k5 # AVX512BW
kmovq r8, k5 # AVX512BW
kmovd eax, k5 # AVX512BW
kmovd ebp, k5 # AVX512BW
kmovd r13d, k5 # AVX512BW
kaddq k5, k6, k7 # AVX512BW
kaddd k5, k6, k7 # AVX512BW
kunpckwd k5, k6, k7 # AVX512BW
kunpckdq k5, k6, k7 # AVX512BW
vpmovwb YMMWORD PTR [rcx], zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rdx+4064], zmm30 # AVX512BW Disp8
vpmovwb YMMWORD PTR [rdx+4096], zmm30 # AVX512BW
vpmovwb YMMWORD PTR [rdx-4096], zmm30 # AVX512BW Disp8
vpmovwb YMMWORD PTR [rdx-4128], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rcx], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rdx+4064], zmm30 # AVX512BW Disp8
vpmovswb YMMWORD PTR [rdx+4096], zmm30 # AVX512BW
vpmovswb YMMWORD PTR [rdx-4096], zmm30 # AVX512BW Disp8
vpmovswb YMMWORD PTR [rdx-4128], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rcx], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rdx+4064], zmm30 # AVX512BW Disp8
vpmovuswb YMMWORD PTR [rdx+4096], zmm30 # AVX512BW
vpmovuswb YMMWORD PTR [rdx-4096], zmm30 # AVX512BW Disp8
vpmovuswb YMMWORD PTR [rdx-4128], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rcx], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rdx+8128], zmm30 # AVX512BW Disp8
vmovdqu8 ZMMWORD PTR [rdx+8192], zmm30 # AVX512BW
vmovdqu8 ZMMWORD PTR [rdx-8192], zmm30 # AVX512BW Disp8
vmovdqu8 ZMMWORD PTR [rdx-8256], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rcx], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rdx+8128], zmm30 # AVX512BW Disp8
vmovdqu16 ZMMWORD PTR [rdx+8192], zmm30 # AVX512BW
vmovdqu16 ZMMWORD PTR [rdx-8192], zmm30 # AVX512BW Disp8
vmovdqu16 ZMMWORD PTR [rdx-8256], zmm30 # AVX512BW
vpermi2w zmm30, zmm29, zmm28 # AVX512BW
vpermi2w zmm30{k7}, zmm29, zmm28 # AVX512BW
vpermi2w zmm30{k7}{z}, zmm29, zmm28 # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vpermi2w zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vptestmb k5, zmm30, zmm29 # AVX512BW
vptestmb k5{k7}, zmm30, zmm29 # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestmb k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestmb k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestmb k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vptestmw k5, zmm30, zmm29 # AVX512BW
vptestmw k5{k7}, zmm30, zmm29 # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rcx] # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestmw k5, zmm30, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestmw k5, zmm30, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestmw k5, zmm30, ZMMWORD PTR [rdx-8256] # AVX512BW
vpmovb2m k5, zmm30 # AVX512BW
vpmovw2m k5, zmm30 # AVX512BW
vpmovm2b zmm30, k5 # AVX512BW
vpmovm2w zmm30, k5 # AVX512BW
vptestnmb k5, zmm29, zmm28 # AVX512BW
vptestnmb k5{k7}, zmm29, zmm28 # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestnmb k5, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestnmb k5, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestnmb k5, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vptestnmw k5, zmm29, zmm28 # AVX512BW
vptestnmw k5{k7}, zmm29, zmm28 # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rcx] # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rdx+8128] # AVX512BW Disp8
vptestnmw k5, zmm29, ZMMWORD PTR [rdx+8192] # AVX512BW
vptestnmw k5, zmm29, ZMMWORD PTR [rdx-8192] # AVX512BW Disp8
vptestnmw k5, zmm29, ZMMWORD PTR [rdx-8256] # AVX512BW
vpcmpb k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpb k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpb k5, zmm30, zmm29, 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpb k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpb k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpb k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpcmpw k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpw k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpw k5, zmm30, zmm29, 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpw k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpw k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpw k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpcmpub k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpub k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpub k5, zmm30, zmm29, 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpub k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpub k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpub k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
vpcmpuw k5, zmm30, zmm29, 0xab # AVX512BW
vpcmpuw k5{k7}, zmm30, zmm29, 0xab # AVX512BW
vpcmpuw k5, zmm30, zmm29, 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rcx], 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512BW Disp8
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512BW
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512BW Disp8
vpcmpuw k5, zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512BW
|
stsp/binutils-ia16
| 1,032
|
gas/testsuite/gas/i386/x86-64-avx512er-rcig.s
|
# Check 64bit AVX512ER-RCIG instructions
# RCIG: these AVX512ER instructions ignore the rounding-control bits, so
# only the {sae} (suppress-all-exceptions) static form is exercised here,
# once in AT&T syntax and once in Intel syntax.
.allow_index_reg
.text
_start:
# AT&T syntax (default): {sae} comes first, operands run source -> destination.
vexp2ps {sae}, %zmm29, %zmm30 # AVX512ER
vexp2pd {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28ps {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28pd {sae}, %zmm29, %zmm30 # AVX512ER
vrcp28ss {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
vrcp28sd {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
vrsqrt28ps {sae}, %zmm29, %zmm30 # AVX512ER
vrsqrt28pd {sae}, %zmm29, %zmm30 # AVX512ER
vrsqrt28ss {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
vrsqrt28sd {sae}, %xmm28, %xmm29, %xmm30 # AVX512ER
# Same instructions in Intel syntax: destination first, {sae} trailing.
.intel_syntax noprefix
vexp2ps zmm30, zmm29, {sae} # AVX512ER
vexp2pd zmm30, zmm29, {sae} # AVX512ER
vrcp28ps zmm30, zmm29, {sae} # AVX512ER
vrcp28pd zmm30, zmm29, {sae} # AVX512ER
vrcp28ss xmm30, xmm29, xmm28, {sae} # AVX512ER
vrcp28sd xmm30, xmm29, xmm28, {sae} # AVX512ER
vrsqrt28ps zmm30, zmm29, {sae} # AVX512ER
vrsqrt28pd zmm30, zmm29, {sae} # AVX512ER
vrsqrt28ss xmm30, xmm29, xmm28, {sae} # AVX512ER
vrsqrt28sd xmm30, xmm29, xmm28, {sae} # AVX512ER
|
stsp/binutils-ia16
| 3,179
|
gas/testsuite/gas/i386/x86-64-avx512_vpopcntdq.s
|
# Check 64bit AVX512_VPOPCNTDQ instructions
# Each form is exercised with: register operands (plain / {%k7} merge /
# {%k7}{z} zeroing), plain memory, indexed memory, {1toN} broadcast, and
# displacements chosen on both sides of the EVEX Disp8*N compression
# limits ("Disp8" marks the values that still fit a compressed disp8).
.allow_index_reg
.text
_start:
vpopcntd %zmm29, %zmm30 # AVX512_VPOPCNTDQ
vpopcntd %zmm29, %zmm30{%k7} # AVX512_VPOPCNTDQ
vpopcntd %zmm29, %zmm30{%k7}{z} # AVX512_VPOPCNTDQ
vpopcntd (%rcx), %zmm30 # AVX512_VPOPCNTDQ
vpopcntd 0x123(%rax,%r14,8), %zmm30 # AVX512_VPOPCNTDQ
vpopcntd (%rcx){1to16}, %zmm30 # AVX512_VPOPCNTDQ
# Full-vector accesses: disp8 granule is 64 bytes, so +/-8128 compresses,
# +8192/-8256 do not.
vpopcntd 8128(%rdx), %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntd 8192(%rdx), %zmm30 # AVX512_VPOPCNTDQ
vpopcntd -8192(%rdx), %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntd -8256(%rdx), %zmm30 # AVX512_VPOPCNTDQ
# Dword broadcast: disp8 granule is 4 bytes (+/-508 compresses).
vpopcntd 508(%rdx){1to16}, %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntd 512(%rdx){1to16}, %zmm30 # AVX512_VPOPCNTDQ
vpopcntd -512(%rdx){1to16}, %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntd -516(%rdx){1to16}, %zmm30 # AVX512_VPOPCNTDQ
vpopcntq %zmm29, %zmm30 # AVX512_VPOPCNTDQ
vpopcntq %zmm29, %zmm30{%k7} # AVX512_VPOPCNTDQ
vpopcntq %zmm29, %zmm30{%k7}{z} # AVX512_VPOPCNTDQ
vpopcntq (%rcx), %zmm30 # AVX512_VPOPCNTDQ
vpopcntq 0x123(%rax,%r14,8), %zmm30 # AVX512_VPOPCNTDQ
vpopcntq (%rcx){1to8}, %zmm30 # AVX512_VPOPCNTDQ
vpopcntq 8128(%rdx), %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntq 8192(%rdx), %zmm30 # AVX512_VPOPCNTDQ
vpopcntq -8192(%rdx), %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntq -8256(%rdx), %zmm30 # AVX512_VPOPCNTDQ
# Qword broadcast: disp8 granule is 8 bytes (+/-1016..1024 boundary).
vpopcntq 1016(%rdx){1to8}, %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntq 1024(%rdx){1to8}, %zmm30 # AVX512_VPOPCNTDQ
vpopcntq -1024(%rdx){1to8}, %zmm30 # AVX512_VPOPCNTDQ Disp8
vpopcntq -1032(%rdx){1to8}, %zmm30 # AVX512_VPOPCNTDQ
# Same coverage repeated in Intel syntax.
.intel_syntax noprefix
vpopcntd zmm30, zmm29 # AVX512_VPOPCNTDQ
vpopcntd zmm30{k7}, zmm29 # AVX512_VPOPCNTDQ
vpopcntd zmm30{k7}{z}, zmm29 # AVX512_VPOPCNTDQ
vpopcntd zmm30, ZMMWORD PTR [rcx] # AVX512_VPOPCNTDQ
vpopcntd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512_VPOPCNTDQ
vpopcntd zmm30, [rcx]{1to16} # AVX512_VPOPCNTDQ
vpopcntd zmm30, ZMMWORD PTR [rdx+8128] # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm30, ZMMWORD PTR [rdx+8192] # AVX512_VPOPCNTDQ
vpopcntd zmm30, ZMMWORD PTR [rdx-8192] # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm30, ZMMWORD PTR [rdx-8256] # AVX512_VPOPCNTDQ
vpopcntd zmm30, [rdx+508]{1to16} # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm30, [rdx+512]{1to16} # AVX512_VPOPCNTDQ
vpopcntd zmm30, [rdx-512]{1to16} # AVX512_VPOPCNTDQ Disp8
vpopcntd zmm30, [rdx-516]{1to16} # AVX512_VPOPCNTDQ
vpopcntq zmm30, zmm29 # AVX512_VPOPCNTDQ
vpopcntq zmm30{k7}, zmm29 # AVX512_VPOPCNTDQ
vpopcntq zmm30{k7}{z}, zmm29 # AVX512_VPOPCNTDQ
vpopcntq zmm30, ZMMWORD PTR [rcx] # AVX512_VPOPCNTDQ
vpopcntq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512_VPOPCNTDQ
vpopcntq zmm30, [rcx]{1to8} # AVX512_VPOPCNTDQ
vpopcntq zmm30, ZMMWORD PTR [rdx+8128] # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm30, ZMMWORD PTR [rdx+8192] # AVX512_VPOPCNTDQ
vpopcntq zmm30, ZMMWORD PTR [rdx-8192] # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm30, ZMMWORD PTR [rdx-8256] # AVX512_VPOPCNTDQ
vpopcntq zmm30, [rdx+1016]{1to8} # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm30, [rdx+1024]{1to8} # AVX512_VPOPCNTDQ
vpopcntq zmm30, [rdx-1024]{1to8} # AVX512_VPOPCNTDQ Disp8
vpopcntq zmm30, [rdx-1032]{1to8} # AVX512_VPOPCNTDQ
|
stsp/binutils-ia16
| 5,416
|
gas/testsuite/gas/i386/x86-64-optimize-3.s
|
# Check 64bit instructions with optimized encoding
.allow_index_reg
.text
_start:
# test with an immediate that fits in a sign-extended imm8 (0x7f):
# candidates for a shorter encoding.  All operand sizes and both
# low (rax/rbx) and REX-requiring (dil, r9, r12) registers are covered.
testq $0x7f, %rax
testl $0x7f, %eax
testw $0x7f, %ax
testb $0x7f, %al
test $0x7f, %rbx
test $0x7f, %ebx
test $0x7f, %bx
test $0x7f, %bl
test $0x7f, %rdi
test $0x7f, %edi
test $0x7f, %di
test $0x7f, %dil
test $0x7f, %r9
test $0x7f, %r9d
test $0x7f, %r9w
test $0x7f, %r9b
test $0x7f, %r12
test $0x7f, %r12d
test $0x7f, %r12w
test $0x7f, %r12b
# and/or of a register with itself: value-preserving, flag-setting forms
# at each operand size.
and %cl, %cl
and %dx, %dx
and %ebx, %ebx
and %rsp, %rsp
or %bpl, %bpl
or %si, %si
or %edi, %edi
or %r8, %r8
# EVEX-only mnemonics on register/memory operands that would also be
# encodable with shorter VEX forms; the assembler's encoding optimization
# is what the test checks.  NOTE(review): which of these actually get
# shortened is defined by the matching .d expected-output file.
vandnpd %zmm1, %zmm1, %zmm5
vmovdqa32 %xmm1, %xmm2
vmovdqa64 %xmm1, %xmm2
vmovdqu8 %xmm1, %xmm2
vmovdqu16 %xmm1, %xmm2
vmovdqu32 %xmm1, %xmm2
vmovdqu64 %xmm1, %xmm2
vmovdqa32 %xmm11, %xmm12
vmovdqa64 %xmm11, %xmm12
vmovdqu8 %xmm11, %xmm12
vmovdqu16 %xmm11, %xmm12
vmovdqu32 %xmm11, %xmm12
vmovdqu64 %xmm11, %xmm12
# Displacement 127 fits a VEX disp8 ...
vmovdqa32 127(%rax), %xmm2
vmovdqa64 127(%rax), %xmm2
vmovdqu8 127(%rax), %xmm2
vmovdqu16 127(%rax), %xmm2
vmovdqu32 127(%rax), %xmm2
vmovdqu64 127(%rax), %xmm2
# ... while 128 does not (EVEX disp8*N could still compress it).
vmovdqa32 %xmm1, 128(%rax)
vmovdqa64 %xmm1, 128(%rax)
vmovdqu8 %xmm1, 128(%rax)
vmovdqu16 %xmm1, 128(%rax)
vmovdqu32 %xmm1, 128(%rax)
vmovdqu64 %xmm1, 128(%rax)
vmovdqa32 %ymm1, %ymm2
vmovdqa64 %ymm1, %ymm2
vmovdqu8 %ymm1, %ymm2
vmovdqu16 %ymm1, %ymm2
vmovdqu32 %ymm1, %ymm2
vmovdqu64 %ymm1, %ymm2
vmovdqa32 %ymm11, %ymm12
vmovdqa64 %ymm11, %ymm12
vmovdqu8 %ymm11, %ymm12
vmovdqu16 %ymm11, %ymm12
vmovdqu32 %ymm11, %ymm12
vmovdqu64 %ymm11, %ymm12
vmovdqa32 127(%rax), %ymm2
vmovdqa64 127(%rax), %ymm2
vmovdqu8 127(%rax), %ymm2
vmovdqu16 127(%rax), %ymm2
vmovdqu32 127(%rax), %ymm2
vmovdqu64 127(%rax), %ymm2
vmovdqa32 %ymm1, 128(%rax)
vmovdqa64 %ymm1, 128(%rax)
vmovdqu8 %ymm1, 128(%rax)
vmovdqu16 %ymm1, 128(%rax)
vmovdqu32 %ymm1, 128(%rax)
vmovdqu64 %ymm1, 128(%rax)
# xmm21 (register 16-31) and zmm operands have no VEX encoding.
vmovdqa32 %xmm21, %xmm2
vmovdqa64 %xmm21, %xmm2
vmovdqu8 %xmm21, %xmm2
vmovdqu16 %xmm21, %xmm2
vmovdqu32 %xmm21, %xmm2
vmovdqu64 %xmm21, %xmm2
vmovdqa32 %zmm1, %zmm2
vmovdqa64 %zmm1, %zmm2
vmovdqu8 %zmm1, %zmm2
vmovdqu16 %zmm1, %zmm2
vmovdqu32 %zmm1, %zmm2
vmovdqu64 %zmm1, %zmm2
# {evex} pseudo-prefix explicitly requests the EVEX encoding.
{evex} vmovdqa32 %ymm1, %ymm2
{evex} vmovdqa64 %ymm1, %ymm2
{evex} vmovdqu8 %xmm1, %xmm2
{evex} vmovdqu16 %xmm1, %xmm2
{evex} vmovdqu32 %xmm1, %xmm2
{evex} vmovdqu64 %xmm1, %xmm2
# Masking ({%k1}, {%k1}{z}) is EVEX-only, so these cannot be shortened.
vmovdqa32 %ymm1, %ymm2{%k1}
vmovdqa64 %ymm1, %ymm2{%k1}
vmovdqu8 %xmm1, %xmm2{%k1}
vmovdqu16 %xmm1, %xmm2{%k1}
vmovdqu32 %xmm1, %xmm2{%k1}
vmovdqu64 %xmm1, %xmm2{%k1}
vmovdqa32 (%rax), %ymm2{%k1}
vmovdqa64 (%rax), %ymm2{%k1}
vmovdqu8 (%rax), %xmm2{%k1}
vmovdqu16 (%rax), %xmm2{%k1}
vmovdqu32 (%rax), %xmm2{%k1}
vmovdqu64 (%rax), %xmm2{%k1}
vmovdqa32 %ymm1, (%rax){%k1}
vmovdqa64 %ymm1, (%rax){%k1}
vmovdqu8 %xmm1, (%rax){%k1}
vmovdqu16 %xmm1, (%rax){%k1}
vmovdqu32 %xmm1, (%rax){%k1}
vmovdqu64 %xmm1, (%rax){%k1}
vmovdqa32 %xmm1, %xmm2{%k1}{z}
vmovdqa64 %xmm1, %xmm2{%k1}{z}
vmovdqu8 %xmm1, %xmm2{%k1}{z}
vmovdqu16 %xmm1, %xmm2{%k1}{z}
vmovdqu32 %xmm1, %xmm2{%k1}{z}
vmovdqu64 %xmm1, %xmm2{%k1}{z}
# EVEX integer-logic mnemonics (vpandd/q, vpandnd/q, vpord/q, vpxord/q)
# on operands that a plain VEX vpand/vpandn/vpor/vpxor could also encode;
# later groups (regs 16-31, masking, broadcast) have no VEX equivalent.
vpandd %xmm2, %xmm3, %xmm4
vpandq %xmm12, %xmm3, %xmm4
vpandnd %xmm2, %xmm13, %xmm4
vpandnq %xmm2, %xmm3, %xmm14
vpord %xmm2, %xmm3, %xmm4
vporq %xmm12, %xmm3, %xmm4
vpxord %xmm2, %xmm13, %xmm4
vpxorq %xmm2, %xmm3, %xmm14
vpandd %ymm2, %ymm3, %ymm4
vpandq %ymm12, %ymm3, %ymm4
vpandnd %ymm2, %ymm13, %ymm4
vpandnq %ymm2, %ymm3, %ymm14
vpord %ymm2, %ymm3, %ymm4
vporq %ymm12, %ymm3, %ymm4
vpxord %ymm2, %ymm13, %ymm4
vpxorq %ymm2, %ymm3, %ymm14
# Memory displacements straddling the disp8 compression boundaries:
# 112 (= 7*16) and 96 (= 3*32) compress; 128 needs checking per form.
vpandd 112(%rax), %xmm2, %xmm3
vpandq 112(%rax), %xmm2, %xmm3
vpandnd 112(%rax), %xmm2, %xmm3
vpandnq 112(%rax), %xmm2, %xmm3
vpord 112(%rax), %xmm2, %xmm3
vporq 112(%rax), %xmm2, %xmm3
vpxord 112(%rax), %xmm2, %xmm3
vpxorq 112(%rax), %xmm2, %xmm3
vpandd 128(%rax), %xmm2, %xmm3
vpandq 128(%rax), %xmm2, %xmm3
vpandnd 128(%rax), %xmm2, %xmm3
vpandnq 128(%rax), %xmm2, %xmm3
vpord 128(%rax), %xmm2, %xmm3
vporq 128(%rax), %xmm2, %xmm3
vpxord 128(%rax), %xmm2, %xmm3
vpxorq 128(%rax), %xmm2, %xmm3
vpandd 96(%rax), %ymm2, %ymm3
vpandq 96(%rax), %ymm2, %ymm3
vpandnd 96(%rax), %ymm2, %ymm3
vpandnq 96(%rax), %ymm2, %ymm3
vpord 96(%rax), %ymm2, %ymm3
vporq 96(%rax), %ymm2, %ymm3
vpxord 96(%rax), %ymm2, %ymm3
vpxorq 96(%rax), %ymm2, %ymm3
vpandd 128(%rax), %ymm2, %ymm3
vpandq 128(%rax), %ymm2, %ymm3
vpandnd 128(%rax), %ymm2, %ymm3
vpandnq 128(%rax), %ymm2, %ymm3
vpord 128(%rax), %ymm2, %ymm3
vporq 128(%rax), %ymm2, %ymm3
vpxord 128(%rax), %ymm2, %ymm3
vpxorq 128(%rax), %ymm2, %ymm3
# Registers 16-31 (xmm22 etc.) force EVEX: no VEX shortening possible.
vpandd %xmm22, %xmm23, %xmm24
vpandq %ymm22, %ymm3, %ymm4
vpandnd %ymm2, %ymm23, %ymm4
vpandnq %xmm2, %xmm3, %xmm24
vpord %xmm22, %xmm23, %xmm24
vporq %ymm22, %ymm3, %ymm4
vpxord %ymm2, %ymm23, %ymm4
vpxorq %xmm2, %xmm3, %xmm24
# Mask-register operands ({%k5}) are EVEX-only.
vpandd %xmm2, %xmm3, %xmm4{%k5}
vpandq %ymm12, %ymm3, %ymm4{%k5}
vpandnd %ymm2, %ymm13, %ymm4{%k5}
vpandnq %xmm2, %xmm3, %xmm14{%k5}
vpord %xmm2, %xmm3, %xmm4{%k5}
vporq %ymm12, %ymm3, %ymm4{%k5}
vpxord %ymm2, %ymm13, %ymm4{%k5}
vpxorq %xmm2, %xmm3, %xmm14{%k5}
# Embedded broadcast ({1toN}) is EVEX-only.
vpandd (%rax){1to8}, %ymm2, %ymm3
vpandq (%rax){1to2}, %xmm2, %xmm3
vpandnd (%rax){1to4}, %xmm2, %xmm3
vpandnq (%rax){1to4}, %ymm2, %ymm3
vpord (%rax){1to8}, %ymm2, %ymm3
vporq (%rax){1to2}, %xmm2, %xmm3
vpxord (%rax){1to4}, %xmm2, %xmm3
vpxorq (%rax){1to4}, %ymm2, %ymm3
|
stsp/binutils-ia16
| 1,923
|
gas/testsuite/gas/i386/bundle-lock.s
|
# Bundle size = 2^5 = 32 bytes.
.bundle_align_mode 5
# We use these macros to test each pattern at every offset from
# bundle alignment, i.e. [0,31].
# offset_sequence: emit one labelled, bundle-aligned test sequence of
# \size locked instructions starting \offset bytes into the bundle.
.macro offset_sequence size, offset
.p2align 5
sequence_\size\()_offset_\offset\():
.if \offset
# Pad to the requested offset with 0xf4 (hlt) filler bytes.
.space \offset, 0xf4
.endif
test_sequence \size
.endm
# test_offsets: expand offset_sequence for every offset 0..31, i.e. one
# \size-byte locked sequence at each possible position within a bundle.
.macro test_offsets size
offset_sequence \size, 0
offset_sequence \size, 1
offset_sequence \size, 2
offset_sequence \size, 3
offset_sequence \size, 4
offset_sequence \size, 5
offset_sequence \size, 6
offset_sequence \size, 7
offset_sequence \size, 8
offset_sequence \size, 9
offset_sequence \size, 10
offset_sequence \size, 11
offset_sequence \size, 12
offset_sequence \size, 13
offset_sequence \size, 14
offset_sequence \size, 15
offset_sequence \size, 16
offset_sequence \size, 17
offset_sequence \size, 18
offset_sequence \size, 19
offset_sequence \size, 20
offset_sequence \size, 21
offset_sequence \size, 22
offset_sequence \size, 23
offset_sequence \size, 24
offset_sequence \size, 25
offset_sequence \size, 26
offset_sequence \size, 27
offset_sequence \size, 28
offset_sequence \size, 29
offset_sequence \size, 30
offset_sequence \size, 31
.endm
# test_sequence: a bundle-locked run of \size one-byte instructions
# (one clc followed by \size-1 cld), which must not be split across a
# bundle boundary.
.macro test_sequence size
.bundle_lock
clc
.rept \size - 1
cld
.endr
.bundle_unlock
.endm
# Exercise locked-sequence lengths 1..32 (32 exactly fills a bundle) at
# every starting offset within a bundle.
test_offsets 1
test_offsets 2
test_offsets 3
test_offsets 4
test_offsets 5
test_offsets 6
test_offsets 7
test_offsets 8
test_offsets 9
test_offsets 10
test_offsets 11
test_offsets 12
test_offsets 13
test_offsets 14
test_offsets 15
test_offsets 16
test_offsets 17
test_offsets 18
test_offsets 19
test_offsets 20
test_offsets 21
test_offsets 22
test_offsets 23
test_offsets 24
test_offsets 25
test_offsets 26
test_offsets 27
test_offsets 28
test_offsets 29
test_offsets 30
test_offsets 31
test_offsets 32
.p2align 5
# Nested .bundle_lock.
# The inner lock/unlock pair must not end the outer locked region.
.bundle_lock
clc
.bundle_lock
cld
.bundle_unlock
clc
.bundle_unlock
.p2align 5
hlt
|
stsp/binutils-ia16
| 2,282
|
gas/testsuite/gas/i386/noavx512-2.s
|
# Test .arch .noavx512vl
# The same instruction list appears twice: first with AVX512VL enabled,
# then after ".arch .noavx512vl".  (Typo fixed: "AVX412CD" -> "AVX512CD".)
.text
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX512CD
vpconflictd %xmm5, %xmm6 # AVX512CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX512CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
# Disable AVX512VL: the 128/256-bit (+ AVX512VL) forms above are repeated
# below and should now be diagnosed/handled differently by the assembler.
.arch .noavx512vl
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX512CD
vpconflictd %xmm5, %xmm6 # AVX512CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX512CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
# Unmasked 128/256-bit forms (encodable as plain AVX/AVX2/SSE).
vpabsb %xmm5, %xmm6
vpabsb %ymm5, %ymm6
vaddpd %xmm4, %xmm5, %xmm6
vaddpd %ymm4, %ymm5, %ymm6
pabsb %xmm5, %xmm6
addpd %xmm4, %xmm6
.intel_syntax noprefix
vfpclasspd k0, [eax], 0
vfpclassps k0, [eax+0x80], 0
.p2align 4
|
stsp/binutils-ia16
| 1,200
|
gas/testsuite/gas/i386/x86-64-mpx-addr32.s
|
# MPX (bnd*) instructions each preceded by a hand-emitted 0x67
# address-size prefix: checks that the assembler/disassembler handle
# 32-bit addressing (addr32) forms of MPX in 64-bit mode.  The ';'
# after .byte is a GAS statement separator, not a comment.
.byte 0x67; bndmk (%rax),%bnd1
.byte 0x67; bndmk 0x3(%rcx,%rbx,1),%bnd1
.byte 0x67; bndmov (%r8),%bnd1
.byte 0x67; bndmov 0x3(%r9,%rdx,1),%bnd1
.byte 0x67; bndmov %bnd1,(%rax)
.byte 0x67; bndmov %bnd1,0x3(%rcx,%rax,1)
.byte 0x67; bndcl (%rcx),%bnd1
.byte 0x67; bndcl 0x3(%rcx,%rax,1),%bnd1
.byte 0x67; bndcu (%rcx),%bnd1
.byte 0x67; bndcu 0x3(%rcx,%rax,1),%bnd1
.byte 0x67; bndcn (%rcx),%bnd1
.byte 0x67; bndcn 0x3(%rcx,%rax,1),%bnd1
.byte 0x67; bndstx %bnd0,0x3(%rax,%rbx,1)
.byte 0x67; bndstx %bnd2,0x3(%rbx)
.byte 0x67; bndldx 0x3(%rax,%rbx,1),%bnd0
.byte 0x67; bndldx 0x3(%rbx),%bnd2
# Second group: same operations with different base/index register
# combinations (including an index-only SIB form at the end).
.byte 0x67; bndmk (%rax),%bnd1
.byte 0x67; bndmk 0x3(%rdx,%rax,1),%bnd1
.byte 0x67; bndmov (%rax),%bnd1
.byte 0x67; bndmov 0x3(%rdx,%rax,1),%bnd1
.byte 0x67; bndmov %bnd1,(%rax)
.byte 0x67; bndmov %bnd1,0x3(%rdx,%rax,1)
.byte 0x67; bndcl (%rax),%bnd1
.byte 0x67; bndcl 0x3(%rdx,%rax,1),%bnd1
.byte 0x67; bndcu (%rax),%bnd1
.byte 0x67; bndcu 0x3(%rdx,%rax,1),%bnd1
.byte 0x67; bndcn (%rax),%bnd1
.byte 0x67; bndcn 0x3(%rdx,%rax,1),%bnd1
.byte 0x67; bndstx %bnd0,0x3(%rax,%rbx,1)
.byte 0x67; bndstx %bnd2,0x3(,%rbx,1)
.byte 0x67; bndldx 0x3(%rax,%rbx,1),%bnd0
.byte 0x67; bndldx 0x3(,%rbx,1),%bnd2
|
stsp/binutils-ia16
| 1,217
|
gas/testsuite/gas/i386/secrel.s
|
# Exercise 32-bit section-relative relocations via both the .secrel32
# directive and the @secrel32 operator, against four groups of labels:
#   pre* - defined earlier, in a different section (.text)
#   sam* - defined earlier, in the same section (.data)
#   nex* - defined later in this file (.rdata)
#   ext* - never defined here (external)
# The .ascii ">>>>"/"<<<<" runs pad each label to a known offset
# (encoded in the label name, e.g. pre04 sits at offset 0x04).
.text
.ascii ">>>>"
pre04: .ascii "<<<<"
.ascii ">>>>>"
pre0d: .ascii "<<<"
.ascii ">>>>>>"
pre16: .ascii "<<"
.ascii ">>>>>>>"
pre1f: .ascii "<"
.data
.ascii ">>>>"
sam04: .ascii "<<<<"
.ascii ">>>>>"
sam0d: .ascii "<<<"
.ascii ">>>>>>"
sam16: .ascii "<<"
.ascii ">>>>>>>"
sam1f: .ascii "<"
# Relocations against the .text (other-section) labels.
.ascii ">>>>"
.secrel32 pre04
.byte 0x11
.secrel32 pre0d
.byte 0x11
.secrel32 pre16
.byte 0x11
.long pre1f@secrel32
.byte 0x11
.ascii "<<<<<<<<"
# Relocations against same-section labels.
.ascii ">>>>"
.secrel32 sam04
.byte 0x11
.secrel32 sam0d
.byte 0x11
.long sam16@secrel32
.byte 0x11
.secrel32 sam1f
.byte 0x11
.ascii "<<<<<<<<"
# Forward references: nex* are defined below in .rdata.
.ascii ">>>>"
.secrel32 nex04
.byte 0x11
.long nex0d@secrel32
.byte 0x11
.secrel32 nex16
.byte 0x11
.secrel32 nex1f
.byte 0x11
.ascii "<<<<<<<<"
# Undefined (external) symbols.
.ascii ">>>>"
.long ext24@secrel32
.byte 0x11
.secrel32 ext2d
.byte 0x11
.secrel32 ext36
.byte 0x11
.secrel32 ext3f
.byte 0x11
.ascii "<<<<<<<<"
# secrel32 combined with constant and symbolic (.equ) addends, and
# used inside an instruction operand.
.long pre0d@secrel32+3
.long pre16@secrel32+six
leal bar@SECREL32+44(%eax), %edx
.section .rdata
.ascii ">>>>"
nex04: .ascii "<<<<"
.ascii ">>>>>"
nex0d: .ascii "<<<"
.ascii ">>>>>>"
nex16: .ascii "<<"
.ascii ">>>>>>>"
nex1f: .ascii "<"
.ascii ">>>>"
.p2align 4,0
.equ six,6
|
stsp/binutils-ia16
| 2,706
|
gas/testsuite/gas/i386/avx512bitalg.s
|
# Check 32bit AVX512BITALG instructions
# Covers vpshufbitqmb and vpopcnt{b,w,d,q} in register, masked,
# zero-masked, memory, Disp8-compressed, and (for d/q) broadcast forms.
.allow_index_reg
.text
_start:
vpshufbitqmb %zmm4, %zmm5, %k5 # AVX512BITALG
vpshufbitqmb %zmm4, %zmm5, %k5{%k7} # AVX512BITALG
vpshufbitqmb -123456(%esp,%esi,8), %zmm5, %k5 # AVX512BITALG
vpshufbitqmb 8128(%edx), %zmm5, %k5 # AVX512BITALG Disp8
vpopcntb %zmm5, %zmm6 # AVX512BITALG
vpopcntb %zmm5, %zmm6{%k7} # AVX512BITALG
vpopcntb %zmm5, %zmm6{%k7}{z} # AVX512BITALG
vpopcntb -123456(%esp,%esi,8), %zmm6 # AVX512BITALG
vpopcntb 8128(%edx), %zmm6 # AVX512BITALG Disp8
vpopcntw %zmm5, %zmm6 # AVX512BITALG
vpopcntw %zmm5, %zmm6{%k7} # AVX512BITALG
vpopcntw %zmm5, %zmm6{%k7}{z} # AVX512BITALG
vpopcntw -123456(%esp,%esi,8), %zmm6 # AVX512BITALG
vpopcntw 8128(%edx), %zmm6 # AVX512BITALG Disp8
vpopcntd %zmm5, %zmm6 # AVX512BITALG
vpopcntd %zmm5, %zmm6{%k7} # AVX512BITALG
vpopcntd %zmm5, %zmm6{%k7}{z} # AVX512BITALG
vpopcntd -123456(%esp,%esi,8), %zmm6 # AVX512BITALG
vpopcntd 8128(%edx), %zmm6 # AVX512BITALG Disp8
vpopcntd 508(%edx){1to16}, %zmm6 # AVX512BITALG Disp8
vpopcntq %zmm5, %zmm6 # AVX512BITALG
vpopcntq %zmm5, %zmm6{%k7} # AVX512BITALG
vpopcntq %zmm5, %zmm6{%k7}{z} # AVX512BITALG
vpopcntq -123456(%esp,%esi,8), %zmm6 # AVX512BITALG
vpopcntq 8128(%edx), %zmm6 # AVX512BITALG Disp8
vpopcntq 1016(%edx){1to8}, %zmm6 # AVX512BITALG Disp8
# Same instruction set repeated in Intel syntax; must produce the
# identical encodings as the AT&T forms above.
.intel_syntax noprefix
vpshufbitqmb k5, zmm5, zmm4 # AVX512BITALG
vpshufbitqmb k5{k7}, zmm5, zmm4 # AVX512BITALG
vpshufbitqmb k5, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512BITALG
vpshufbitqmb k5, zmm5, ZMMWORD PTR [edx+8128] # AVX512BITALG Disp8
vpopcntb zmm6, zmm5 # AVX512BITALG
vpopcntb zmm6{k7}, zmm5 # AVX512BITALG
vpopcntb zmm6{k7}{z}, zmm5 # AVX512BITALG
vpopcntb zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BITALG
vpopcntb zmm6, ZMMWORD PTR [edx+8128] # AVX512BITALG Disp8
vpopcntw zmm6, zmm5 # AVX512BITALG
vpopcntw zmm6{k7}, zmm5 # AVX512BITALG
vpopcntw zmm6{k7}{z}, zmm5 # AVX512BITALG
vpopcntw zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BITALG
vpopcntw zmm6, ZMMWORD PTR [edx+8128] # AVX512BITALG Disp8
vpopcntd zmm6, zmm5 # AVX512BITALG
vpopcntd zmm6{k7}, zmm5 # AVX512BITALG
vpopcntd zmm6{k7}{z}, zmm5 # AVX512BITALG
vpopcntd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BITALG
vpopcntd zmm6, ZMMWORD PTR [edx+8128] # AVX512BITALG Disp8
vpopcntd zmm6, [edx+508]{1to16} # AVX512BITALG Disp8
vpopcntq zmm6, zmm5 # AVX512BITALG
vpopcntq zmm6{k7}, zmm5 # AVX512BITALG
vpopcntq zmm6{k7}{z}, zmm5 # AVX512BITALG
vpopcntq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512BITALG
vpopcntq zmm6, ZMMWORD PTR [edx+8128] # AVX512BITALG Disp8
vpopcntq zmm6, [edx+1016]{1to8} # AVX512BITALG Disp8
|
stsp/binutils-ia16
| 117,571
|
gas/testsuite/gas/i386/x86-64-avx512_fp16_vl.s
|
# Check 64bit AVX512-FP16,AVX512VL instructions
.allow_index_reg
.text
_start:
vaddph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vaddph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vaddph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vaddph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcmpph $123, %ymm28, %ymm29, %k5 #AVX512-FP16,AVX512VL
vcmpph $123, %ymm28, %ymm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, %xmm28, %xmm29, %k5 #AVX512-FP16,AVX512VL
vcmpph $123, %xmm28, %xmm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, 0x10000000(%rbp, %r14, 8), %xmm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, (%r9){1to8}, %xmm29, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph $123, 2032(%rcx), %xmm29, %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph $123, -256(%rdx){1to8}, %xmm29, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpph $123, 0x10000000(%rbp, %r14, 8), %ymm29, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph $123, (%r9){1to16}, %ymm29, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph $123, 4064(%rcx), %ymm29, %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph $123, -256(%rdx){1to16}, %ymm29, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcvtdq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtdq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtdq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtdq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph -512(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtdq2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph -512(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtpd2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtpd2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtpd2ph (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph -1024(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph -1024(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2dq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2dq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2pd %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2pd %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2psx %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2psx %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2qq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2qq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2udq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2udq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2uqq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2uqq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2uw %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2uw %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtph2w %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtph2w %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtps2phx %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtps2phx %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phxx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtps2phx (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phxx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx -512(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phxy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx -512(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtqq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtqq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtqq2ph (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph -1024(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph -1024(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2dq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2dq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2qq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2qq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2udq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2udq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq 1016(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq -256(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq (%r9){1to8}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq 2032(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq -256(%rdx){1to8}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2uqq %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq %xmm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2uqq %xmm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq 508(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq -256(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq (%r9){1to4}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq 1016(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq -256(%rdx){1to4}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2uw %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2uw %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvttph2w %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvttph2w %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtudq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtudq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtudq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph -512(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph -512(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtuqq2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph %ymm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtuqq2ph %ymm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2phx 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuqq2ph (%r9){1to2}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2phx 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph -1024(%rdx){1to2}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph (%r9){1to4}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2phy 4064(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph -1024(%rdx){1to4}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtuw2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtuw2ph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vcvtw2ph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vcvtw2ph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vdivph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vdivph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vdivph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfcmaddcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfcmaddcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfcmulcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfcmulcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# ---------------------------------------------------------------------------
# AVX512-FP16 (+AVX512VL) FMA-family encoding tests, AT&T syntax,
# EVEX.128/256 vector lengths only.
# Each 12-line group exercises one mnemonic in a fixed pattern:
#   1-4:  register-only forms (ymm then xmm, plain and with {%k7}{z}
#         merge-masking + zeroing),
#   5-8:  256-bit memory forms — disp32 SIB, {1toN} broadcast,
#         largest positive disp8*N displacement, smallest negative
#         disp8*N displacement combined with broadcast + masking,
#   9-12: the same four memory forms at 128 bits.
# The trailing '#AVX512-FP16,...' tags (BROADCAST_EN, Disp8(..),
# MASK_ENABLING, ZEROCTL) are feature markers for the test harness, and the
# exact operands determine the expected encodings in the companion dump
# file — do not edit or reorder these lines.
# For the word-element (ph) instructions, Disp8(7f)/Disp8(80) displacements
# scale by the full vector width (0x7f*32 = 4064, 0x7f*16 = 2032) for full
# memory operands and by the 2-byte element (0x80*2 = -256) for broadcasts.
# ---------------------------------------------------------------------------
vfmadd132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmadd132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmadd132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmadd213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmadd213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmadd231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmadd231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddcph (and vfmulcph below) operate on complex FP16 pairs: the broadcast
# element is 32 bits (one real/imaginary pair), so 256-bit forms use {1to8},
# 128-bit forms {1to4}, and broadcast disp8 compression scales by 4
# (hence -512 = -0x80*4).
vfmaddcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddsub132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddsub132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddsub213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddsub213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmaddsub231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmaddsub231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsub132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsub132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsub213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsub213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsub231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsub231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsubadd132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsubadd132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsubadd213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsubadd213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmsubadd231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmsubadd231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmulcph: complex FP16 multiply — 32-bit complex-pair broadcast element,
# same {1to8}/{1to4} factors and disp8*4 scaling as vfmaddcph above.
vfmulcph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfmulcph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfmulcph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph (%r9){1to8}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph -512(%rdx){1to8}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph (%r9){1to4}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph -512(%rdx){1to4}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmadd132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmadd132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmadd213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmadd213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmadd231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmadd231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmsub132ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmsub132ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmsub213ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmsub213ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vfnmsub231ph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vfnmsub231ph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# ---------------------------------------------------------------------------
# AVX512-FP16 (+AVX512VL) classify / extract / arithmetic tests, AT&T syntax.
# vfpclassph writes a mask-register result (%k5); the explicit 'x'/'y'
# mnemonic suffixes (vfpclassphx/vfpclassphy) disambiguate the memory
# operand size where no register operand implies it.
# The remaining groups follow the same register/memory/broadcast/disp8*N
# pattern as the FMA section above (2-byte element: -256 = -0x80*2;
# full-width disp8: 4064 = 0x7f*32, 2032 = 0x7f*16).
# Trailing '#...' tags are harness feature markers — keep lines byte-exact.
# ---------------------------------------------------------------------------
vfpclassph $123, %xmm30, %k5 #AVX512-FP16,AVX512VL
vfpclassph $123, %xmm30, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph $123, %ymm30, %k5 #AVX512-FP16,AVX512VL
vfpclassph $123, %ymm30, %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassphx $123, 0x10000000(%rbp, %r14, 8), %k5{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph $123, (%r9){1to8}, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassphx $123, 2032(%rcx), %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph $123, -256(%rdx){1to8}, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclassph $123, (%r9){1to16}, %k5 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassphy $123, 4064(%rcx), %k5 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph $123, -256(%rdx){1to16}, %k5{%k7} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vgetexpph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vgetexpph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vgetexpph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vgetmantph $123, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph $123, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vgetmantph $123, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph $123, 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph $123, (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph $123, 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph $123, -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph $123, 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph $123, (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph $123, 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph $123, -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vmaxph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vmaxph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vminph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vminph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vminph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vminph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vminph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vminph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vminph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vminph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vmulph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vmulph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmulph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vmulph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vrcpph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vrcpph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vreduceph $123, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph $123, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vreduceph $123, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph $123, 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph $123, (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph $123, 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph $123, -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph $123, 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph $123, (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph $123, 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph $123, -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vrndscaleph $123, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph $123, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vrndscaleph $123, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph $123, 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph $123, (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph $123, 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph $123, -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph $123, 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph $123, (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph $123, 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph $123, -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vrsqrtph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vrsqrtph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vscalefph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vscalefph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vsqrtph %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vsqrtph %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph 0x10000000(%rbp, %r14, 8), %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph (%r9){1to8}, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph 2032(%rcx), %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph -256(%rdx){1to8}, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph 0x10000000(%rbp, %r14, 8), %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph (%r9){1to16}, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph 4064(%rcx), %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph -256(%rdx){1to16}, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph %ymm28, %ymm29, %ymm30 #AVX512-FP16,AVX512VL
vsubph %ymm28, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph %xmm28, %xmm29, %xmm30 #AVX512-FP16,AVX512VL
vsubph %xmm28, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph 0x10000000(%rbp, %r14, 8), %ymm29, %ymm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph (%r9){1to16}, %ymm29, %ymm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph 4064(%rcx), %ymm29, %ymm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsubph -256(%rdx){1to16}, %ymm29, %ymm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph 0x10000000(%rbp, %r14, 8), %xmm29, %xmm30{%k7} #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph (%r9){1to8}, %xmm29, %xmm30 #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph 2032(%rcx), %xmm29, %xmm30 #AVX512-FP16,AVX512VL Disp8(7f)
vsubph -256(%rdx){1to8}, %xmm29, %xmm30{%k7}{z} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
.intel_syntax noprefix
vaddph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vaddph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vaddph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vaddph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vaddph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vaddph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vaddph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vaddph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vaddph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vcmpph: compare into mask register k5 with immediate predicate 123;
# covers reg-reg and memory/broadcast forms for both xmm and ymm widths
# (no {z} — zeroing does not apply to a mask-register destination).
vcmpph k5, ymm29, ymm28, 123 #AVX512-FP16,AVX512VL
vcmpph k5{k7}, ymm29, ymm28, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5, xmm29, xmm28, 123 #AVX512-FP16,AVX512VL
vcmpph k5{k7}, xmm29, xmm28, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5, xmm29, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph k5, xmm29, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph k5{k7}, xmm29, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vcmpph k5{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vcmpph k5, ymm29, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vcmpph k5, ymm29, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vcmpph k5{k7}, ymm29, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
# Narrowing conversions to FP16: vcvtdq2ph (int32 source, DWORD broadcast
# {1to4}/{1to8}) and vcvtpd2ph (double source, QWORD broadcast {1to2}/{1to4}).
# Destination is always xmm; source width (xmm vs ymm memory) is
# disambiguated by the PTR/BCST size in the memory forms.
vcvtdq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtdq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtdq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtdq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtdq2ph xmm30, DWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtdq2ph xmm30, DWORD BCST [r9]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtdq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtdq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtpd2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtpd2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtpd2ph xmm30, QWORD BCST [r9]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtpd2ph xmm30, QWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtpd2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtpd2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Widening conversions from FP16 (vcvtph2dq / vcvtph2pd / vcvtph2psx /
# vcvtph2qq): source element count shrinks as the destination element
# widens, so the memory source is QWORD/DWORD/XMMWORD sized accordingly,
# with WORD BCST broadcast forms throughout.
vcvtph2dq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2dq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2dq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2dq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2dq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2dq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2dq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2dq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2pd xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2pd ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2pd xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2pd ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2pd ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2pd ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2pd ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2psx xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2psx ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2psx xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2psx ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2psx ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2psx ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2psx ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2qq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2qq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2qq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2qq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2qq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2qq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2qq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Unsigned/word-destination conversions from FP16: vcvtph2udq, vcvtph2uqq
# (element-widening, narrow memory sources), vcvtph2uw and vcvtph2w
# (same-width 16-bit destinations, full XMMWORD/YMMWORD memory sources).
vcvtph2udq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2udq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2udq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2udq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2udq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2udq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2udq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2udq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2uqq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2uqq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uqq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uqq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uqq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uqq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uqq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2uw xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtph2uw ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2uw xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2uw ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2uw ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2uw ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2uw ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtph2w xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtph2w ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtph2w xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtph2w ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtph2w ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtph2w ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtph2w ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Narrowing to FP16 from single (vcvtps2phx, DWORD broadcast {1to4}/{1to8})
# and from int64 (vcvtqq2ph, QWORD broadcast {1to2}/{1to4}); xmm destination
# in all forms.
vcvtps2phx xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtps2phx xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtps2phx xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtps2phx xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtps2phx xmm30, DWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phx xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx xmm30{k7}{z}, DWORD BCST [rdx-512]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtps2phx xmm30, DWORD BCST [r9]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtps2phx xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtps2phx xmm30{k7}{z}, DWORD BCST [rdx-512]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtqq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtqq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtqq2ph xmm30, QWORD BCST [r9]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtqq2ph xmm30, QWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtqq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Truncating FP16-to-integer conversions (round toward zero):
# vcvttph2dq / vcvttph2qq / vcvttph2udq / vcvttph2uqq, same operand
# matrix as the non-truncating vcvtph2* groups above.
vcvttph2dq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2dq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2dq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2dq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2dq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2dq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2dq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2dq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2qq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2qq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2qq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2qq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2qq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2qq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2qq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2udq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2udq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2udq xmm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq xmm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2udq ymm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2udq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2udq ymm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2udq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2uqq xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq ymm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2uqq ymm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uqq xmm30{k7}, DWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq xmm30, DWORD PTR [rcx+508] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uqq ymm30{k7}, QWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uqq ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uqq ymm30, QWORD PTR [rcx+1016] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uqq ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Truncating FP16-to-16-bit-integer conversions (vcvttph2uw / vcvttph2w):
# same-width destinations, so both xmm and ymm register forms and full
# XMMWORD/YMMWORD memory sources are covered.
vcvttph2uw xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2uw xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvttph2uw ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2uw xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2uw ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2uw ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2uw ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2uw ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvttph2w xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvttph2w ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvttph2w xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvttph2w ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvttph2w ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvttph2w ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvttph2w ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Integer-to-FP16 conversions: vcvtudq2ph (uint32, DWORD broadcast),
# vcvtuqq2ph (uint64, QWORD broadcast), and the same-width vcvtuw2ph /
# vcvtw2ph (16-bit sources, WORD broadcast, xmm and ymm destinations).
vcvtudq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtudq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtudq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtudq2ph xmm30, DWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtudq2ph xmm30, DWORD BCST [r9]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtudq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtudq2ph xmm30{k7}{z}, DWORD BCST [rdx-512]{1to8} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtuqq2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30, ymm29 #AVX512-FP16,AVX512VL
vcvtuqq2ph xmm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuqq2ph xmm30, QWORD BCST [r9]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to2} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuqq2ph xmm30, QWORD BCST [r9]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuqq2ph xmm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuqq2ph xmm30{k7}{z}, QWORD BCST [rdx-1024]{1to4} #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtuw2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtuw2ph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtuw2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtuw2ph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtuw2ph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtuw2ph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtuw2ph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph xmm30, xmm29 #AVX512-FP16,AVX512VL
vcvtw2ph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph ymm30, ymm29 #AVX512-FP16,AVX512VL
vcvtw2ph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vcvtw2ph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vcvtw2ph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vcvtw2ph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vcvtw2ph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vcvtw2ph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vdivph: three-operand FP16 divide, same reg/mask/broadcast/Disp8 matrix
# as vaddph above.
vdivph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vdivph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vdivph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vdivph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vdivph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vdivph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vdivph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vdivph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vdivph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# Complex FP16 arithmetic (conjugate forms): vfcmaddcph / vfcmulcph.
# These treat element pairs as complex numbers, so the broadcast element
# is DWORD (one real/imaginary pair) rather than WORD.
vfcmaddcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfcmaddcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfcmaddcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmaddcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmaddcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmaddcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmaddcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmaddcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfcmulcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfcmulcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfcmulcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfcmulcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfcmulcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfcmulcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfcmulcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# FP16 fused multiply-add, all three operand orderings (132/213/231),
# WORD-element broadcast.
vfmadd132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmadd132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmadd132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmadd213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmadd213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmadd231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmadd231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmadd231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmadd231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmadd231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmadd231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmadd231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# vfmaddcph: complex FP16 multiply-add (non-conjugate counterpart of
# vfcmaddcph above); DWORD broadcast per complex pair.
vfmaddcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# FP16 fused multiply with alternating add/sub, all orderings (132/213/231).
vfmaddsub132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddsub132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddsub132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddsub213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddsub213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmaddsub231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmaddsub231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmaddsub231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmaddsub231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmaddsub231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmaddsub231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmaddsub231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
# FP16 fused multiply-subtract, all orderings (132/213/231).
vfmsub132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsub132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsub132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsub213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsub213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsub231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsub231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsub231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsub231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsub231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsub231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsub231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsubadd132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsubadd132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsubadd213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsubadd213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmsubadd231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmsubadd231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmsubadd231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmsubadd231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmsubadd231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmsubadd231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmsubadd231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfmulcph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfmulcph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfmulcph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph ymm30, ymm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph ymm30{k7}{z}, ymm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfmulcph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfmulcph xmm30, xmm29, DWORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfmulcph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfmulcph xmm30{k7}{z}, xmm29, DWORD BCST [rdx-512] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmadd132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmadd132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmadd213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmadd213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmadd231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmadd231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmadd231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmadd231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmadd231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmadd231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmadd231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmsub132ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmsub132ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub132ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub132ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub132ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub132ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub132ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmsub213ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmsub213ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub213ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub213ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub213ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub213ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub213ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vfnmsub231ph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vfnmsub231ph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vfnmsub231ph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfnmsub231ph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vfnmsub231ph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vfnmsub231ph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vfnmsub231ph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vfpclassph k5, xmm30, 123 #AVX512-FP16,AVX512VL
vfpclassph k5{k7}, xmm30, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5, ymm30, 123 #AVX512-FP16,AVX512VL
vfpclassph k5{k7}, ymm30, 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vfpclassph k5, WORD BCST [r9]{1to8}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassph k5, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph k5{k7}, WORD BCST [rdx-256]{1to8}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vfpclassph k5, WORD BCST [r9]{1to16}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vfpclassph k5, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vfpclassph k5{k7}, WORD BCST [rdx-256]{1to16}, 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING
vgetexpph xmm30, xmm29 #AVX512-FP16,AVX512VL
vgetexpph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph ymm30, ymm29 #AVX512-FP16,AVX512VL
vgetexpph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetexpph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetexpph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vgetexpph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vgetexpph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vgetexpph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph ymm30, ymm29, 123 #AVX512-FP16,AVX512VL
vgetmantph ymm30{k7}{z}, ymm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph xmm30, xmm29, 123 #AVX512-FP16,AVX512VL
vgetmantph xmm30{k7}{z}, xmm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vgetmantph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph xmm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph xmm30, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph xmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vgetmantph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vgetmantph ymm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vgetmantph ymm30, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vgetmantph ymm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vmaxph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vmaxph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmaxph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmaxph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmaxph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmaxph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vmaxph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vminph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vminph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vminph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vminph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vminph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vminph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vminph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vminph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vminph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vminph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vmulph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vmulph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vmulph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vmulph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vmulph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vmulph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vmulph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vmulph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph xmm30, xmm29 #AVX512-FP16,AVX512VL
vrcpph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph ymm30, ymm29 #AVX512-FP16,AVX512VL
vrcpph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrcpph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrcpph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrcpph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrcpph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vrcpph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph ymm30, ymm29, 123 #AVX512-FP16,AVX512VL
vreduceph ymm30{k7}{z}, ymm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph xmm30, xmm29, 123 #AVX512-FP16,AVX512VL
vreduceph xmm30{k7}{z}, xmm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vreduceph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph xmm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph xmm30, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph xmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vreduceph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vreduceph ymm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vreduceph ymm30, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vreduceph ymm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph ymm30, ymm29, 123 #AVX512-FP16,AVX512VL
vrndscaleph ymm30{k7}{z}, ymm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph xmm30, xmm29, 123 #AVX512-FP16,AVX512VL
vrndscaleph xmm30{k7}{z}, xmm29, 123 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrndscaleph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph xmm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph xmm30, XMMWORD PTR [rcx+2032], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph xmm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrndscaleph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000], 123 #AVX512-FP16,AVX512VL MASK_ENABLING
vrndscaleph ymm30, WORD BCST [r9], 123 #AVX512-FP16,AVX512VL BROADCAST_EN
vrndscaleph ymm30, YMMWORD PTR [rcx+4064], 123 #AVX512-FP16,AVX512VL Disp8(7f)
vrndscaleph ymm30{k7}{z}, WORD BCST [rdx-256], 123 #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph xmm30, xmm29 #AVX512-FP16,AVX512VL
vrsqrtph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph ymm30, ymm29 #AVX512-FP16,AVX512VL
vrsqrtph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vrsqrtph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vrsqrtph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vrsqrtph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vrsqrtph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vrsqrtph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vscalefph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vscalefph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vscalefph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vscalefph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vscalefph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vscalefph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vscalefph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph xmm30, xmm29 #AVX512-FP16,AVX512VL
vsqrtph xmm30{k7}{z}, xmm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph ymm30, ymm29 #AVX512-FP16,AVX512VL
vsqrtph ymm30{k7}{z}, ymm29 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsqrtph xmm30{k7}, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph xmm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph xmm30, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph xmm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsqrtph ymm30{k7}, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsqrtph ymm30, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsqrtph ymm30, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vsqrtph ymm30{k7}{z}, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph ymm30, ymm29, ymm28 #AVX512-FP16,AVX512VL
vsubph ymm30{k7}{z}, ymm29, ymm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph xmm30, xmm29, xmm28 #AVX512-FP16,AVX512VL
vsubph xmm30{k7}{z}, xmm29, xmm28 #AVX512-FP16,AVX512VL MASK_ENABLING ZEROCTL
vsubph ymm30{k7}, ymm29, YMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph ymm30, ymm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph ymm30, ymm29, YMMWORD PTR [rcx+4064] #AVX512-FP16,AVX512VL Disp8(7f)
vsubph ymm30{k7}{z}, ymm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
vsubph xmm30{k7}, xmm29, XMMWORD PTR [rbp+r14*8+0x10000000] #AVX512-FP16,AVX512VL MASK_ENABLING
vsubph xmm30, xmm29, WORD BCST [r9] #AVX512-FP16,AVX512VL BROADCAST_EN
vsubph xmm30, xmm29, XMMWORD PTR [rcx+2032] #AVX512-FP16,AVX512VL Disp8(7f)
vsubph xmm30{k7}{z}, xmm29, WORD BCST [rdx-256] #AVX512-FP16,AVX512VL BROADCAST_EN Disp8(80) MASK_ENABLING ZEROCTL
|
stsp/binutils-ia16
| 9,338
|
gas/testsuite/gas/i386/noavx512-1.s
|
# Test .arch .noavx512XX
.text
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512bw
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512cd
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512dq
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512er
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512ifma
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512pf
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512vbmi
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6 # AVX412CD
vpconflictd %xmm5, %xmm6 # AVX412CD + AVX512VL
vpconflictd %ymm5, %ymm6 # AVX412CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
.arch .noavx512f
vpabsb %zmm5, %zmm6{%k7} # AVX512BW
vpabsb %xmm5, %xmm6{%k7} # AVX512BW + AVX512VL
vpabsb %ymm5, %ymm6{%k7} # AVX512BW + AVX512VL
vpconflictd %zmm5, %zmm6	 # AVX512CD
vpconflictd %xmm5, %xmm6	 # AVX512CD + AVX512VL
vpconflictd %ymm5, %ymm6	 # AVX512CD + AVX512VL
vcvtpd2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtpd2qq (%ecx), %xmm6{%k7} # AVX512DQ + AVX512VL
vcvtpd2qq (%ecx), %ymm6{%k7} # AVX512DQ + AVX512VL
vexp2ps %zmm5, %zmm6{%k7} # AVX512ER
vaddpd %zmm4, %zmm5, %zmm6{%k7} # AVX512F
vaddpd %xmm4, %xmm5, %xmm6{%k7} # AVX512F + AVX512VL
vaddpd %ymm4, %ymm5, %ymm6{%k7} # AVX512F + AVX512VL
vpmadd52luq %zmm4, %zmm5, %zmm6{%k7} # AVX512IFMA
vpmadd52luq %xmm4, %xmm5, %xmm6{%k7} # AVX512IFMA + AVX512VL
vpmadd52luq %ymm4, %ymm5, %ymm6{%k7} # AVX512IFMA + AVX512VL
vgatherpf0dpd 23(%ebp,%ymm7,8){%k1} # AVX512PF
vpermb %zmm4, %zmm5, %zmm6{%k7} # AVX512VBMI
vpermb %xmm4, %xmm5, %xmm6{%k7} # AVX512VBMI + AVX512VL
vpermb %ymm4, %ymm5, %ymm6{%k7} # AVX512VBMI + AVX512VL
vpabsb %xmm5, %xmm6
vpabsb %ymm5, %ymm6
vaddpd %xmm4, %xmm5, %xmm6
vaddpd %ymm4, %ymm5, %ymm6
pabsb %xmm5, %xmm6
addpd %xmm4, %xmm6
.p2align 4
# ----------------------------------------------------------------------
# NOTE(review): the lines above end one testsuite source; the content
# that follows originates from a different testsuite file:
#   gas/testsuite/gas/i386/x86-64-avx_gfni.s
# (Non-assembler separator text from the extraction was repaired into
# comments so the file remains valid gas input.)
# ----------------------------------------------------------------------
# Check AVX GFNI instructions
.allow_index_reg
.text
_start:
vgf2p8mulb %ymm4, %ymm5, %ymm6
vgf2p8mulb -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8mulb 126(%rdx), %ymm5, %ymm6
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8affineqb $123, 126(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8affineinvqb $123, 126(%rdx), %ymm5, %ymm6
vgf2p8mulb %xmm4, %xmm5, %xmm6
vgf2p8mulb -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8mulb 126(%rdx), %xmm5, %xmm6
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8affineqb $123, 126(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8affineinvqb $123, 126(%rdx), %xmm5, %xmm6
.intel_syntax noprefix
vgf2p8mulb ymm6, ymm5, ymm4
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx+126]
vgf2p8affineqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx+126], 123
vgf2p8affineinvqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx+126], 123
vgf2p8mulb xmm6, xmm5, xmm4
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx+126]
vgf2p8affineqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx+126], 123
vgf2p8affineinvqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx+126], 123
# ----------------------------------------------------------------------
# NOTE(review): end of the x86-64-avx_gfni.s content; the content that
# follows originates from a different testsuite file:
#   gas/testsuite/gas/i386/evex-lig.s
# (Non-assembler separator text from the extraction was repaired into
# comments so the file remains valid gas input.)
# ----------------------------------------------------------------------
# Check EVEX LIG instructions
.allow_index_reg
.text
_start:
vaddsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vaddsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vaddsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vaddsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vaddsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vaddss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vaddss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vaddss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vaddss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vaddss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vaddss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vaddss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vcmpsd $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $123, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpsd $123, (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpsd $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpsd $123, 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpsd $123, 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpsd $123, -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpsd $123, -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpltsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpltsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmple_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmple_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmplesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmplesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmplesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmplesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunordsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunordsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnltsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnltsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnltsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnltsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnltsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnltsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnltsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnltsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnle_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnle_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnlesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnlesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpord_qsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpord_qsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpord_qsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpord_qsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpord_qsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpord_qsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpord_qsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpord_qsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpordsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpordsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpordsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpordsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpordsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpordsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpordsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpordsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeq_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeq_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnge_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnge_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnge_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnge_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnge_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnge_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnge_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnge_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpngesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpngesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngt_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngt_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngt_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpngt_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpngt_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngt_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngt_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngt_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngtsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngtsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngtsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpngtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpngtsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngtsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngtsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngtsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpfalse_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpfalse_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpfalse_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpfalse_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpfalse_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpfalse_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpfalse_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpfalse_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpfalsesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpfalsesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpfalsesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpfalsesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpfalsesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpfalsesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpfalsesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpfalsesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneq_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneq_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpge_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpge_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpge_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpge_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpge_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpge_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpge_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpge_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpgesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpgesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgt_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgt_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgt_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpgt_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpgt_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgt_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgt_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgt_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgtsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgtsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgtsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpgtsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpgtsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgtsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgtsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgtsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmptrue_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmptrue_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmptrue_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmptrue_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmptrue_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmptrue_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmptrue_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmptrue_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmptruesd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmptruesd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmptruesd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmptruesd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmptruesd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmptruesd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmptruesd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmptruesd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeq_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmplt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmplt_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmple_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmple_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_ssd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_ssd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_ssd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunord_ssd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_ssd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_ssd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_ssd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneq_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnlt_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnle_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnle_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnle_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnle_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnle_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnle_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpord_ssd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpord_ssd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpord_ssd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpord_ssd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpord_ssd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpord_ssd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpord_ssd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpord_ssd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeq_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeq_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnge_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnge_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnge_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnge_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnge_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnge_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnge_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnge_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngt_uqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngt_uqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpngt_uqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpngt_uqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpngt_uqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngt_uqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpngt_uqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpngt_uqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpfalse_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpfalse_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpfalse_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpfalse_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpfalse_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpfalse_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpfalse_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpfalse_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_ossd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_ossd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_ossd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneq_ossd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneq_ossd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_ossd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_ossd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_ossd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpge_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpge_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpge_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpge_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpge_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpge_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpge_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpge_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgt_oqsd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgt_oqsd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpgt_oqsd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpgt_oqsd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpgt_oqsd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgt_oqsd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmpgt_oqsd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpgt_oqsd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmptrue_ussd %xmm4, %xmm5, %k5{%k7} # AVX512
vcmptrue_ussd {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmptrue_ussd (%ecx), %xmm5, %k5{%k7} # AVX512
vcmptrue_ussd -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmptrue_ussd 1016(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmptrue_ussd 1024(%edx), %xmm5, %k5{%k7} # AVX512
vcmptrue_ussd -1024(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmptrue_ussd -1032(%edx), %xmm5, %k5{%k7} # AVX512
vcmpss $0xab, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpss $0xab, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpss $123, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpss $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpss $123, (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpss $123, -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpss $123, 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpss $123, 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpss $123, -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpss $123, -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_oqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeq_oqss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_oqss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeq_oqss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeq_oqss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeqss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpeqss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpeqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpeqss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeqss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpeqss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpeqss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_osss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmplt_osss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmplt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmplt_osss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_osss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmplt_osss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmplt_osss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpltss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpltss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpltss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpltss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpltss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpltss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpltss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpltss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_osss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_osss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmple_osss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmple_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmple_osss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_osss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmple_osss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmple_osss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpless %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpless {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpless (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpless -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpless 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpless 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpless -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpless -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_qss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunord_qss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunord_qss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_qss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunord_qss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunord_qss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunordss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunordss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpunordss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpunordss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpunordss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunordss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpunordss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpunordss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_uqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneq_uqss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_uqss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneq_uqss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneq_uqss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneqss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneqss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpneqss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpneqss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpneqss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneqss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpneqss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpneqss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_usss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnlt_usss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnlt_usss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_usss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnlt_usss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnlt_usss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnltss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnltss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnltss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnltss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnltss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnltss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnltss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnltss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_usss %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnle_usss {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnle_usss (%ecx), %xmm5, %k5{%k7} # AVX512
vcmpnle_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512
vcmpnle_usss 508(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnle_usss 512(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnle_usss -512(%edx), %xmm5, %k5{%k7} # AVX512 Disp8
vcmpnle_usss -516(%edx), %xmm5, %k5{%k7} # AVX512
vcmpnless %xmm4, %xmm5, %k5{%k7} # AVX512
vcmpnless {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512
# AVX512 scalar-compare test patterns: vcmp*ss writing a mask register
# (%k5) under a mask (%k7).  Each mnemonic group exercises register,
# {sae}, plain/indexed memory, and +/- displacements straddling the
# Disp8*N compression boundary (508/512 for 4-byte scalars).
# NOTE(review): the vcmpnless group below is truncated at the top of
# this chunk — its register and {sae} forms precede L13415.
	vcmpnless (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnless -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpnless 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnless 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnless -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnless -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_qss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpord_qss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpord_qss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_qss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_qss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpord_qss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_qss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpord_qss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpordss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpordss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpordss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpordss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpordss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpordss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpordss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpordss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpeq_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpeq_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnge_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnge_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngess %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngess (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpngess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngt_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngt_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngtss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngtss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngtss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngtss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpngtss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngtss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngtss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngtss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpfalse_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpfalse_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalsess %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpfalsess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpfalsess (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalsess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalsess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpfalsess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalsess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpfalsess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpneq_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpneq_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpge_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpge_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpge_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpge_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgess %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgess (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpgess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgt_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgt_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgtss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgtss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgtss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgtss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpgtss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgtss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgtss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgtss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmptrue_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmptrue_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmptruess %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmptruess {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmptruess (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmptruess -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmptruess 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmptruess 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmptruess -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmptruess -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpeq_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpeq_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmplt_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmplt_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmplt_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmplt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmplt_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmplt_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmplt_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmplt_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmple_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmple_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmple_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmple_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmple_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmple_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmple_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmple_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpunord_sss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpunord_sss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpunord_sss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpunord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpunord_sss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpunord_sss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpunord_sss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpunord_sss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpneq_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpneq_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnlt_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnlt_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnlt_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnlt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpnlt_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnlt_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnlt_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnlt_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnle_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnle_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnle_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnle_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpnle_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnle_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnle_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnle_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_sss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpord_sss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpord_sss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_sss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_sss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpord_sss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpord_sss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpord_sss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpeq_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpeq_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpeq_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnge_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpnge_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpnge_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_uqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_uqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_uqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_uqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_uqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngt_uqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpngt_uqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpngt_uqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpfalse_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpfalse_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpfalse_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_osss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_osss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_osss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_osss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_osss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpneq_osss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpneq_osss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpneq_osss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpge_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpge_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpge_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpge_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpge_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_oqss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_oqss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_oqss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_oqss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_oqss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgt_oqss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmpgt_oqss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmpgt_oqss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_usss %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_usss {sae}, %xmm4, %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_usss (%ecx), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_usss -123456(%esp,%esi,8), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_usss 508(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmptrue_usss 512(%edx), %xmm5, %k5{%k7}	 # AVX512
	vcmptrue_usss -512(%edx), %xmm5, %k5{%k7}	 # AVX512 Disp8
	vcmptrue_usss -516(%edx), %xmm5, %k5{%k7}	 # AVX512
# Scalar compares and conversions with SAE / static-rounding operands
# ({sae}, {rn,ru,rd,rz-sae}).  Destination GPR variants use both %eax
# and %ebp to exercise distinct ModRM reg fields.
	vcomisd {sae}, %xmm5, %xmm6	 # AVX512
	vcomiss {sae}, %xmm5, %xmm6	 # AVX512
	vcvtsd2si {rn-sae}, %xmm6, %eax	 # AVX512
	vcvtsd2si {ru-sae}, %xmm6, %eax	 # AVX512
	vcvtsd2si {rd-sae}, %xmm6, %eax	 # AVX512
	vcvtsd2si {rz-sae}, %xmm6, %eax	 # AVX512
	vcvtsd2si {rn-sae}, %xmm6, %ebp	 # AVX512
	vcvtsd2si {ru-sae}, %xmm6, %ebp	 # AVX512
	vcvtsd2si {rd-sae}, %xmm6, %ebp	 # AVX512
	vcvtsd2si {rz-sae}, %xmm6, %ebp	 # AVX512
	vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vcvtsd2ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vcvtsd2ss 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsd2ss -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vcvtsd2ss -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtsi2ssl %eax, {rn-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %eax, {ru-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %eax, {rd-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %eax, {rz-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %ebp, {rn-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %ebp, {ru-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %ebp, {rd-sae}, %xmm5, %xmm6	 # AVX512
	vcvtsi2ssl %ebp, {rz-sae}, %xmm5, %xmm6	 # AVX512
	vcvtss2sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtss2sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vcvtss2sd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vcvtss2sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtss2sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtss2sd 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vcvtss2sd 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtss2sd -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vcvtss2sd -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vcvtss2si {rn-sae}, %xmm6, %eax	 # AVX512
	vcvtss2si {ru-sae}, %xmm6, %eax	 # AVX512
	vcvtss2si {rd-sae}, %xmm6, %eax	 # AVX512
	vcvtss2si {rz-sae}, %xmm6, %eax	 # AVX512
	vcvtss2si {rn-sae}, %xmm6, %ebp	 # AVX512
	vcvtss2si {ru-sae}, %xmm6, %ebp	 # AVX512
	vcvtss2si {rd-sae}, %xmm6, %ebp	 # AVX512
	vcvtss2si {rz-sae}, %xmm6, %ebp	 # AVX512
	vcvttsd2si {sae}, %xmm6, %eax	 # AVX512
	vcvttsd2si {sae}, %xmm6, %ebp	 # AVX512
	vcvttss2si {sae}, %xmm6, %eax	 # AVX512
	vcvttss2si {sae}, %xmm6, %ebp	 # AVX512
# Scalar divide with masking ({%k7}), zero-masking ({z}), static
# rounding, and Disp8*N boundaries (1016/1024 for sd, 508/512 for ss).
	vdivsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vdivsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vdivsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vdivsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vdivsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vdivss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vdivss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vdivss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vdivss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vdivss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vdivss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vdivss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vdivss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
# Scalar FMA family: vf{,n}m{add,sub}{132,213,231}{sd,ss}, each with
# register, zero-masking, four static-rounding modes, and memory forms.
	vfmadd132sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd132sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd132sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd132ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd132ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd132ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd213sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd213sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd213ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd213ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd213ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmadd231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmadd231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub132sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub132sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub132ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub132ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub132ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub213sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub213sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub213ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub213ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub213ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfmsub231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfmsub231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmadd132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd132sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd132sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmadd132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd132ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd132ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd132ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmadd213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd213sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd213sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmadd213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd213ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd213ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd213ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmadd231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmadd231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmadd231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmadd231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmsub132sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub132sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub132sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmsub132ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub132ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub132ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub132ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmsub213sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub213sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub213sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmsub213ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub213ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub213ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub213ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmsub231sd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub231sd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231sd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub231sd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vfnmsub231ss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub231ss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vfnmsub231ss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vfnmsub231ss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
# vgetexp / vgetmant scalar forms; vgetmant also varies the immediate
# ($0xab and $123).
	vgetexpsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vgetexpsd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetexpsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetexpsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vgetexpss {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetexpss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetexpss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetexpss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vgetmantsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $123, 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetmantsd $123, 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantsd $123, -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetmantsd $123, -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vgetmantss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $123, 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetmantss $123, 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vgetmantss $123, -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vgetmantss $123, -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
# Scalar max/min.  NOTE(review): the vminss group is truncated at the
# bottom of this chunk — its remaining memory forms follow L14110.
	vmaxsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vmaxsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vmaxsd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vmaxsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vmaxsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vmaxsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vmaxss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vmaxss {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vmaxss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxss 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vmaxss 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vmaxss -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vmaxss -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vminsd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vminsd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vminsd {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vminsd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
	vminsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512
	vminsd 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vminsd 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vminsd -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512 Disp8
	vminsd -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512
	vminss %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vminss %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512
	vminss {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512
	vminss (%ecx), %xmm5, %xmm6{%k7}	 # AVX512
vminss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vminss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vminss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vminss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vminss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vmovsd (%ecx), %xmm6{%k7} # AVX512
vmovsd (%ecx), %xmm6{%k7}{z} # AVX512
vmovsd -123456(%esp,%esi,8), %xmm6{%k7} # AVX512
vmovsd 1016(%edx), %xmm6{%k7} # AVX512 Disp8
vmovsd 1024(%edx), %xmm6{%k7} # AVX512
vmovsd -1024(%edx), %xmm6{%k7} # AVX512 Disp8
vmovsd -1032(%edx), %xmm6{%k7} # AVX512
vmovsd %xmm6, (%ecx){%k7} # AVX512
vmovsd %xmm6, -123456(%esp,%esi,8){%k7} # AVX512
vmovsd %xmm6, 1016(%edx){%k7} # AVX512 Disp8
vmovsd %xmm6, 1024(%edx){%k7} # AVX512
vmovsd %xmm6, -1024(%edx){%k7} # AVX512 Disp8
vmovsd %xmm6, -1032(%edx){%k7} # AVX512
vmovsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmovsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmovss (%ecx), %xmm6{%k7} # AVX512
vmovss (%ecx), %xmm6{%k7}{z} # AVX512
vmovss -123456(%esp,%esi,8), %xmm6{%k7} # AVX512
vmovss 508(%edx), %xmm6{%k7} # AVX512 Disp8
vmovss 512(%edx), %xmm6{%k7} # AVX512
vmovss -512(%edx), %xmm6{%k7} # AVX512 Disp8
vmovss -516(%edx), %xmm6{%k7} # AVX512
vmovss %xmm6, (%ecx){%k7} # AVX512
vmovss %xmm6, -123456(%esp,%esi,8){%k7} # AVX512
vmovss %xmm6, 508(%edx){%k7} # AVX512 Disp8
vmovss %xmm6, 512(%edx){%k7} # AVX512
vmovss %xmm6, -512(%edx){%k7} # AVX512 Disp8
vmovss %xmm6, -516(%edx){%k7} # AVX512
vmovss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmovss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmulsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmulsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vmulsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vmulsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vmulsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vmulss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vmulss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vmulss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vmulss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vmulss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vmulss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vmulss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrcp14sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrcp14sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrcp14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrcp14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrcp14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vrcp28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrcp28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrcp28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrcp28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrcp28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt14sd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrsqrt14sd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrsqrt14ss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt14ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrsqrt14ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrsqrt28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512EMI
vrsqrt28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vrsqrt28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512EMI Disp8
vrsqrt28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512EMI
vsqrtsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsqrtsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsqrtsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsqrtsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vsqrtsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vsqrtss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsqrtss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsqrtss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsqrtss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsqrtss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vsqrtss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsqrtss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsubsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsubsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsubsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vsubss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vsubss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vsubss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vsubss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vsubss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vsubss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vucomisd {sae}, %xmm5, %xmm6 # AVX512
vucomiss {sae}, %xmm5, %xmm6 # AVX512
vcvtsd2usi %xmm6, %eax # AVX512
vcvtsd2usi {rn-sae}, %xmm6, %eax # AVX512
vcvtsd2usi {ru-sae}, %xmm6, %eax # AVX512
vcvtsd2usi {rd-sae}, %xmm6, %eax # AVX512
vcvtsd2usi {rz-sae}, %xmm6, %eax # AVX512
vcvtsd2usi (%ecx), %eax # AVX512
vcvtsd2usi -123456(%esp,%esi,8), %eax # AVX512
vcvtsd2usi 1016(%edx), %eax # AVX512 Disp8
vcvtsd2usi 1024(%edx), %eax # AVX512
vcvtsd2usi -1024(%edx), %eax # AVX512 Disp8
vcvtsd2usi -1032(%edx), %eax # AVX512
vcvtsd2usi %xmm6, %ebp # AVX512
vcvtsd2usi {rn-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi {ru-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi {rd-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi {rz-sae}, %xmm6, %ebp # AVX512
vcvtsd2usi (%ecx), %ebp # AVX512
vcvtsd2usi -123456(%esp,%esi,8), %ebp # AVX512
vcvtsd2usi 1016(%edx), %ebp # AVX512 Disp8
vcvtsd2usi 1024(%edx), %ebp # AVX512
vcvtsd2usi -1024(%edx), %ebp # AVX512 Disp8
vcvtsd2usi -1032(%edx), %ebp # AVX512
vcvtss2usi %xmm6, %eax # AVX512
vcvtss2usi {rn-sae}, %xmm6, %eax # AVX512
vcvtss2usi {ru-sae}, %xmm6, %eax # AVX512
vcvtss2usi {rd-sae}, %xmm6, %eax # AVX512
vcvtss2usi {rz-sae}, %xmm6, %eax # AVX512
vcvtss2usi (%ecx), %eax # AVX512
vcvtss2usi -123456(%esp,%esi,8), %eax # AVX512
vcvtss2usi 508(%edx), %eax # AVX512 Disp8
vcvtss2usi 512(%edx), %eax # AVX512
vcvtss2usi -512(%edx), %eax # AVX512 Disp8
vcvtss2usi -516(%edx), %eax # AVX512
vcvtss2usi %xmm6, %ebp # AVX512
vcvtss2usi {rn-sae}, %xmm6, %ebp # AVX512
vcvtss2usi {ru-sae}, %xmm6, %ebp # AVX512
vcvtss2usi {rd-sae}, %xmm6, %ebp # AVX512
vcvtss2usi {rz-sae}, %xmm6, %ebp # AVX512
vcvtss2usi (%ecx), %ebp # AVX512
vcvtss2usi -123456(%esp,%esi,8), %ebp # AVX512
vcvtss2usi 508(%edx), %ebp # AVX512 Disp8
vcvtss2usi 512(%edx), %ebp # AVX512
vcvtss2usi -512(%edx), %ebp # AVX512 Disp8
vcvtss2usi -516(%edx), %ebp # AVX512
vcvtusi2sdl %eax, %xmm5, %xmm6 # AVX512
vcvtusi2sdl %ebp, %xmm5, %xmm6 # AVX512
vcvtusi2sdl (%ecx), %xmm5, %xmm6 # AVX512
vcvtusi2sdl -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512
vcvtusi2sdl 508(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2sdl 512(%edx), %xmm5, %xmm6 # AVX512
vcvtusi2sdl -512(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2sdl -516(%edx), %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {rn-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {ru-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {rd-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %eax, {rz-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {rn-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {ru-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {rd-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl %ebp, {rz-sae}, %xmm5, %xmm6 # AVX512
vcvtusi2ssl (%ecx), %xmm5, %xmm6 # AVX512
vcvtusi2ssl -123456(%esp,%esi,8), %xmm5, %xmm6 # AVX512
vcvtusi2ssl 508(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2ssl 512(%edx), %xmm5, %xmm6 # AVX512
vcvtusi2ssl -512(%edx), %xmm5, %xmm6 # AVX512 Disp8
vcvtusi2ssl -516(%edx), %xmm5, %xmm6 # AVX512
vscalefsd %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vscalefsd {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefsd (%ecx), %xmm5, %xmm6{%k7} # AVX512
vscalefsd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vscalefsd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefsd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vscalefsd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefsd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vscalefss %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vscalefss {rn-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss {ru-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss {rd-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss {rz-sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vscalefss (%ecx), %xmm5, %xmm6{%k7} # AVX512
vscalefss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vscalefss 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefss 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vscalefss -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vscalefss -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfixupimmss $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmss $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmss $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmss $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vfixupimmsd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmsd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vfixupimmsd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vfixupimmsd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrndscalesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscalesd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscalesd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscalesd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512
vrndscaless $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscaless $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512
vrndscaless $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512 Disp8
vrndscaless $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512
vcmpsh $123, %xmm4, %xmm5, %k5 # AVX512-FP16
vcmpsh $123, {sae}, %xmm4, %xmm5, %k5{%k7} # AVX512-FP16
vcmpsh $123, (%ecx), %xmm5, %k5 # AVX512-FP16
vcmpsh $123, -123456(%esp, %esi, 8), %xmm5, %k5{%k7} # AVX512-FP16
vcmpsh $123, 254(%ecx), %xmm5, %k5 # AVX512-FP16 Disp8
vcmpsh $123, -256(%edx), %xmm5, %k5{%k7} # AVX512-FP16 Disp8
vfpclasssh $123, %xmm4, %k5 # AVX512-FP16
vfpclasssh $123, (%ecx), %k5 # AVX512-FP16
vfpclasssh $123, -123456(%esp, %esi, 8), %k5{%k7} # AVX512-FP16
vfpclasssh $123, 254(%ecx), %k5 # AVX512-FP16 Disp8
vfpclasssh $123, -256(%edx), %k5{%k7} # AVX512-FP16 Disp8
.intel_syntax noprefix
vaddsd xmm6{k7}, xmm5, xmm4 # AVX512
vaddsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vaddsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vaddsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vaddss xmm6{k7}, xmm5, xmm4 # AVX512
vaddss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vaddss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vaddss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vcmpsd k5{k7}, xmm5, xmm4, 0xab # AVX512
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vcmpsd k5{k7}, xmm5, xmm4, 123 # AVX512
vcmpsd k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vcmpsd k5{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vcmpeq_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpeqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpeqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmplt_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmplt_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmplt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpltsd k5{k7}, xmm5, xmm4 # AVX512
vcmpltsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmple_ossd k5{k7}, xmm5, xmm4 # AVX512
vcmple_ossd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmple_ossd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmplesd k5{k7}, xmm5, xmm4 # AVX512
vcmplesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmplesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpunord_qsd k5{k7}, xmm5, xmm4 # AVX512
vcmpunord_qsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpunord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpunordsd k5{k7}, xmm5, xmm4 # AVX512
vcmpunordsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpunordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpneqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpnlt_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnlt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnltsd k5{k7}, xmm5, xmm4 # AVX512
vcmpnltsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnltsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnle_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpnle_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnle_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnlesd k5{k7}, xmm5, xmm4 # AVX512
vcmpnlesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnlesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpord_qsd k5{k7}, xmm5, xmm4 # AVX512
vcmpord_qsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpord_qsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpordsd k5{k7}, xmm5, xmm4 # AVX512
vcmpordsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpordsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpeq_uqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpeq_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpnge_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpnge_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpnge_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngesd k5{k7}, xmm5, xmm4 # AVX512
vcmpngesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngt_ussd k5{k7}, xmm5, xmm4 # AVX512
vcmpngt_ussd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngt_ussd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpngtsd k5{k7}, xmm5, xmm4 # AVX512
vcmpngtsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpngtsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpfalse_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpfalsesd k5{k7}, xmm5, xmm4 # AVX512
vcmpfalsesd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpfalsesd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, xmm4 # AVX512
vcmpneq_oqsd k5{k7}, xmm5, xmm4, {sae} # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [ecx] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vcmpneq_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
# vcmp*sd predicate pseudo-mnemonics (EVEX.LIG.F2.0F C2 /r, mask-register
# destination with {k7} write-mask).  Each 8-line group exercises one
# comparison predicate in every operand form the assembler must encode:
# reg/reg, reg/reg with {sae}, and QWORD memory operands chosen to hit
# base-only, SIB+disp32, and the disp8*N compression boundaries
# (+1016/-1024 compress to disp8; +1024/-1032 must stay disp32).
vcmpge_ossd k5{k7}, xmm5, xmm4	 # AVX512
vcmpge_ossd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpge_ossd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpgesd k5{k7}, xmm5, xmm4	 # AVX512
vcmpgesd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpgesd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpgt_ossd k5{k7}, xmm5, xmm4	 # AVX512
vcmpgt_ossd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpgt_ossd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpgtsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpgtsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpgtsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmptrue_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmptruesd k5{k7}, xmm5, xmm4	 # AVX512
vcmptruesd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmptruesd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpeq_ossd k5{k7}, xmm5, xmm4	 # AVX512
vcmpeq_ossd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpeq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmplt_oqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmplt_oqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmplt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmple_oqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmple_oqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmple_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpunord_ssd k5{k7}, xmm5, xmm4	 # AVX512
vcmpunord_ssd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpunord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpneq_ussd k5{k7}, xmm5, xmm4	 # AVX512
vcmpneq_ussd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpneq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpnlt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpnle_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpord_ssd k5{k7}, xmm5, xmm4	 # AVX512
vcmpord_ssd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpord_ssd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpeq_ussd k5{k7}, xmm5, xmm4	 # AVX512
vcmpeq_ussd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpeq_ussd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpnge_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpngt_uqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, xmm4	 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpfalse_ossd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpneq_ossd k5{k7}, xmm5, xmm4	 # AVX512
vcmpneq_ossd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpneq_ossd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpge_oqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpge_oqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpge_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, xmm4	 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmpgt_oqsd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcmptrue_ussd k5{k7}, xmm5, xmm4	 # AVX512
vcmptrue_ussd k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcmptrue_ussd k5{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
# vcmpss with an explicit 8-bit predicate immediate (rather than a
# predicate pseudo-mnemonic): hex and decimal immediates, {sae}, and
# DWORD memory operands at the disp8*N compression boundaries
# (+508/-512 compress to disp8; +512/-516 need disp32).
vcmpss k5{k7}, xmm5, xmm4, 0xab	 # AVX512
vcmpss k5{k7}, xmm5, xmm4, {sae}, 0xab	 # AVX512
vcmpss k5{k7}, xmm5, xmm4, 123	 # AVX512
vcmpss k5{k7}, xmm5, xmm4, {sae}, 123	 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [ecx], 123	 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123	 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [edx+508], 123	 # AVX512 Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx+512], 123	 # AVX512
vcmpss k5{k7}, xmm5, DWORD PTR [edx-512], 123	 # AVX512 Disp8
vcmpss k5{k7}, xmm5, DWORD PTR [edx-516], 123	 # AVX512
# vcmp*ss predicate pseudo-mnemonics (EVEX.LIG.F3.0F C2 /r, mask-register
# destination with {k7} write-mask).  Same 8-line pattern per predicate as
# the vcmp*sd groups above, but with DWORD memory operands, so the disp8*N
# boundaries scale by 4 (+508/-512 compress to disp8; +512/-516 need
# disp32).
vcmpeq_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpeq_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpeq_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpeqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpeqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpeqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmplt_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmplt_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmplt_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpltss k5{k7}, xmm5, xmm4	 # AVX512
vcmpltss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpltss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmple_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmple_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmple_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpless k5{k7}, xmm5, xmm4	 # AVX512
vcmpless k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpless k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpless k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpunord_qss k5{k7}, xmm5, xmm4	 # AVX512
vcmpunord_qss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpunord_qss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpunordss k5{k7}, xmm5, xmm4	 # AVX512
vcmpunordss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpunordss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpneq_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpneq_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpneq_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpneqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpneqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpneqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnlt_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnlt_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnlt_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnltss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnltss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnltss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnle_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnle_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnle_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnless k5{k7}, xmm5, xmm4	 # AVX512
vcmpnless k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnless k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpord_qss k5{k7}, xmm5, xmm4	 # AVX512
vcmpord_qss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpord_qss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpordss k5{k7}, xmm5, xmm4	 # AVX512
vcmpordss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpordss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpeq_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpeq_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpeq_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnge_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnge_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnge_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpngess k5{k7}, xmm5, xmm4	 # AVX512
vcmpngess k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpngess k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpngt_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmpngt_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpngt_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpngtss k5{k7}, xmm5, xmm4	 # AVX512
vcmpngtss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpngtss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpfalse_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpfalsess k5{k7}, xmm5, xmm4	 # AVX512
vcmpfalsess k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpfalsess k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpneq_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpneq_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpneq_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpge_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmpge_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpge_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpgess k5{k7}, xmm5, xmm4	 # AVX512
vcmpgess k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpgess k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpgt_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmpgt_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpgt_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpgtss k5{k7}, xmm5, xmm4	 # AVX512
vcmpgtss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpgtss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmptrue_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmptrue_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmptrue_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmptruess k5{k7}, xmm5, xmm4	 # AVX512
vcmptruess k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmptruess k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpeq_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmpeq_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpeq_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmplt_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmplt_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmplt_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmple_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmple_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmple_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpunord_sss k5{k7}, xmm5, xmm4	 # AVX512
vcmpunord_sss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpunord_sss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpneq_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmpneq_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpneq_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnlt_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnle_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnle_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnle_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpord_sss k5{k7}, xmm5, xmm4	 # AVX512
vcmpord_sss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpord_sss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpeq_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmpeq_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpeq_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpnge_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpnge_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpnge_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpngt_uqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpngt_uqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpngt_uqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpfalse_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmpfalse_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpfalse_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpneq_osss k5{k7}, xmm5, xmm4	 # AVX512
vcmpneq_osss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpneq_osss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpge_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpge_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpge_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmpgt_oqss k5{k7}, xmm5, xmm4	 # AVX512
vcmpgt_oqss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmpgt_oqss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcmptrue_usss k5{k7}, xmm5, xmm4	 # AVX512
vcmptrue_usss k5{k7}, xmm5, xmm4, {sae}	 # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcmptrue_usss k5{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
# Scalar compare/convert forms: vcomis* with {sae}; vcvtsd2si / vcvtss2si
# with all four static rounding modes into both eax and ebp (ebp exercises
# the ModRM encoding that would otherwise mean "no base register");
# vcvtsd2ss / vcvtss2sd with masking, zero-masking, rounding / SAE, and
# the disp8*N memory boundaries; vcvtsi2ss with rounding-mode-before-GPR
# operand order; vcvtt* truncating converts with {sae}.
vcomisd xmm6, xmm5, {sae}	 # AVX512
vcomiss xmm6, xmm5, {sae}	 # AVX512
vcvtsd2si eax, xmm6, {rn-sae}	 # AVX512
vcvtsd2si eax, xmm6, {ru-sae}	 # AVX512
vcvtsd2si eax, xmm6, {rd-sae}	 # AVX512
vcvtsd2si eax, xmm6, {rz-sae}	 # AVX512
vcvtsd2si ebp, xmm6, {rn-sae}	 # AVX512
vcvtsd2si ebp, xmm6, {ru-sae}	 # AVX512
vcvtsd2si ebp, xmm6, {rd-sae}	 # AVX512
vcvtsd2si ebp, xmm6, {rz-sae}	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4	 # AVX512
vcvtsd2ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {rn-sae}	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {ru-sae}	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {rd-sae}	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, xmm4, {rz-sae}	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vcvtsd2ss xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vcvtsi2ss xmm6, xmm5, {rn-sae}, eax	 # AVX512
vcvtsi2ss xmm6, xmm5, {ru-sae}, eax	 # AVX512
vcvtsi2ss xmm6, xmm5, {rd-sae}, eax	 # AVX512
vcvtsi2ss xmm6, xmm5, {rz-sae}, eax	 # AVX512
vcvtsi2ss xmm6, xmm5, {rn-sae}, ebp	 # AVX512
vcvtsi2ss xmm6, xmm5, {ru-sae}, ebp	 # AVX512
vcvtsi2ss xmm6, xmm5, {rd-sae}, ebp	 # AVX512
vcvtsi2ss xmm6, xmm5, {rz-sae}, ebp	 # AVX512
vcvtss2sd xmm6{k7}, xmm5, xmm4	 # AVX512
vcvtss2sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vcvtss2sd xmm6{k7}, xmm5, xmm4, {sae}	 # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vcvtss2sd xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vcvtss2si eax, xmm6, {rn-sae}	 # AVX512
vcvtss2si eax, xmm6, {ru-sae}	 # AVX512
vcvtss2si eax, xmm6, {rd-sae}	 # AVX512
vcvtss2si eax, xmm6, {rz-sae}	 # AVX512
vcvtss2si ebp, xmm6, {rn-sae}	 # AVX512
vcvtss2si ebp, xmm6, {ru-sae}	 # AVX512
vcvtss2si ebp, xmm6, {rd-sae}	 # AVX512
vcvtss2si ebp, xmm6, {rz-sae}	 # AVX512
vcvttsd2si eax, xmm6, {sae}	 # AVX512
vcvttsd2si ebp, xmm6, {sae}	 # AVX512
vcvttss2si eax, xmm6, {sae}	 # AVX512
vcvttss2si ebp, xmm6, {sae}	 # AVX512
# Scalar arithmetic / FMA forms (vdivsd, vdivss, vfmadd132sd, vfmadd132ss,
# vfmadd213sd): each 12-line group covers {k7} masking, {k7}{z}
# zero-masking, all four static rounding modes, and QWORD/DWORD memory
# operands at the disp8*N compression boundaries.
vdivsd xmm6{k7}, xmm5, xmm4	 # AVX512
vdivsd xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {rn-sae}	 # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {ru-sae}	 # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {rd-sae}	 # AVX512
vdivsd xmm6{k7}, xmm5, xmm4, {rz-sae}	 # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vdivsd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vdivss xmm6{k7}, xmm5, xmm4	 # AVX512
vdivss xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {rn-sae}	 # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {ru-sae}	 # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {rd-sae}	 # AVX512
vdivss xmm6{k7}, xmm5, xmm4, {rz-sae}	 # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vdivss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4	 # AVX512
vfmadd132sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {rn-sae}	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {ru-sae}	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {rd-sae}	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, xmm4, {rz-sae}	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vfmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4	 # AVX512
vfmadd132ss xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {rn-sae}	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {ru-sae}	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {rd-sae}	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, xmm4, {rz-sae}	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx]	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456]	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508]	 # AVX512 Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512]	 # AVX512
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512]	 # AVX512 Disp8
vfmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516]	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4	 # AVX512
vfmadd213sd xmm6{k7}{z}, xmm5, xmm4	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {rn-sae}	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {ru-sae}	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {rd-sae}	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, xmm4, {rz-sae}	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx]	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456]	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016]	 # AVX512 Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024]	 # AVX512
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024]	 # AVX512 Disp8
vfmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032]	 # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmadd132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmadd132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmadd213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmadd213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmadd231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmadd231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmadd231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub132sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmsub132sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub132ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmsub132ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub213sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmsub213sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub213ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmsub213ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub231sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vfnmsub231sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4 # AVX512
vfnmsub231ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vfnmsub231ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vgetexpsd xmm6{k7}, xmm5, xmm4 # AVX512
vgetexpsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vgetexpsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vgetexpsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vgetexpss xmm6{k7}, xmm5, xmm4 # AVX512
vgetexpss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vgetexpss xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vgetexpss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vgetmantsd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vgetmantsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vgetmantss xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, 123 # AVX512
vgetmantss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vgetmantss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vmaxsd xmm6{k7}, xmm5, xmm4 # AVX512
vmaxsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmaxsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vmaxsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vmaxss xmm6{k7}, xmm5, xmm4 # AVX512
vmaxss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmaxss xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vmaxss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vminsd xmm6{k7}, xmm5, xmm4 # AVX512
vminsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vminsd xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vminsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vminss xmm6{k7}, xmm5, xmm4 # AVX512
vminss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vminss xmm6{k7}, xmm5, xmm4, {sae} # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vminss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vminss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vmovsd xmm6{k7}, QWORD PTR [ecx] # AVX512
vmovsd xmm6{k7}{z}, QWORD PTR [ecx] # AVX512
vmovsd xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512
vmovsd xmm6{k7}, QWORD PTR [edx+1016] # AVX512 Disp8
vmovsd xmm6{k7}, QWORD PTR [edx+1024] # AVX512
vmovsd xmm6{k7}, QWORD PTR [edx-1024] # AVX512 Disp8
vmovsd xmm6{k7}, QWORD PTR [edx-1032] # AVX512
vmovsd QWORD PTR [ecx]{k7}, xmm6 # AVX512
vmovsd QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512
vmovsd QWORD PTR [edx+1016]{k7}, xmm6 # AVX512 Disp8
vmovsd QWORD PTR [edx+1024]{k7}, xmm6 # AVX512
vmovsd QWORD PTR [edx-1024]{k7}, xmm6 # AVX512 Disp8
vmovsd QWORD PTR [edx-1032]{k7}, xmm6 # AVX512
vmovsd xmm6{k7}, xmm5, xmm4 # AVX512
vmovsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmovss xmm6{k7}, DWORD PTR [ecx] # AVX512
vmovss xmm6{k7}{z}, DWORD PTR [ecx] # AVX512
vmovss xmm6{k7}, DWORD PTR [esp+esi*8-123456] # AVX512
vmovss xmm6{k7}, DWORD PTR [edx+508] # AVX512 Disp8
vmovss xmm6{k7}, DWORD PTR [edx+512] # AVX512
vmovss xmm6{k7}, DWORD PTR [edx-512] # AVX512 Disp8
vmovss xmm6{k7}, DWORD PTR [edx-516] # AVX512
vmovss DWORD PTR [ecx]{k7}, xmm6 # AVX512
vmovss DWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512
vmovss DWORD PTR [edx+508]{k7}, xmm6 # AVX512 Disp8
vmovss DWORD PTR [edx+512]{k7}, xmm6 # AVX512
vmovss DWORD PTR [edx-512]{k7}, xmm6 # AVX512 Disp8
vmovss DWORD PTR [edx-516]{k7}, xmm6 # AVX512
vmovss xmm6{k7}, xmm5, xmm4 # AVX512
vmovss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmulsd xmm6{k7}, xmm5, xmm4 # AVX512
vmulsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vmulsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vmulsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vmulss xmm6{k7}, xmm5, xmm4 # AVX512
vmulss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vmulss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vmulss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vrcp14sd xmm6{k7}, xmm5, xmm4 # AVX512
vrcp14sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vrcp14sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vrcp14ss xmm6{k7}, xmm5, xmm4 # AVX512
vrcp14ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vrcp14ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# vrcp28{ss,sd}: approximate scalar reciprocal (relative error < 2^-28).
# NOTE(review): these instructions belong to the AVX512ER (Exponential &
# Reciprocal) extension; the previous "# AVX512EMI" tags appear to be a
# garbled form of "AVX512ER" (no extension named EMI exists) — corrected
# below. Instruction text itself is untouched (paired with expected-
# disassembly output elsewhere in the testsuite).
vrcp28ss xmm6{k7}, xmm5, xmm4 # AVX512ER
vrcp28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrcp28ss xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512ER Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512ER Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, xmm4 # AVX512ER
vrcp28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrcp28sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512ER Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512ER Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512ER
vrsqrt14sd xmm6{k7}, xmm5, xmm4 # AVX512
vrsqrt14sd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vrsqrt14sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, xmm4 # AVX512
vrsqrt14ss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vrsqrt14ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
# vrsqrt28{ss,sd}: approximate scalar reciprocal square root
# (relative error < 2^-28).
# NOTE(review): these instructions belong to the AVX512ER (Exponential &
# Reciprocal) extension; the previous "# AVX512EMI" tags appear to be a
# garbled form of "AVX512ER" (no extension named EMI exists) — corrected
# below. Instruction text itself is untouched (paired with expected-
# disassembly output elsewhere in the testsuite).
vrsqrt28ss xmm6{k7}, xmm5, xmm4 # AVX512ER
vrsqrt28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512ER Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512ER Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, xmm4 # AVX512ER
vrsqrt28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, xmm4, {sae} # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512ER Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512ER Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512ER
vsqrtsd xmm6{k7}, xmm5, xmm4 # AVX512
vsqrtsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vsqrtsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4 # AVX512
vsqrtss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vsqrtss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vsubsd xmm6{k7}, xmm5, xmm4 # AVX512
vsubsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsubsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vsubsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vsubss xmm6{k7}, xmm5, xmm4 # AVX512
vsubss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vsubss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vsubss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vsubss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vsubss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vucomisd xmm6, xmm5, {sae} # AVX512
vucomiss xmm6, xmm5, {sae} # AVX512
vcvtsd2usi eax, xmm6 # AVX512
vcvtsd2usi eax, xmm6, {rn-sae} # AVX512
vcvtsd2usi eax, xmm6, {ru-sae} # AVX512
vcvtsd2usi eax, xmm6, {rd-sae} # AVX512
vcvtsd2usi eax, xmm6, {rz-sae} # AVX512
vcvtsd2usi eax, QWORD PTR [ecx] # AVX512
vcvtsd2usi eax, QWORD PTR [esp+esi*8-123456] # AVX512
vcvtsd2usi eax, QWORD PTR [edx+1016] # AVX512 Disp8
vcvtsd2usi eax, QWORD PTR [edx+1024] # AVX512
vcvtsd2usi eax, QWORD PTR [edx-1024] # AVX512 Disp8
vcvtsd2usi eax, QWORD PTR [edx-1032] # AVX512
vcvtsd2usi ebp, xmm6 # AVX512
vcvtsd2usi ebp, xmm6, {rn-sae} # AVX512
vcvtsd2usi ebp, xmm6, {ru-sae} # AVX512
vcvtsd2usi ebp, xmm6, {rd-sae} # AVX512
vcvtsd2usi ebp, xmm6, {rz-sae} # AVX512
vcvtsd2usi ebp, QWORD PTR [ecx] # AVX512
vcvtsd2usi ebp, QWORD PTR [esp+esi*8-123456] # AVX512
vcvtsd2usi ebp, QWORD PTR [edx+1016] # AVX512 Disp8
vcvtsd2usi ebp, QWORD PTR [edx+1024] # AVX512
vcvtsd2usi ebp, QWORD PTR [edx-1024] # AVX512 Disp8
vcvtsd2usi ebp, QWORD PTR [edx-1032] # AVX512
vcvtss2usi eax, xmm6 # AVX512
vcvtss2usi eax, xmm6, {rn-sae} # AVX512
vcvtss2usi eax, xmm6, {ru-sae} # AVX512
vcvtss2usi eax, xmm6, {rd-sae} # AVX512
vcvtss2usi eax, xmm6, {rz-sae} # AVX512
vcvtss2usi eax, DWORD PTR [ecx] # AVX512
vcvtss2usi eax, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtss2usi eax, DWORD PTR [edx+508] # AVX512 Disp8
vcvtss2usi eax, DWORD PTR [edx+512] # AVX512
vcvtss2usi eax, DWORD PTR [edx-512] # AVX512 Disp8
vcvtss2usi eax, DWORD PTR [edx-516] # AVX512
vcvtss2usi ebp, xmm6 # AVX512
vcvtss2usi ebp, xmm6, {rn-sae} # AVX512
vcvtss2usi ebp, xmm6, {ru-sae} # AVX512
vcvtss2usi ebp, xmm6, {rd-sae} # AVX512
vcvtss2usi ebp, xmm6, {rz-sae} # AVX512
vcvtss2usi ebp, DWORD PTR [ecx] # AVX512
vcvtss2usi ebp, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtss2usi ebp, DWORD PTR [edx+508] # AVX512 Disp8
vcvtss2usi ebp, DWORD PTR [edx+512] # AVX512
vcvtss2usi ebp, DWORD PTR [edx-512] # AVX512 Disp8
vcvtss2usi ebp, DWORD PTR [edx-516] # AVX512
vcvtusi2sd xmm6, xmm5, eax # AVX512
vcvtusi2sd xmm6, xmm5, ebp # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [ecx] # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx+512] # AVX512
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcvtusi2sd xmm6, xmm5, DWORD PTR [edx-516] # AVX512
vcvtusi2ss xmm6, xmm5, eax # AVX512
vcvtusi2ss xmm6, xmm5, {rn-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, {ru-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, {rd-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, {rz-sae}, eax # AVX512
vcvtusi2ss xmm6, xmm5, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {rn-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {ru-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {rd-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, {rz-sae}, ebp # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [ecx] # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx+512] # AVX512
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vcvtusi2ss xmm6, xmm5, DWORD PTR [edx-516] # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4 # AVX512
vscalefsd xmm6{k7}{z}, xmm5, xmm4 # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512 Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512 Disp8
vscalefsd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512
vscalefss xmm6{k7}, xmm5, xmm4 # AVX512
vscalefss xmm6{k7}{z}, xmm5, xmm4 # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {rn-sae} # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {ru-sae} # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {rd-sae} # AVX512
vscalefss xmm6{k7}, xmm5, xmm4, {rz-sae} # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512 Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512 Disp8
vscalefss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vfixupimmss xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vfixupimmss xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vfixupimmsd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vfixupimmsd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vrndscalesd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512 Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512 Disp8
vrndscalesd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, 0xab # AVX512
vrndscaless xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 0xab # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, 123 # AVX512
vrndscaless xmm6{k7}, xmm5, xmm4, {sae}, 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512 Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512 Disp8
vrndscaless xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512
vcmpsh k5, xmm5, xmm4, 123 # AVX512-FP16
vcmpsh k5{k7}, xmm5, xmm4, {sae}, 123 # AVX512-FP16
vcmpsh k5, xmm5, WORD PTR [ecx], 123 # AVX512-FP16
vcmpsh k5{k7}, xmm5, WORD PTR [esp+esi*8-123456], 123 # AVX512-FP16
vcmpsh k5, xmm5, WORD PTR [ecx+254], 123 # AVX512-FP16 Disp8
vcmpsh k5{k7}, xmm5, WORD PTR [edx-256], 123 # AVX512-FP16 Disp8
vfpclasssh k5, xmm4, 123 # AVX512-FP16
vfpclasssh k5, WORD PTR [ecx], 123 # AVX512-FP16
vfpclasssh k5{k7}, WORD PTR [esp+esi*8-123456], 123 # AVX512-FP16
vfpclasssh k5, WORD PTR [ecx+254], 123 # AVX512-FP16 Disp8
vfpclasssh k5{k7}, WORD PTR [edx-256], 123 # AVX512-FP16 Disp8
|
stsp/binutils-ia16
| 6,527
|
gas/testsuite/gas/i386/x86-64-avx512pf.s
|
# Check 64bit AVX512PF instructions
# Assembler test fixture: exercises every AVX512PF gather/scatter prefetch
# mnemonic (hint 0 and hint 1, dword/qword indices, ps/pd element sizes)
# with a masked VSIB memory operand.  The instruction stream must stay
# exactly as-is — the companion .d file checks the resulting encodings.
.allow_index_reg
.text
_start:
# AT&T syntax forms (disp(base,vindex,scale) with {%k1} write-mask).
vgatherpf0dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vgatherpf0dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vgatherpf0dpd 256(%r9,%ymm31){%k1} # AVX512PF
vgatherpf0dpd 1024(%rcx,%ymm31,4){%k1} # AVX512PF
vgatherpf0dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf0dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf0dps 256(%r9,%zmm31){%k1} # AVX512PF
vgatherpf0dps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vgatherpf0qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf0qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf0qpd 256(%r9,%zmm31){%k1} # AVX512PF
vgatherpf0qpd 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vgatherpf0qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf0qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf0qps 256(%r9,%zmm31){%k1} # AVX512PF
vgatherpf0qps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vgatherpf1dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vgatherpf1dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vgatherpf1dpd 256(%r9,%ymm31){%k1} # AVX512PF
vgatherpf1dpd 1024(%rcx,%ymm31,4){%k1} # AVX512PF
vgatherpf1dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf1dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf1dps 256(%r9,%zmm31){%k1} # AVX512PF
vgatherpf1dps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vgatherpf1qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf1qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf1qpd 256(%r9,%zmm31){%k1} # AVX512PF
vgatherpf1qpd 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vgatherpf1qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf1qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vgatherpf1qps 256(%r9,%zmm31){%k1} # AVX512PF
vgatherpf1qps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vscatterpf0dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vscatterpf0dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vscatterpf0dpd 256(%r9,%ymm31){%k1} # AVX512PF
vscatterpf0dpd 1024(%rcx,%ymm31,4){%k1} # AVX512PF
vscatterpf0dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf0dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf0dps 256(%r9,%zmm31){%k1} # AVX512PF
vscatterpf0dps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vscatterpf0qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf0qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf0qpd 256(%r9,%zmm31){%k1} # AVX512PF
vscatterpf0qpd 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vscatterpf0qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf0qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf0qps 256(%r9,%zmm31){%k1} # AVX512PF
vscatterpf0qps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vscatterpf1dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vscatterpf1dpd 123(%r14,%ymm31,8){%k1} # AVX512PF
vscatterpf1dpd 256(%r9,%ymm31){%k1} # AVX512PF
vscatterpf1dpd 1024(%rcx,%ymm31,4){%k1} # AVX512PF
vscatterpf1dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf1dps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf1dps 256(%r9,%zmm31){%k1} # AVX512PF
vscatterpf1dps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vscatterpf1qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf1qpd 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf1qpd 256(%r9,%zmm31){%k1} # AVX512PF
vscatterpf1qpd 1024(%rcx,%zmm31,4){%k1} # AVX512PF
vscatterpf1qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf1qps 123(%r14,%zmm31,8){%k1} # AVX512PF
vscatterpf1qps 256(%r9,%zmm31){%k1} # AVX512PF
vscatterpf1qps 1024(%rcx,%zmm31,4){%k1} # AVX512PF
# The same instructions repeated in Intel syntax ([base+vindex*scale+disp]{k1}).
.intel_syntax noprefix
vgatherpf0dpd [r14+ymm31*8-123]{k1} # AVX512PF
vgatherpf0dpd [r14+ymm31*8-123]{k1} # AVX512PF
vgatherpf0dpd [r9+ymm31+256]{k1} # AVX512PF
vgatherpf0dpd [rcx+ymm31*4+1024]{k1} # AVX512PF
vgatherpf0dps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf0dps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf0dps [r9+zmm31+256]{k1} # AVX512PF
vgatherpf0dps [rcx+zmm31*4+1024]{k1} # AVX512PF
vgatherpf0qpd [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf0qpd [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf0qpd [r9+zmm31+256]{k1} # AVX512PF
vgatherpf0qpd [rcx+zmm31*4+1024]{k1} # AVX512PF
vgatherpf0qps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf0qps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf0qps [r9+zmm31+256]{k1} # AVX512PF
vgatherpf0qps [rcx+zmm31*4+1024]{k1} # AVX512PF
vgatherpf1dpd [r14+ymm31*8-123]{k1} # AVX512PF
vgatherpf1dpd [r14+ymm31*8-123]{k1} # AVX512PF
vgatherpf1dpd [r9+ymm31+256]{k1} # AVX512PF
vgatherpf1dpd [rcx+ymm31*4+1024]{k1} # AVX512PF
vgatherpf1dps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf1dps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf1dps [r9+zmm31+256]{k1} # AVX512PF
vgatherpf1dps [rcx+zmm31*4+1024]{k1} # AVX512PF
vgatherpf1qpd [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf1qpd [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf1qpd [r9+zmm31+256]{k1} # AVX512PF
vgatherpf1qpd [rcx+zmm31*4+1024]{k1} # AVX512PF
vgatherpf1qps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf1qps [r14+zmm31*8-123]{k1} # AVX512PF
vgatherpf1qps [r9+zmm31+256]{k1} # AVX512PF
vgatherpf1qps [rcx+zmm31*4+1024]{k1} # AVX512PF
vscatterpf0dpd [r14+ymm31*8-123]{k1} # AVX512PF
vscatterpf0dpd [r14+ymm31*8-123]{k1} # AVX512PF
vscatterpf0dpd [r9+ymm31+256]{k1} # AVX512PF
vscatterpf0dpd [rcx+ymm31*4+1024]{k1} # AVX512PF
vscatterpf0dps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf0dps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf0dps [r9+zmm31+256]{k1} # AVX512PF
vscatterpf0dps [rcx+zmm31*4+1024]{k1} # AVX512PF
vscatterpf0qpd [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf0qpd [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf0qpd [r9+zmm31+256]{k1} # AVX512PF
vscatterpf0qpd [rcx+zmm31*4+1024]{k1} # AVX512PF
vscatterpf0qps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf0qps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf0qps [r9+zmm31+256]{k1} # AVX512PF
vscatterpf0qps [rcx+zmm31*4+1024]{k1} # AVX512PF
vscatterpf1dpd [r14+ymm31*8-123]{k1} # AVX512PF
vscatterpf1dpd [r14+ymm31*8-123]{k1} # AVX512PF
vscatterpf1dpd [r9+ymm31+256]{k1} # AVX512PF
vscatterpf1dpd [rcx+ymm31*4+1024]{k1} # AVX512PF
vscatterpf1dps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf1dps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf1dps [r9+zmm31+256]{k1} # AVX512PF
vscatterpf1dps [rcx+zmm31*4+1024]{k1} # AVX512PF
vscatterpf1qpd [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf1qpd [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf1qpd [r9+zmm31+256]{k1} # AVX512PF
vscatterpf1qpd [rcx+zmm31*4+1024]{k1} # AVX512PF
vscatterpf1qps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf1qps [r14+zmm31*8-123]{k1} # AVX512PF
vscatterpf1qps [r9+zmm31+256]{k1} # AVX512PF
vscatterpf1qps [rcx+zmm31*4+1024]{k1} # AVX512PF
|
subhamb123/sel-embeddeddebugger
| 11,643
|
freertos_src/freertos_exception_startup.s
|
//-----------------------------------------------------------------------------
// freertos_exception_startup — AArch64 (EL3) exception-state capture.
//
// Restores x2..x19, x29, x30 (and finally x0/x1) from the stack frame built
// by the exception vector stub, then snapshots CPU state into four external
// arrays:
//   registersx  : 64-bit slots, x0..x30 at index*8, PC (ELR_EL3) at 31*8
//   registers32 : 4-byte-strided slots of 32-bit system registers
//   registers64 : 8-byte-strided slots of 64-bit system registers
//   registersv  : v0..v31, two 64-bit lanes each (16 bytes per v register)
// Finally tail-branches (no link) to freertos_exception_handler.
// Clobbers: x0, x1, x2.  x20..x28 are read live (not from the stack).
// NOTE(review): only 'registers' is declared .extern below, but the symbols
// actually referenced are registersx/registers32/registers64/registersv —
// confirm the declaration is just stale (.extern is a no-op for GAS).
//-----------------------------------------------------------------------------
.section .text
.global freertos_exception_startup
.extern registers
// MRS_REG: read system register \reg_name into general register \dest.
.macro MRS_REG reg_name, dest
MRS \dest, \reg_name
.endm
freertos_exception_startup:
// Load registers x29 and x30 from the stack
ldp x29, x30, [sp], #16
// Load registers x18 down to x2 from the stack, two at a time
// (x20..x28 were not pushed by the vector stub; they are still live)
ldp x18, x19, [sp], #16
ldp x16, x17, [sp], #16
ldp x14, x15, [sp], #16
ldp x12, x13, [sp], #16
ldp x10, x11, [sp], #16
ldp x8, x9, [sp], #16
ldp x6, x7, [sp], #16
ldp x4, x5, [sp], #16
ldp x2, x3, [sp], #16
// Load the address of the x-register save array (registersx)
adrp x1, registersx // Load the page address of registersx into x1
add x1, x1, :lo12:registersx // Add the low 12 bits of registersx address to x1
// Store each general register into its slot registersx[n] = xn
stp x29, x30, [x1, #29*8] // Write x29 and x30 to registers[29] and registers[30]
stp x27, x28, [x1, #27*8] // not from stack — current live values
stp x25, x26, [x1, #25*8] // not from stack
stp x23, x24, [x1, #23*8] // not from stack
stp x21, x22, [x1, #21*8] // not from stack
str x20, [x1, #20*8] // odd one out: x21 pairs with x22 above
stp x18, x19, [x1, #18*8] // Write x18 and x19 to registers[18] and registers[19]
stp x16, x17, [x1, #16*8] // Write x16 and x17 to registers[16] and registers[17]
stp x14, x15, [x1, #14*8] // Write x14 and x15 to registers[14] and registers[15]
stp x12, x13, [x1, #12*8] // Write x12 and x13 to registers[12] and registers[13]
stp x10, x11, [x1, #10*8] // Write x10 and x11 to registers[10] and registers[11]
stp x8, x9, [x1, #8*8] // Write x8 and x9 to registers[8] and registers[9]
stp x6, x7, [x1, #6*8] // Write x6 and x7 to registers[6] and registers[7]
stp x4, x5, [x1, #4*8] // Write x4 and x5 to registers[4] and registers[5]
stp x2, x3, [x1, #2*8] // Write x2 and x3 to registers[2] and registers[3]
// Keep the array base in x2 — x0/x1 are about to be reloaded
mov x2, x1
// Load registers x0 and x1 (pushed last by the vector stub) from the stack
ldp x0, x1, [sp], #16
// Store x0 and x1 to registers[0] and registers[1] using stp
stp x0, x1, [x2] // Write x0 and x1 to registers[0] and registers[1]
// Store pc
MRS_REG ELR_EL3, x0 // Get PC from elr_el3 (exception return address)
str x0, [x2, #31*8] // registers[31] = PC
// Store 32 bit system registers.
// The array is 4-byte strided, so use 32-bit stores: the original 64-bit
// STR x0 stores clobbered the neighbouring slot and wrote 4 bytes past
// the final element at offset #168.
// Load the address of the external register pointer (registers32)
adrp x1, registers32 // Load the page address of registers32 into x1
add x1, x1, :lo12:registers32 // Add the low 12 bits of registers32 address to x1
// Read and store the values of the specified registers
MRS_REG FPCR, x0
STR w0, [x1, #0] // Store FPCR in the array
MRS_REG FPSR, x0
STR w0, [x1, #4] // Store FPSR in the array
MRS_REG MPIDR_EL1, x0
STR w0, [x1, #8] // Store MPIDR_EL1 (low word only — NOTE(review): MPIDR_EL1 is 64-bit; the 4-byte slot can only hold Aff0..Aff2, matching the original net behaviour)
MRS_REG IFSR32_EL2, x0
STR w0, [x1, #12] // Store IFSR32_EL2 in the array
MRS_REG ESR_EL1, x0
STR w0, [x1, #16] // Store ESR_EL1 in the array
MRS_REG ESR_EL2, x0
STR w0, [x1, #20] // Store ESR_EL2 in the array
MRS_REG ESR_EL3, x0
STR w0, [x1, #24] // Store ESR_EL3 in the array
MRS_REG ISR_EL1, x0
STR w0, [x1, #28] // Store ISR_EL1 in the array
MRS_REG SCTLR_EL3, x0
STR w0, [x1, #32] // Store SCTLR_EL3 in the array
MRS_REG TCR_EL3, x0
STR w0, [x1, #36] // Store TCR_EL3 in the array
MRS_REG CONTEXTIDR_EL1, x0
STR w0, [x1, #40] // Store CONTEXTIDR_EL1 in the array
MRS_REG CPACR_EL1, x0
STR w0, [x1, #44] // Store CPACR_EL1 in the array
MRS_REG ACTLR_EL3, x0
STR w0, [x1, #48] // Store ACTLR_EL3 in the array
MRS_REG PMCR_EL0, x0
STR w0, [x1, #52] // Store PMCR_EL0 in the array
MRS_REG PMCNTENSET_EL0, x0
STR w0, [x1, #56] // Store PMCNTENSET_EL0 in the array
MRS_REG PMOVSCLR_EL0, x0
STR w0, [x1, #60] // Store PMOVSCLR_EL0 in the array
MRS_REG PMUSERENR_EL0, x0
STR w0, [x1, #64] // Store PMUSERENR_EL0 in the array
MRS_REG PMINTENSET_EL1, x0
STR w0, [x1, #68] // Store PMINTENSET_EL1 in the array
MRS_REG PMEVCNTR0_EL0, x0
STR w0, [x1, #72] // Store PMEVCNTR0_EL0 in the array
MRS_REG PMEVCNTR1_EL0, x0
STR w0, [x1, #76] // Store PMEVCNTR1_EL0 in the array
MRS_REG PMEVCNTR2_EL0, x0
STR w0, [x1, #80] // Store PMEVCNTR2_EL0 in the array
MRS_REG PMEVCNTR3_EL0, x0
STR w0, [x1, #84] // Store PMEVCNTR3_EL0 in the array
MRS_REG PMEVCNTR4_EL0, x0
STR w0, [x1, #88] // Store PMEVCNTR4_EL0 in the array
MRS_REG PMEVCNTR5_EL0, x0
STR w0, [x1, #92] // Store PMEVCNTR5_EL0 in the array
MRS_REG PMEVTYPER0_EL0, x0
STR w0, [x1, #96] // Store PMEVTYPER0_EL0 in the array
MRS_REG PMEVTYPER1_EL0, x0
STR w0, [x1, #100] // Store PMEVTYPER1_EL0 in the array
MRS_REG PMEVTYPER2_EL0, x0
STR w0, [x1, #104] // Store PMEVTYPER2_EL0 in the array
MRS_REG PMEVTYPER3_EL0, x0
STR w0, [x1, #108] // Store PMEVTYPER3_EL0 in the array
MRS_REG PMEVTYPER4_EL0, x0
STR w0, [x1, #112] // Store PMEVTYPER4_EL0 in the array
MRS_REG PMEVTYPER5_EL0, x0
STR w0, [x1, #116] // Store PMEVTYPER5_EL0 in the array
MRS_REG PMCCFILTR_EL0, x0
STR w0, [x1, #120] // Store PMCCFILTR_EL0 in the array
MRS_REG SCR_EL3, x0
STR w0, [x1, #124] // Store SCR_EL3 in the array
MRS_REG CPTR_EL3, x0
STR w0, [x1, #128] // Store CPTR_EL3 in the array
MRS_REG MDCR_EL3, x0
STR w0, [x1, #132] // Store MDCR_EL3 in the array
MRS_REG CNTKCTL_EL1, x0
STR w0, [x1, #136] // Store CNTKCTL_EL1 in the array
MRS_REG CNTP_TVAL_EL0, x0
STR w0, [x1, #140] // Store CNTP_TVAL_EL0 in the array
MRS_REG CNTP_CTL_EL0, x0
STR w0, [x1, #144] // Store CNTP_CTL_EL0 in the array
MRS_REG CNTV_TVAL_EL0, x0
STR w0, [x1, #148] // Store CNTV_TVAL_EL0 in the array
MRS_REG CNTV_CTL_EL0, x0
STR w0, [x1, #152] // Store CNTV_CTL_EL0 in the array
MRS_REG CNTHCTL_EL2, x0
STR w0, [x1, #156] // Store CNTHCTL_EL2 in the array
MRS_REG CNTHP_TVAL_EL2, x0
STR w0, [x1, #160] // Store CNTHP_TVAL_EL2 in the array
MRS_REG CNTHP_CTL_EL2, x0
STR w0, [x1, #164] // Store CNTHP_CTL_EL2 in the array
MRS_REG CNTPS_TVAL_EL1, x0
STR w0, [x1, #168] // Store CNTPS_TVAL_EL1 (last 4-byte slot; no longer overruns the array)
// Store 64 bit system registers (8-byte stride; 64-bit stores are correct here)
// Load the address of the external register pointer (registers64)
adrp x1, registers64 // Load the page address of registers64 into x1
add x1, x1, :lo12:registers64 // Add the low 12 bits of registers64 address to x1
// Read and store the values of the specified registers
MRS_REG FAR_EL3, x0
STR x0, [x1, #0] // Store FAR_EL3 in the array
MRS_REG VBAR_EL3, x0
STR x0, [x1, #8] // Store VBAR_EL3 in the array
MRS_REG TTBR0_EL3, x0
STR x0, [x1, #16] // Store TTBR0_EL3 in the array
MRS_REG MAIR_EL3, x0
STR x0, [x1, #24] // Store MAIR_EL3 in the array
MRS_REG AMAIR_EL3, x0
STR x0, [x1, #32] // Store AMAIR_EL3 in the array
MRS_REG PAR_EL1, x0
STR x0, [x1, #40] // Store PAR_EL1 in the array
MRS_REG TPIDR_EL0, x0
STR x0, [x1, #48] // Store TPIDR_EL0 in the array
MRS_REG TPIDRRO_EL0, x0
STR x0, [x1, #56] // Store TPIDRRO_EL0 in the array
MRS_REG TPIDR_EL1, x0
STR x0, [x1, #64] // Store TPIDR_EL1 in the array
MRS_REG TPIDR_EL3, x0
STR x0, [x1, #72] // Store TPIDR_EL3 in the array
MRS_REG RVBAR_EL3, x0
STR x0, [x1, #80] // Store RVBAR_EL3 in the array
MRS_REG RMR_EL3, x0
STR x0, [x1, #88] // Store RMR_EL3 in the array
MRS_REG SDER32_EL3, x0
STR x0, [x1, #96] // Store SDER32_EL3 in the array
MRS_REG CNTFRQ_EL0, x0
STR x0, [x1, #104] // Store CNTFRQ_EL0 in the array
MRS_REG CNTVCT_EL0, x0
STR x0, [x1, #112] // Store CNTVCT_EL0 in the array
MRS_REG CNTP_CVAL_EL0, x0
STR x0, [x1, #120] // Store CNTP_CVAL_EL0 in the array
MRS_REG CNTV_CVAL_EL0, x0
STR x0, [x1, #128] // Store CNTV_CVAL_EL0 in the array
MRS_REG CNTVOFF_EL2, x0
STR x0, [x1, #136] // Store CNTVOFF_EL2 in the array
MRS_REG CNTHP_CVAL_EL2, x0
STR x0, [x1, #144] // Store CNTHP_CVAL_EL2 in the array
MRS_REG CNTPS_CVAL_EL1, x0
STR x0, [x1, #152] // Store CNTPS_CVAL_EL1 in the array
MRS_REG CNTPS_CTL_EL1, x0
STR x0, [x1, #160] // Store CNTPS_CTL_EL1 in the array
MRS_REG ELR_EL1, x0
STR x0, [x1, #168] // Store ELR_EL1 in the array
MRS_REG ELR_EL2, x0
STR x0, [x1, #176] // Store ELR_EL2 in the array
MRS_REG ELR_EL3, x0
STR x0, [x1, #184] // Store ELR_EL3 in the array
// Store SIMD/FP registers: both 64-bit lanes of v0..v31 at registersv[n*16]
// Load the address of the external register pointer (registersv)
adrp x1, registersv // Load the page address of registersv into x1
add x1, x1, :lo12:registersv // Add the low 12 bits of registersv address to x1
MOV x0, v0.d[0]
STR x0, [x1, #0]
MOV x0, v0.d[1]
STR x0, [x1, #8]
MOV x0, v1.d[0]
STR x0, [x1, #16]
MOV x0, v1.d[1]
STR x0, [x1, #24]
MOV x0, v2.d[0]
STR x0, [x1, #32]
MOV x0, v2.d[1]
STR x0, [x1, #40]
MOV x0, v3.d[0]
STR x0, [x1, #48]
MOV x0, v3.d[1]
STR x0, [x1, #56]
MOV x0, v4.d[0]
STR x0, [x1, #64]
MOV x0, v4.d[1]
STR x0, [x1, #72]
MOV x0, v5.d[0]
STR x0, [x1, #80]
MOV x0, v5.d[1]
STR x0, [x1, #88]
MOV x0, v6.d[0]
STR x0, [x1, #96]
MOV x0, v6.d[1]
STR x0, [x1, #104]
MOV x0, v7.d[0]
STR x0, [x1, #112]
MOV x0, v7.d[1]
STR x0, [x1, #120]
MOV x0, v8.d[0]
STR x0, [x1, #128]
MOV x0, v8.d[1]
STR x0, [x1, #136]
MOV x0, v9.d[0]
STR x0, [x1, #144]
MOV x0, v9.d[1]
STR x0, [x1, #152]
MOV x0, v10.d[0]
STR x0, [x1, #160]
MOV x0, v10.d[1]
STR x0, [x1, #168]
MOV x0, v11.d[0]
STR x0, [x1, #176]
MOV x0, v11.d[1]
STR x0, [x1, #184]
MOV x0, v12.d[0]
STR x0, [x1, #192]
MOV x0, v12.d[1]
STR x0, [x1, #200]
MOV x0, v13.d[0]
STR x0, [x1, #208]
MOV x0, v13.d[1]
STR x0, [x1, #216]
MOV x0, v14.d[0]
STR x0, [x1, #224]
MOV x0, v14.d[1]
STR x0, [x1, #232]
MOV x0, v15.d[0]
STR x0, [x1, #240]
MOV x0, v15.d[1]
STR x0, [x1, #248]
MOV x0, v16.d[0]
STR x0, [x1, #256]
MOV x0, v16.d[1]
STR x0, [x1, #264]
MOV x0, v17.d[0]
STR x0, [x1, #272]
MOV x0, v17.d[1]
STR x0, [x1, #280]
MOV x0, v18.d[0]
STR x0, [x1, #288]
MOV x0, v18.d[1]
STR x0, [x1, #296]
MOV x0, v19.d[0]
STR x0, [x1, #304]
MOV x0, v19.d[1]
STR x0, [x1, #312]
MOV x0, v20.d[0]
STR x0, [x1, #320]
MOV x0, v20.d[1]
STR x0, [x1, #328]
MOV x0, v21.d[0]
STR x0, [x1, #336]
MOV x0, v21.d[1]
STR x0, [x1, #344]
MOV x0, v22.d[0]
STR x0, [x1, #352]
MOV x0, v22.d[1]
STR x0, [x1, #360]
MOV x0, v23.d[0]
STR x0, [x1, #368]
MOV x0, v23.d[1]
STR x0, [x1, #376]
MOV x0, v24.d[0]
STR x0, [x1, #384]
MOV x0, v24.d[1]
STR x0, [x1, #392]
MOV x0, v25.d[0]
STR x0, [x1, #400]
MOV x0, v25.d[1]
STR x0, [x1, #408]
MOV x0, v26.d[0]
STR x0, [x1, #416]
MOV x0, v26.d[1]
STR x0, [x1, #424]
MOV x0, v27.d[0]
STR x0, [x1, #432]
MOV x0, v27.d[1]
STR x0, [x1, #440]
MOV x0, v28.d[0]
STR x0, [x1, #448]
MOV x0, v28.d[1]
STR x0, [x1, #456]
MOV x0, v29.d[0]
STR x0, [x1, #464]
MOV x0, v29.d[1]
STR x0, [x1, #472]
MOV x0, v30.d[0]
STR x0, [x1, #480]
MOV x0, v30.d[1]
STR x0, [x1, #488]
MOV x0, v31.d[0]
STR x0, [x1, #496]
MOV x0, v31.d[1]
STR x0, [x1, #504]
// Store GICD registers
// Hand off to the C exception handler
adr x0, freertos_exception_handler // NOTE(review): x0 is unused by a plain 'b'; confirm the handler expects its own address as arg0
b freertos_exception_handler // Tail branch (no link) to freertos_exception_handler()
|
stsp/binutils-ia16
| 116,029
|
gas/testsuite/gas/i386/x86-64-avx.s
|
# Check 64bit AVX instructions
.allow_index_reg
.text
_start:
# Tests for op
vzeroall
vzeroupper
# Tests for op mem64
vldmxcsr (%rcx)
vstmxcsr (%rcx)
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd (%rcx),%ymm4,%ymm6
vmaskmovpd %ymm4,%ymm6,(%rcx)
vmaskmovps (%rcx),%ymm4,%ymm6
vmaskmovps %ymm4,%ymm6,(%rcx)
# Tests for op imm8, ymm/mem256, ymm
vpermilpd $7,%ymm6,%ymm2
vpermilpd $7,(%rcx),%ymm6
vpermilps $7,%ymm6,%ymm2
vpermilps $7,(%rcx),%ymm6
vroundpd $7,%ymm6,%ymm2
vroundpd $7,(%rcx),%ymm6
vroundps $7,%ymm6,%ymm2
vroundps $7,(%rcx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vaddpd %ymm4,%ymm6,%ymm2
vaddpd (%rcx),%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddps (%rcx),%ymm6,%ymm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubpd (%rcx),%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaddsubps (%rcx),%ymm6,%ymm2
vandnpd %ymm4,%ymm6,%ymm2
vandnpd (%rcx),%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandnps (%rcx),%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandpd (%rcx),%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vandps (%rcx),%ymm6,%ymm2
vdivpd %ymm4,%ymm6,%ymm2
vdivpd (%rcx),%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivps (%rcx),%ymm6,%ymm2
vhaddpd %ymm4,%ymm6,%ymm2
vhaddpd (%rcx),%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhaddps (%rcx),%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubpd (%rcx),%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vhsubps (%rcx),%ymm6,%ymm2
vmaxpd %ymm4,%ymm6,%ymm2
vmaxpd (%rcx),%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxps (%rcx),%ymm6,%ymm2
vminpd %ymm4,%ymm6,%ymm2
vminpd (%rcx),%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminps (%rcx),%ymm6,%ymm2
vmulpd %ymm4,%ymm6,%ymm2
vmulpd (%rcx),%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulps (%rcx),%ymm6,%ymm2
vorpd %ymm4,%ymm6,%ymm2
vorpd (%rcx),%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vorps (%rcx),%ymm6,%ymm2
vpermilpd %ymm4,%ymm6,%ymm2
vpermilpd (%rcx),%ymm6,%ymm2
vpermilps %ymm4,%ymm6,%ymm2
vpermilps (%rcx),%ymm6,%ymm2
vsubpd %ymm4,%ymm6,%ymm2
vsubpd (%rcx),%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubps (%rcx),%ymm6,%ymm2
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhpd (%rcx),%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpckhps (%rcx),%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklpd (%rcx),%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vunpcklps (%rcx),%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorpd (%rcx),%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vxorps (%rcx),%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqpd (%rcx),%ymm6,%ymm2
vcmpltpd %ymm4,%ymm6,%ymm2
vcmpltpd (%rcx),%ymm6,%ymm2
vcmplepd %ymm4,%ymm6,%ymm2
vcmplepd (%rcx),%ymm6,%ymm2
vcmpunordpd %ymm4,%ymm6,%ymm2
vcmpunordpd (%rcx),%ymm6,%ymm2
vcmpneqpd %ymm4,%ymm6,%ymm2
vcmpneqpd (%rcx),%ymm6,%ymm2
vcmpnltpd %ymm4,%ymm6,%ymm2
vcmpnltpd (%rcx),%ymm6,%ymm2
vcmpnlepd %ymm4,%ymm6,%ymm2
vcmpnlepd (%rcx),%ymm6,%ymm2
vcmpordpd %ymm4,%ymm6,%ymm2
vcmpordpd (%rcx),%ymm6,%ymm2
vcmpeq_uqpd %ymm4,%ymm6,%ymm2
vcmpeq_uqpd (%rcx),%ymm6,%ymm2
vcmpngepd %ymm4,%ymm6,%ymm2
vcmpngepd (%rcx),%ymm6,%ymm2
vcmpngtpd %ymm4,%ymm6,%ymm2
vcmpngtpd (%rcx),%ymm6,%ymm2
vcmpfalsepd %ymm4,%ymm6,%ymm2
vcmpfalsepd (%rcx),%ymm6,%ymm2
vcmpneq_oqpd %ymm4,%ymm6,%ymm2
vcmpneq_oqpd (%rcx),%ymm6,%ymm2
vcmpgepd %ymm4,%ymm6,%ymm2
vcmpgepd (%rcx),%ymm6,%ymm2
vcmpgtpd %ymm4,%ymm6,%ymm2
vcmpgtpd (%rcx),%ymm6,%ymm2
vcmptruepd %ymm4,%ymm6,%ymm2
vcmptruepd (%rcx),%ymm6,%ymm2
vcmpeq_ospd %ymm4,%ymm6,%ymm2
vcmpeq_ospd (%rcx),%ymm6,%ymm2
vcmplt_oqpd %ymm4,%ymm6,%ymm2
vcmplt_oqpd (%rcx),%ymm6,%ymm2
vcmple_oqpd %ymm4,%ymm6,%ymm2
vcmple_oqpd (%rcx),%ymm6,%ymm2
vcmpunord_spd %ymm4,%ymm6,%ymm2
vcmpunord_spd (%rcx),%ymm6,%ymm2
vcmpneq_uspd %ymm4,%ymm6,%ymm2
vcmpneq_uspd (%rcx),%ymm6,%ymm2
vcmpnlt_uqpd %ymm4,%ymm6,%ymm2
vcmpnlt_uqpd (%rcx),%ymm6,%ymm2
vcmpnle_uqpd %ymm4,%ymm6,%ymm2
vcmpnle_uqpd (%rcx),%ymm6,%ymm2
vcmpord_spd %ymm4,%ymm6,%ymm2
vcmpord_spd (%rcx),%ymm6,%ymm2
vcmpeq_uspd %ymm4,%ymm6,%ymm2
vcmpeq_uspd (%rcx),%ymm6,%ymm2
vcmpnge_uqpd %ymm4,%ymm6,%ymm2
vcmpnge_uqpd (%rcx),%ymm6,%ymm2
vcmpngt_uqpd %ymm4,%ymm6,%ymm2
vcmpngt_uqpd (%rcx),%ymm6,%ymm2
vcmpfalse_ospd %ymm4,%ymm6,%ymm2
vcmpfalse_ospd (%rcx),%ymm6,%ymm2
vcmpneq_ospd %ymm4,%ymm6,%ymm2
vcmpneq_ospd (%rcx),%ymm6,%ymm2
vcmpge_oqpd %ymm4,%ymm6,%ymm2
vcmpge_oqpd (%rcx),%ymm6,%ymm2
vcmpgt_oqpd %ymm4,%ymm6,%ymm2
vcmpgt_oqpd (%rcx),%ymm6,%ymm2
vcmptrue_uspd %ymm4,%ymm6,%ymm2
vcmptrue_uspd (%rcx),%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqps (%rcx),%ymm6,%ymm2
vcmpltps %ymm4,%ymm6,%ymm2
vcmpltps (%rcx),%ymm6,%ymm2
vcmpleps %ymm4,%ymm6,%ymm2
vcmpleps (%rcx),%ymm6,%ymm2
vcmpunordps %ymm4,%ymm6,%ymm2
vcmpunordps (%rcx),%ymm6,%ymm2
vcmpneqps %ymm4,%ymm6,%ymm2
vcmpneqps (%rcx),%ymm6,%ymm2
vcmpnltps %ymm4,%ymm6,%ymm2
vcmpnltps (%rcx),%ymm6,%ymm2
vcmpnleps %ymm4,%ymm6,%ymm2
vcmpnleps (%rcx),%ymm6,%ymm2
vcmpordps %ymm4,%ymm6,%ymm2
vcmpordps (%rcx),%ymm6,%ymm2
vcmpeq_uqps %ymm4,%ymm6,%ymm2
vcmpeq_uqps (%rcx),%ymm6,%ymm2
vcmpngeps %ymm4,%ymm6,%ymm2
vcmpngeps (%rcx),%ymm6,%ymm2
vcmpngtps %ymm4,%ymm6,%ymm2
vcmpngtps (%rcx),%ymm6,%ymm2
vcmpfalseps %ymm4,%ymm6,%ymm2
vcmpfalseps (%rcx),%ymm6,%ymm2
vcmpneq_oqps %ymm4,%ymm6,%ymm2
vcmpneq_oqps (%rcx),%ymm6,%ymm2
vcmpgeps %ymm4,%ymm6,%ymm2
vcmpgeps (%rcx),%ymm6,%ymm2
vcmpgtps %ymm4,%ymm6,%ymm2
vcmpgtps (%rcx),%ymm6,%ymm2
vcmptrueps %ymm4,%ymm6,%ymm2
vcmptrueps (%rcx),%ymm6,%ymm2
vcmpeq_osps %ymm4,%ymm6,%ymm2
vcmpeq_osps (%rcx),%ymm6,%ymm2
vcmplt_oqps %ymm4,%ymm6,%ymm2
vcmplt_oqps (%rcx),%ymm6,%ymm2
vcmple_oqps %ymm4,%ymm6,%ymm2
vcmple_oqps (%rcx),%ymm6,%ymm2
vcmpunord_sps %ymm4,%ymm6,%ymm2
vcmpunord_sps (%rcx),%ymm6,%ymm2
vcmpneq_usps %ymm4,%ymm6,%ymm2
vcmpneq_usps (%rcx),%ymm6,%ymm2
vcmpnlt_uqps %ymm4,%ymm6,%ymm2
vcmpnlt_uqps (%rcx),%ymm6,%ymm2
vcmpnle_uqps %ymm4,%ymm6,%ymm2
vcmpnle_uqps (%rcx),%ymm6,%ymm2
vcmpord_sps %ymm4,%ymm6,%ymm2
vcmpord_sps (%rcx),%ymm6,%ymm2
vcmpeq_usps %ymm4,%ymm6,%ymm2
vcmpeq_usps (%rcx),%ymm6,%ymm2
vcmpnge_uqps %ymm4,%ymm6,%ymm2
vcmpnge_uqps (%rcx),%ymm6,%ymm2
vcmpngt_uqps %ymm4,%ymm6,%ymm2
vcmpngt_uqps (%rcx),%ymm6,%ymm2
vcmpfalse_osps %ymm4,%ymm6,%ymm2
vcmpfalse_osps (%rcx),%ymm6,%ymm2
vcmpneq_osps %ymm4,%ymm6,%ymm2
vcmpneq_osps (%rcx),%ymm6,%ymm2
vcmpge_oqps %ymm4,%ymm6,%ymm2
vcmpge_oqps (%rcx),%ymm6,%ymm2
vcmpgt_oqps %ymm4,%ymm6,%ymm2
vcmpgt_oqps (%rcx),%ymm6,%ymm2
vcmptrue_usps %ymm4,%ymm6,%ymm2
vcmptrue_usps (%rcx),%ymm6,%ymm2
vgf2p8mulb %ymm4, %ymm5, %ymm6
vgf2p8mulb (%rcx), %ymm5, %ymm6
vgf2p8mulb -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8mulb 4064(%rdx), %ymm5, %ymm6
vgf2p8mulb 4096(%rdx), %ymm5, %ymm6
vgf2p8mulb -4096(%rdx), %ymm5, %ymm6
vgf2p8mulb -4128(%rdx), %ymm5, %ymm6
# Tests for op ymm/mem256, xmm
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqy (%rcx),%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psy (%rcx),%xmm4
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqy (%rcx),%xmm4
# Tests for op ymm/mem256, ymm
vcvtdq2ps %ymm4,%ymm6
vcvtdq2ps (%rcx),%ymm4
vcvtps2dq %ymm4,%ymm6
vcvtps2dq (%rcx),%ymm4
vcvttps2dq %ymm4,%ymm6
vcvttps2dq (%rcx),%ymm4
vmovapd %ymm4,%ymm6
vmovapd (%rcx),%ymm4
vmovaps %ymm4,%ymm6
vmovaps (%rcx),%ymm4
vmovdqa %ymm4,%ymm6
vmovdqa (%rcx),%ymm4
vmovdqu %ymm4,%ymm6
vmovdqu (%rcx),%ymm4
vmovddup %ymm4,%ymm6
vmovddup (%rcx),%ymm4
vmovshdup %ymm4,%ymm6
vmovshdup (%rcx),%ymm4
vmovsldup %ymm4,%ymm6
vmovsldup (%rcx),%ymm4
vmovupd %ymm4,%ymm6
vmovupd (%rcx),%ymm4
vmovups %ymm4,%ymm6
vmovups (%rcx),%ymm4
vptest %ymm4,%ymm6
vptest (%rcx),%ymm4
vrcpps %ymm4,%ymm6
vrcpps (%rcx),%ymm4
vrsqrtps %ymm4,%ymm6
vrsqrtps (%rcx),%ymm4
vsqrtpd %ymm4,%ymm6
vsqrtpd (%rcx),%ymm4
vsqrtps %ymm4,%ymm6
vsqrtps (%rcx),%ymm4
vtestpd %ymm4,%ymm6
vtestpd (%rcx),%ymm4
vtestps %ymm4,%ymm6
vtestps (%rcx),%ymm4
# Tests for op ymm, ymm/mem256
vmovapd %ymm4,%ymm6
vmovapd %ymm4,(%rcx)
vmovaps %ymm4,%ymm6
vmovaps %ymm4,(%rcx)
vmovdqa %ymm4,%ymm6
vmovdqa %ymm4,(%rcx)
vmovdqu %ymm4,%ymm6
vmovdqu %ymm4,(%rcx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%rcx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%rcx)
# Tests for op mem256, ymm
vlddqu (%rcx),%ymm4
# Tests for op ymm, mem256
vmovntdq %ymm4,(%rcx)
vmovntpd %ymm4,(%rcx)
vmovntps %ymm4,(%rcx)
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendpd $7,(%rcx),%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vblendps $7,(%rcx),%ymm6,%ymm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmppd $7,(%rcx),%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpps $7,(%rcx),%ymm6,%ymm2
vdpps $7,%ymm4,%ymm6,%ymm2
vdpps $7,(%rcx),%ymm6,%ymm2
vperm2f128 $7,%ymm4,%ymm6,%ymm2
vperm2f128 $7,(%rcx),%ymm6,%ymm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufpd $7,(%rcx),%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vshufps $7,(%rcx),%ymm6,%ymm2
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, (%rcx), %ymm5, %ymm6
vgf2p8affineqb $123, -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8affineqb $123, 4064(%rdx), %ymm5, %ymm6
vgf2p8affineqb $123, 4096(%rdx), %ymm5, %ymm6
vgf2p8affineqb $123, -4096(%rdx), %ymm5, %ymm6
vgf2p8affineqb $123, -4128(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, (%rcx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -123456(%rax,%r14,8), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4064(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4096(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4096(%rdx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4128(%rdx), %ymm5, %ymm6
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd %ymm4,%ymm6,%ymm2,%ymm7
vblendvpd %ymm4,(%rcx),%ymm2,%ymm7
vblendvps %ymm4,%ymm6,%ymm2,%ymm7
vblendvps %ymm4,(%rcx),%ymm2,%ymm7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 $7,%xmm4,%ymm4,%ymm6
vinsertf128 $7,(%rcx),%ymm4,%ymm6
# Tests for op imm8, ymm, xmm/mem128
vextractf128 $7,%ymm4,%xmm4
vextractf128 $7,%ymm4,(%rcx)
# Tests for op mem128, ymm
vbroadcastf128 (%rcx),%ymm4
# Tests for op xmm/mem128, xmm
vcvtdq2ps %xmm4,%xmm6
vcvtdq2ps (%rcx),%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqx (%rcx),%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psx (%rcx),%xmm4
vcvtps2dq %xmm4,%xmm6
vcvtps2dq (%rcx),%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqx (%rcx),%xmm4
vcvttps2dq %xmm4,%xmm6
vcvttps2dq (%rcx),%xmm4
vmovapd %xmm4,%xmm6
vmovapd (%rcx),%xmm4
vmovaps %xmm4,%xmm6
vmovaps (%rcx),%xmm4
vmovdqa %xmm4,%xmm6
vmovdqa (%rcx),%xmm4
vmovdqu %xmm4,%xmm6
vmovdqu (%rcx),%xmm4
vmovshdup %xmm4,%xmm6
vmovshdup (%rcx),%xmm4
vmovsldup %xmm4,%xmm6
vmovsldup (%rcx),%xmm4
vmovupd %xmm4,%xmm6
vmovupd (%rcx),%xmm4
vmovups %xmm4,%xmm6
vmovups (%rcx),%xmm4
vpabsb %xmm4,%xmm6
vpabsb (%rcx),%xmm4
vpabsw %xmm4,%xmm6
vpabsw (%rcx),%xmm4
vpabsd %xmm4,%xmm6
vpabsd (%rcx),%xmm4
vphminposuw %xmm4,%xmm6
vphminposuw (%rcx),%xmm4
vptest %xmm4,%xmm6
vptest (%rcx),%xmm4
vtestps %xmm4,%xmm6
vtestps (%rcx),%xmm4
vtestpd %xmm4,%xmm6
vtestpd (%rcx),%xmm4
vrcpps %xmm4,%xmm6
vrcpps (%rcx),%xmm4
vrsqrtps %xmm4,%xmm6
vrsqrtps (%rcx),%xmm4
vsqrtpd %xmm4,%xmm6
vsqrtpd (%rcx),%xmm4
vsqrtps %xmm4,%xmm6
vsqrtps (%rcx),%xmm4
vaesimc %xmm4,%xmm6
vaesimc (%rcx),%xmm4
# Tests for op xmm, xmm/mem128
vmovapd %xmm4,%xmm6
vmovapd %xmm4,(%rcx)
vmovaps %xmm4,%xmm6
vmovaps %xmm4,(%rcx)
vmovdqa %xmm4,%xmm6
vmovdqa %xmm4,(%rcx)
vmovdqu %xmm4,%xmm6
vmovdqu %xmm4,(%rcx)
vmovupd %xmm4,%xmm6
vmovupd %xmm4,(%rcx)
vmovups %xmm4,%xmm6
vmovups %xmm4,(%rcx)
# Tests for op mem128, xmm
vlddqu (%rcx),%xmm4
vmovntdqa (%rcx),%xmm4
# Tests for op xmm, mem128
vmovntdq %xmm4,(%rcx)
vmovntpd %xmm4,(%rcx)
vmovntps %xmm4,(%rcx)
# Tests for op xmm/mem128, ymm
vcvtdq2pd %xmm4,%ymm4
vcvtdq2pd (%rcx),%ymm4
vcvtps2pd %xmm4,%ymm4
vcvtps2pd (%rcx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vaddpd %xmm4,%xmm6,%xmm2
vaddpd (%rcx),%xmm6,%xmm7
vaddps %xmm4,%xmm6,%xmm2
vaddps (%rcx),%xmm6,%xmm7
vaddsubpd %xmm4,%xmm6,%xmm2
vaddsubpd (%rcx),%xmm6,%xmm7
vaddsubps %xmm4,%xmm6,%xmm2
vaddsubps (%rcx),%xmm6,%xmm7
vandnpd %xmm4,%xmm6,%xmm2
vandnpd (%rcx),%xmm6,%xmm7
vandnps %xmm4,%xmm6,%xmm2
vandnps (%rcx),%xmm6,%xmm7
vandpd %xmm4,%xmm6,%xmm2
vandpd (%rcx),%xmm6,%xmm7
vandps %xmm4,%xmm6,%xmm2
vandps (%rcx),%xmm6,%xmm7
vdivpd %xmm4,%xmm6,%xmm2
vdivpd (%rcx),%xmm6,%xmm7
vdivps %xmm4,%xmm6,%xmm2
vdivps (%rcx),%xmm6,%xmm7
vhaddpd %xmm4,%xmm6,%xmm2
vhaddpd (%rcx),%xmm6,%xmm7
vhaddps %xmm4,%xmm6,%xmm2
vhaddps (%rcx),%xmm6,%xmm7
vhsubpd %xmm4,%xmm6,%xmm2
vhsubpd (%rcx),%xmm6,%xmm7
vhsubps %xmm4,%xmm6,%xmm2
vhsubps (%rcx),%xmm6,%xmm7
vmaxpd %xmm4,%xmm6,%xmm2
vmaxpd (%rcx),%xmm6,%xmm7
vmaxps %xmm4,%xmm6,%xmm2
vmaxps (%rcx),%xmm6,%xmm7
vminpd %xmm4,%xmm6,%xmm2
vminpd (%rcx),%xmm6,%xmm7
vminps %xmm4,%xmm6,%xmm2
vminps (%rcx),%xmm6,%xmm7
vmulpd %xmm4,%xmm6,%xmm2
vmulpd (%rcx),%xmm6,%xmm7
vmulps %xmm4,%xmm6,%xmm2
vmulps (%rcx),%xmm6,%xmm7
vorpd %xmm4,%xmm6,%xmm2
vorpd (%rcx),%xmm6,%xmm7
vorps %xmm4,%xmm6,%xmm2
vorps (%rcx),%xmm6,%xmm7
vpacksswb %xmm4,%xmm6,%xmm2
vpacksswb (%rcx),%xmm6,%xmm7
vpackssdw %xmm4,%xmm6,%xmm2
vpackssdw (%rcx),%xmm6,%xmm7
vpackuswb %xmm4,%xmm6,%xmm2
vpackuswb (%rcx),%xmm6,%xmm7
vpackusdw %xmm4,%xmm6,%xmm2
vpackusdw (%rcx),%xmm6,%xmm7
vpaddb %xmm4,%xmm6,%xmm2
vpaddb (%rcx),%xmm6,%xmm7
vpaddw %xmm4,%xmm6,%xmm2
vpaddw (%rcx),%xmm6,%xmm7
vpaddd %xmm4,%xmm6,%xmm2
vpaddd (%rcx),%xmm6,%xmm7
vpaddq %xmm4,%xmm6,%xmm2
vpaddq (%rcx),%xmm6,%xmm7
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsb (%rcx),%xmm6,%xmm7
vpaddsw %xmm4,%xmm6,%xmm2
vpaddsw (%rcx),%xmm6,%xmm7
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusb (%rcx),%xmm6,%xmm7
vpaddusw %xmm4,%xmm6,%xmm2
vpaddusw (%rcx),%xmm6,%xmm7
vpand %xmm4,%xmm6,%xmm2
vpand (%rcx),%xmm6,%xmm7
vpandn %xmm4,%xmm6,%xmm2
vpandn (%rcx),%xmm6,%xmm7
vpavgb %xmm4,%xmm6,%xmm2
vpavgb (%rcx),%xmm6,%xmm7
vpavgw %xmm4,%xmm6,%xmm2
vpavgw (%rcx),%xmm6,%xmm7
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq (%rcx),%xmm6,%xmm7
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq (%rcx),%xmm6,%xmm7
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq (%rcx),%xmm6,%xmm7
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqhqdq (%rcx),%xmm6,%xmm7
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqb (%rcx),%xmm6,%xmm7
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpeqw (%rcx),%xmm6,%xmm7
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqd (%rcx),%xmm6,%xmm7
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqq (%rcx),%xmm6,%xmm7
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtb (%rcx),%xmm6,%xmm7
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpgtw (%rcx),%xmm6,%xmm7
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtd (%rcx),%xmm6,%xmm7
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtq (%rcx),%xmm6,%xmm7
vpermilpd %xmm4,%xmm6,%xmm2
vpermilpd (%rcx),%xmm6,%xmm7
vpermilps %xmm4,%xmm6,%xmm2
vpermilps (%rcx),%xmm6,%xmm7
vphaddw %xmm4,%xmm6,%xmm2
vphaddw (%rcx),%xmm6,%xmm7
vphaddd %xmm4,%xmm6,%xmm2
vphaddd (%rcx),%xmm6,%xmm7
vphaddsw %xmm4,%xmm6,%xmm2
vphaddsw (%rcx),%xmm6,%xmm7
vphsubw %xmm4,%xmm6,%xmm2
vphsubw (%rcx),%xmm6,%xmm7
vphsubd %xmm4,%xmm6,%xmm2
vphsubd (%rcx),%xmm6,%xmm7
vphsubsw %xmm4,%xmm6,%xmm2
vphsubsw (%rcx),%xmm6,%xmm7
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaddwd (%rcx),%xmm6,%xmm7
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddubsw (%rcx),%xmm6,%xmm7
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsb (%rcx),%xmm6,%xmm7
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxsw (%rcx),%xmm6,%xmm7
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsd (%rcx),%xmm6,%xmm7
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxub (%rcx),%xmm6,%xmm7
vpmaxuw %xmm4,%xmm6,%xmm2
vpmaxuw (%rcx),%xmm6,%xmm7
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxud (%rcx),%xmm6,%xmm7
vpminsb %xmm4,%xmm6,%xmm2
vpminsb (%rcx),%xmm6,%xmm7
vpminsw %xmm4,%xmm6,%xmm2
vpminsw (%rcx),%xmm6,%xmm7
vpminsd %xmm4,%xmm6,%xmm2
vpminsd (%rcx),%xmm6,%xmm7
vpminub %xmm4,%xmm6,%xmm2
vpminub (%rcx),%xmm6,%xmm7
vpminuw %xmm4,%xmm6,%xmm2
vpminuw (%rcx),%xmm6,%xmm7
vpminud %xmm4,%xmm6,%xmm2
vpminud (%rcx),%xmm6,%xmm7
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhuw (%rcx),%xmm6,%xmm7
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhrsw (%rcx),%xmm6,%xmm7
vpmulhw %xmm4,%xmm6,%xmm2
vpmulhw (%rcx),%xmm6,%xmm7
vpmullw %xmm4,%xmm6,%xmm2
vpmullw (%rcx),%xmm6,%xmm7
vpmulld %xmm4,%xmm6,%xmm2
vpmulld (%rcx),%xmm6,%xmm7
vpmuludq %xmm4,%xmm6,%xmm2
vpmuludq (%rcx),%xmm6,%xmm7
vpmuldq %xmm4,%xmm6,%xmm2
vpmuldq (%rcx),%xmm6,%xmm7
vpor %xmm4,%xmm6,%xmm2
vpor (%rcx),%xmm6,%xmm7
vpsadbw %xmm4,%xmm6,%xmm2
vpsadbw (%rcx),%xmm6,%xmm7
vpshufb %xmm4,%xmm6,%xmm2
vpshufb (%rcx),%xmm6,%xmm7
vpsignb %xmm4,%xmm6,%xmm2
vpsignb (%rcx),%xmm6,%xmm7
vpsignw %xmm4,%xmm6,%xmm2
vpsignw (%rcx),%xmm6,%xmm7
vpsignd %xmm4,%xmm6,%xmm2
vpsignd (%rcx),%xmm6,%xmm7
vpsllw %xmm4,%xmm6,%xmm2
vpsllw (%rcx),%xmm6,%xmm7
vpslld %xmm4,%xmm6,%xmm2
vpslld (%rcx),%xmm6,%xmm7
vpsllq %xmm4,%xmm6,%xmm2
vpsllq (%rcx),%xmm6,%xmm7
vpsraw %xmm4,%xmm6,%xmm2
vpsraw (%rcx),%xmm6,%xmm7
vpsrad %xmm4,%xmm6,%xmm2
vpsrad (%rcx),%xmm6,%xmm7
vpsrlw %xmm4,%xmm6,%xmm2
vpsrlw (%rcx),%xmm6,%xmm7
vpsrld %xmm4,%xmm6,%xmm2
vpsrld (%rcx),%xmm6,%xmm7
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlq (%rcx),%xmm6,%xmm7
vpsubb %xmm4,%xmm6,%xmm2
vpsubb (%rcx),%xmm6,%xmm7
vpsubw %xmm4,%xmm6,%xmm2
vpsubw (%rcx),%xmm6,%xmm7
vpsubd %xmm4,%xmm6,%xmm2
vpsubd (%rcx),%xmm6,%xmm7
vpsubq %xmm4,%xmm6,%xmm2
vpsubq (%rcx),%xmm6,%xmm7
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsb (%rcx),%xmm6,%xmm7
vpsubsw %xmm4,%xmm6,%xmm2
vpsubsw (%rcx),%xmm6,%xmm7
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusb (%rcx),%xmm6,%xmm7
vpsubusw %xmm4,%xmm6,%xmm2
vpsubusw (%rcx),%xmm6,%xmm7
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhbw (%rcx),%xmm6,%xmm7
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpckhwd (%rcx),%xmm6,%xmm7
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhdq (%rcx),%xmm6,%xmm7
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhqdq (%rcx),%xmm6,%xmm7
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpcklbw (%rcx),%xmm6,%xmm7
vpunpcklwd %xmm4,%xmm6,%xmm2
vpunpcklwd (%rcx),%xmm6,%xmm7
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpckldq (%rcx),%xmm6,%xmm7
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklqdq (%rcx),%xmm6,%xmm7
vpxor %xmm4,%xmm6,%xmm2
vpxor (%rcx),%xmm6,%xmm7
vsubpd %xmm4,%xmm6,%xmm2
vsubpd (%rcx),%xmm6,%xmm7
vsubps %xmm4,%xmm6,%xmm2
vsubps (%rcx),%xmm6,%xmm7
vunpckhpd %xmm4,%xmm6,%xmm2
vunpckhpd (%rcx),%xmm6,%xmm7
vunpckhps %xmm4,%xmm6,%xmm2
vunpckhps (%rcx),%xmm6,%xmm7
vunpcklpd %xmm4,%xmm6,%xmm2
vunpcklpd (%rcx),%xmm6,%xmm7
vunpcklps %xmm4,%xmm6,%xmm2
vunpcklps (%rcx),%xmm6,%xmm7
vxorpd %xmm4,%xmm6,%xmm2
vxorpd (%rcx),%xmm6,%xmm7
vxorps %xmm4,%xmm6,%xmm2
vxorps (%rcx),%xmm6,%xmm7
vaesenc %xmm4,%xmm6,%xmm2
vaesenc (%rcx),%xmm6,%xmm7
vaesenclast %xmm4,%xmm6,%xmm2
vaesenclast (%rcx),%xmm6,%xmm7
vaesdec %xmm4,%xmm6,%xmm2
vaesdec (%rcx),%xmm6,%xmm7
vaesdeclast %xmm4,%xmm6,%xmm2
vaesdeclast (%rcx),%xmm6,%xmm7
vcmpeqpd %xmm4,%xmm6,%xmm2
vcmpeqpd (%rcx),%xmm6,%xmm7
vcmpltpd %xmm4,%xmm6,%xmm2
vcmpltpd (%rcx),%xmm6,%xmm7
vcmplepd %xmm4,%xmm6,%xmm2
vcmplepd (%rcx),%xmm6,%xmm7
vcmpunordpd %xmm4,%xmm6,%xmm2
vcmpunordpd (%rcx),%xmm6,%xmm7
vcmpneqpd %xmm4,%xmm6,%xmm2
vcmpneqpd (%rcx),%xmm6,%xmm7
vcmpnltpd %xmm4,%xmm6,%xmm2
vcmpnltpd (%rcx),%xmm6,%xmm7
vcmpnlepd %xmm4,%xmm6,%xmm2
vcmpnlepd (%rcx),%xmm6,%xmm7
vcmpordpd %xmm4,%xmm6,%xmm2
vcmpordpd (%rcx),%xmm6,%xmm7
vcmpeq_uqpd %xmm4,%xmm6,%xmm2
vcmpeq_uqpd (%rcx),%xmm6,%xmm7
vcmpngepd %xmm4,%xmm6,%xmm2
vcmpngepd (%rcx),%xmm6,%xmm7
vcmpngtpd %xmm4,%xmm6,%xmm2
vcmpngtpd (%rcx),%xmm6,%xmm7
vcmpfalsepd %xmm4,%xmm6,%xmm2
vcmpfalsepd (%rcx),%xmm6,%xmm7
vcmpneq_oqpd %xmm4,%xmm6,%xmm2
vcmpneq_oqpd (%rcx),%xmm6,%xmm7
vcmpgepd %xmm4,%xmm6,%xmm2
vcmpgepd (%rcx),%xmm6,%xmm7
vcmpgtpd %xmm4,%xmm6,%xmm2
vcmpgtpd (%rcx),%xmm6,%xmm7
vcmptruepd %xmm4,%xmm6,%xmm2
vcmptruepd (%rcx),%xmm6,%xmm7
vcmpeq_ospd %xmm4,%xmm6,%xmm2
vcmpeq_ospd (%rcx),%xmm6,%xmm7
vcmplt_oqpd %xmm4,%xmm6,%xmm2
vcmplt_oqpd (%rcx),%xmm6,%xmm7
vcmple_oqpd %xmm4,%xmm6,%xmm2
vcmple_oqpd (%rcx),%xmm6,%xmm7
vcmpunord_spd %xmm4,%xmm6,%xmm2
vcmpunord_spd (%rcx),%xmm6,%xmm7
vcmpneq_uspd %xmm4,%xmm6,%xmm2
vcmpneq_uspd (%rcx),%xmm6,%xmm7
vcmpnlt_uqpd %xmm4,%xmm6,%xmm2
vcmpnlt_uqpd (%rcx),%xmm6,%xmm7
vcmpnle_uqpd %xmm4,%xmm6,%xmm2
vcmpnle_uqpd (%rcx),%xmm6,%xmm7
vcmpord_spd %xmm4,%xmm6,%xmm2
vcmpord_spd (%rcx),%xmm6,%xmm7
vcmpeq_uspd %xmm4,%xmm6,%xmm2
vcmpeq_uspd (%rcx),%xmm6,%xmm7
vcmpnge_uqpd %xmm4,%xmm6,%xmm2
vcmpnge_uqpd (%rcx),%xmm6,%xmm7
vcmpngt_uqpd %xmm4,%xmm6,%xmm2
vcmpngt_uqpd (%rcx),%xmm6,%xmm7
vcmpfalse_ospd %xmm4,%xmm6,%xmm2
vcmpfalse_ospd (%rcx),%xmm6,%xmm7
vcmpneq_ospd %xmm4,%xmm6,%xmm2
vcmpneq_ospd (%rcx),%xmm6,%xmm7
vcmpge_oqpd %xmm4,%xmm6,%xmm2
vcmpge_oqpd (%rcx),%xmm6,%xmm7
vcmpgt_oqpd %xmm4,%xmm6,%xmm2
vcmpgt_oqpd (%rcx),%xmm6,%xmm7
vcmptrue_uspd %xmm4,%xmm6,%xmm2
vcmptrue_uspd (%rcx),%xmm6,%xmm7
vcmpeqps %xmm4,%xmm6,%xmm2
vcmpeqps (%rcx),%xmm6,%xmm7
vcmpltps %xmm4,%xmm6,%xmm2
vcmpltps (%rcx),%xmm6,%xmm7
vcmpleps %xmm4,%xmm6,%xmm2
vcmpleps (%rcx),%xmm6,%xmm7
vcmpunordps %xmm4,%xmm6,%xmm2
vcmpunordps (%rcx),%xmm6,%xmm7
vcmpneqps %xmm4,%xmm6,%xmm2
vcmpneqps (%rcx),%xmm6,%xmm7
vcmpnltps %xmm4,%xmm6,%xmm2
vcmpnltps (%rcx),%xmm6,%xmm7
vcmpnleps %xmm4,%xmm6,%xmm2
vcmpnleps (%rcx),%xmm6,%xmm7
vcmpordps %xmm4,%xmm6,%xmm2
vcmpordps (%rcx),%xmm6,%xmm7
vcmpeq_uqps %xmm4,%xmm6,%xmm2
vcmpeq_uqps (%rcx),%xmm6,%xmm7
vcmpngeps %xmm4,%xmm6,%xmm2
vcmpngeps (%rcx),%xmm6,%xmm7
vcmpngtps %xmm4,%xmm6,%xmm2
vcmpngtps (%rcx),%xmm6,%xmm7
vcmpfalseps %xmm4,%xmm6,%xmm2
vcmpfalseps (%rcx),%xmm6,%xmm7
vcmpneq_oqps %xmm4,%xmm6,%xmm2
vcmpneq_oqps (%rcx),%xmm6,%xmm7
vcmpgeps %xmm4,%xmm6,%xmm2
vcmpgeps (%rcx),%xmm6,%xmm7
vcmpgtps %xmm4,%xmm6,%xmm2
vcmpgtps (%rcx),%xmm6,%xmm7
vcmptrueps %xmm4,%xmm6,%xmm2
vcmptrueps (%rcx),%xmm6,%xmm7
vcmpeq_osps %xmm4,%xmm6,%xmm2
vcmpeq_osps (%rcx),%xmm6,%xmm7
vcmplt_oqps %xmm4,%xmm6,%xmm2
vcmplt_oqps (%rcx),%xmm6,%xmm7
vcmple_oqps %xmm4,%xmm6,%xmm2
vcmple_oqps (%rcx),%xmm6,%xmm7
vcmpunord_sps %xmm4,%xmm6,%xmm2
vcmpunord_sps (%rcx),%xmm6,%xmm7
vcmpneq_usps %xmm4,%xmm6,%xmm2
vcmpneq_usps (%rcx),%xmm6,%xmm7
vcmpnlt_uqps %xmm4,%xmm6,%xmm2
vcmpnlt_uqps (%rcx),%xmm6,%xmm7
vcmpnle_uqps %xmm4,%xmm6,%xmm2
vcmpnle_uqps (%rcx),%xmm6,%xmm7
vcmpord_sps %xmm4,%xmm6,%xmm2
vcmpord_sps (%rcx),%xmm6,%xmm7
vcmpeq_usps %xmm4,%xmm6,%xmm2
vcmpeq_usps (%rcx),%xmm6,%xmm7
vcmpnge_uqps %xmm4,%xmm6,%xmm2
vcmpnge_uqps (%rcx),%xmm6,%xmm7
vcmpngt_uqps %xmm4,%xmm6,%xmm2
vcmpngt_uqps (%rcx),%xmm6,%xmm7
vcmpfalse_osps %xmm4,%xmm6,%xmm2
vcmpfalse_osps (%rcx),%xmm6,%xmm7
vcmpneq_osps %xmm4,%xmm6,%xmm2
vcmpneq_osps (%rcx),%xmm6,%xmm7
vcmpge_oqps %xmm4,%xmm6,%xmm2
vcmpge_oqps (%rcx),%xmm6,%xmm7
vcmpgt_oqps %xmm4,%xmm6,%xmm2
vcmpgt_oqps (%rcx),%xmm6,%xmm7
vcmptrue_usps %xmm4,%xmm6,%xmm2
vcmptrue_usps (%rcx),%xmm6,%xmm7
vgf2p8mulb %xmm4, %xmm5, %xmm6
vgf2p8mulb (%rcx), %xmm5, %xmm6
vgf2p8mulb -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8mulb 2032(%rdx), %xmm5, %xmm6
vgf2p8mulb 2048(%rdx), %xmm5, %xmm6
vgf2p8mulb -2048(%rdx), %xmm5, %xmm6
vgf2p8mulb -2064(%rdx), %xmm5, %xmm6
# Tests for op mem128, xmm, xmm
vmaskmovps (%rcx),%xmm4,%xmm6
vmaskmovpd (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist $7,%xmm4,%xmm6
vaeskeygenassist $7,(%rcx),%xmm6
vpcmpestri $7,%xmm4,%xmm6
vpcmpestri $7,(%rcx),%xmm6
vpcmpestriq $7,%xmm4,%xmm6
vpcmpestril $7,(%rcx),%xmm6
vpcmpestrm $7,%xmm4,%xmm6
vpcmpestrm $7,(%rcx),%xmm6
vpcmpestrmq $7,%xmm4,%xmm6
vpcmpestrml $7,(%rcx),%xmm6
vpcmpistri $7,%xmm4,%xmm6
vpcmpistri $7,(%rcx),%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpcmpistrm $7,(%rcx),%xmm6
vpermilpd $7,%xmm4,%xmm6
vpermilpd $7,(%rcx),%xmm6
vpermilps $7,%xmm4,%xmm6
vpermilps $7,(%rcx),%xmm6
vpshufd $7,%xmm4,%xmm6
vpshufd $7,(%rcx),%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshufhw $7,(%rcx),%xmm6
vpshuflw $7,%xmm4,%xmm6
vpshuflw $7,(%rcx),%xmm6
vroundpd $7,%xmm4,%xmm6
vroundpd $7,(%rcx),%xmm6
vroundps $7,%xmm4,%xmm6
vroundps $7,(%rcx),%xmm6
# Tests for op xmm, xmm, mem128
vmaskmovps %xmm4,%xmm6,(%rcx)
vmaskmovpd %xmm4,%xmm6,(%rcx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd $7,%xmm4,%xmm6,%xmm2
vblendpd $7,(%rcx),%xmm6,%xmm2
vblendps $7,%xmm4,%xmm6,%xmm2
vblendps $7,(%rcx),%xmm6,%xmm2
vcmppd $7,%xmm4,%xmm6,%xmm2
vcmppd $7,(%rcx),%xmm6,%xmm2
vcmpps $7,%xmm4,%xmm6,%xmm2
vcmpps $7,(%rcx),%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdppd $7,(%rcx),%xmm6,%xmm2
vdpps $7,%xmm4,%xmm6,%xmm2
vdpps $7,(%rcx),%xmm6,%xmm2
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmpsadbw $7,(%rcx),%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpalignr $7,(%rcx),%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpblendw $7,(%rcx),%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpclmulqdq $7,(%rcx),%xmm6,%xmm2
vshufpd $7,%xmm4,%xmm6,%xmm2
vshufpd $7,(%rcx),%xmm6,%xmm2
vshufps $7,%xmm4,%xmm6,%xmm2
vshufps $7,(%rcx),%xmm6,%xmm2
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, (%rcx), %xmm5, %xmm6
vgf2p8affineqb $123, -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8affineqb $123, 2032(%rdx), %xmm5, %xmm6
vgf2p8affineqb $123, 2048(%rdx), %xmm5, %xmm6
vgf2p8affineqb $123, -2048(%rdx), %xmm5, %xmm6
vgf2p8affineqb $123, -2064(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, (%rcx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -123456(%rax,%r14,8), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2032(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2048(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2048(%rdx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2064(%rdx), %xmm5, %xmm6
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd %xmm4,%xmm6,%xmm2,%xmm7
vblendvpd %xmm4,(%rcx),%xmm2,%xmm7
vblendvps %xmm4,%xmm6,%xmm2,%xmm7
vblendvps %xmm4,(%rcx),%xmm2,%xmm7
vpblendvb %xmm4,%xmm6,%xmm2,%xmm7
vpblendvb %xmm4,(%rcx),%xmm2,%xmm7
# Tests for op mem64, ymm
vbroadcastsd (%rcx),%ymm4
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%rcx),%xmm4
vcvtdq2pd %xmm4,%xmm6
vcvtdq2pd (%rcx),%xmm4
vcvtps2pd %xmm4,%xmm6
vcvtps2pd (%rcx),%xmm4
vmovddup %xmm4,%xmm6
vmovddup (%rcx),%xmm4
vpmovsxbw %xmm4,%xmm6
vpmovsxbw (%rcx),%xmm4
vpmovsxwd %xmm4,%xmm6
vpmovsxwd (%rcx),%xmm4
vpmovsxdq %xmm4,%xmm6
vpmovsxdq (%rcx),%xmm4
vpmovzxbw %xmm4,%xmm6
vpmovzxbw (%rcx),%xmm4
vpmovzxwd %xmm4,%xmm6
vpmovzxwd (%rcx),%xmm4
vpmovzxdq %xmm4,%xmm6
vpmovzxdq (%rcx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%rcx),%xmm4
# Tests for op mem64, xmm
vmovsd (%rcx),%xmm4
# Tests for op xmm, mem64
vmovlpd %xmm4,(%rcx)
vmovlps %xmm4,(%rcx)
vmovhpd %xmm4,(%rcx)
vmovhps %xmm4,(%rcx)
vmovsd %xmm4,(%rcx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovd %xmm4,%rcx
vmovd %rcx,%xmm4
vmovq %xmm4,%rcx
vmovq %rcx,%xmm4
vmovq %xmm4,(%rcx)
vmovq (%rcx),%xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%rcx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%rcx),%ecx
# Tests for op xmm/mem64, regq
vcvtsd2si %xmm4,%rcx
vcvtsd2si (%rcx),%rcx
vcvttsd2si %xmm4,%rcx
vcvttsd2si (%rcx),%rcx
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq %rcx,%xmm4,%xmm6
vcvtsi2sdq (%rcx),%xmm4,%xmm6
vcvtsi2ssq %rcx,%xmm4,%xmm6
vcvtsi2ssq (%rcx),%xmm4,%xmm6
# Tests for op imm8, regq/mem64, xmm, xmm
vpinsrq $7,%rcx,%xmm4,%xmm6
vpinsrq $7,(%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm, regq/mem64
vpextrq $7,%xmm4,%rcx
vpextrq $7,%xmm4,(%rcx)
# Tests for op mem64, xmm, xmm
vmovlpd (%rcx),%xmm4,%xmm6
vmovlps (%rcx),%xmm4,%xmm6
vmovhpd (%rcx),%xmm4,%xmm6
vmovhps (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%rcx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%rcx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%rcx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%rcx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%rcx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%rcx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%rcx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%rcx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%rcx),%xmm6,%xmm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%rcx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%rcx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%rcx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%rcx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%rcx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%rcx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%rcx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%rcx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%rcx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%rcx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%rcx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%rcx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%rcx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%rcx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%rcx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%rcx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%rcx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%rcx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%rcx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%rcx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%rcx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%rcx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%rcx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%rcx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%rcx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%rcx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%rcx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%rcx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%rcx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%rcx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%rcx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%rcx),%xmm6,%xmm2
# Tests for op mem32 (vldmxcsr/vstmxcsr access the 32-bit MXCSR register)
vldmxcsr (%rcx)
vstmxcsr (%rcx)
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%rcx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%rcx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%rcx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%rcx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%rcx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%rcx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%rcx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%rcx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%rcx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%rcx),%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%rcx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%rcx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%rcx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%rcx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%rcx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%rcx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%rcx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%rcx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%rcx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%rcx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%rcx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%rcx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%rcx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%rcx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%rcx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%rcx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%rcx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%rcx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%rcx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%rcx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%rcx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%rcx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%rcx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%rcx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%rcx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%rcx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%rcx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%rcx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%rcx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%rcx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%rcx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%rcx),%xmm6,%xmm2
# Tests for op mem32, ymm
vbroadcastss (%rcx),%ymm4
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%rcx),%xmm4
vpmovsxbd %xmm4,%xmm6
vpmovsxbd (%rcx),%xmm4
vpmovsxwq %xmm4,%xmm6
vpmovsxwq (%rcx),%xmm4
vpmovzxbd %xmm4,%xmm6
vpmovzxbd (%rcx),%xmm4
vpmovzxwq %xmm4,%xmm6
vpmovzxwq (%rcx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%rcx),%xmm4
# Tests for op mem32, xmm
vbroadcastss (%rcx),%xmm4
vmovss (%rcx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%rcx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd %xmm4,%ecx
vmovd %xmm4,(%rcx)
vmovd %ecx,%xmm4
vmovd (%rcx),%xmm4
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%rcx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%rcx),%ecx
# Tests for op xmm/mem32, regq
vcvtss2si %xmm4,%rcx
vcvtss2si (%rcx),%rcx
vcvttss2si %xmm4,%rcx
vcvttss2si (%rcx),%rcx
# Tests for op xmm, regq
vmovmskpd %xmm4,%rcx
vmovmskps %xmm4,%rcx
vpmovmskb %xmm4,%rcx
# Tests for op imm8, xmm, regq/mem32
vextractps $7,%xmm4,%rcx
vextractps $7,%xmm4,(%rcx)
# Tests for op imm8, xmm, regl/mem32
vpextrd $7,%xmm4,%ecx
vpextrd $7,%xmm4,(%rcx)
vextractps $7,%xmm4,%ecx
vextractps $7,%xmm4,(%rcx)
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd $7,%ecx,%xmm4,%xmm6
vpinsrd $7,(%rcx),%xmm4,%xmm6
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sdl (%rcx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ssl (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%rcx),%xmm6,%xmm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vinsertps $7,(%rcx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/m16, xmm
vpmovsxbq %xmm4,%xmm6
vpmovsxbq (%rcx),%xmm4
vpmovzxbq %xmm4,%xmm6
vpmovzxbq (%rcx),%xmm4
# Tests for op imm8, xmm, regl/mem16
vpextrw $7,%xmm4,%ecx
vpextrw $7,%xmm4,(%rcx)
# Tests for op imm8, xmm, regq/mem16
vpextrw $7,%xmm4,%rcx
vpextrw $7,%xmm4,(%rcx)
# Tests for op imm8, regl/regq/mem16, xmm, xmm
vpinsrw $7,%ecx,%xmm4,%xmm6
vpinsrw $7,(%rcx),%xmm4,%xmm6
vpinsrw $7,%rcx,%xmm4,%xmm6
vpinsrw $7,(%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm, regl/mem8
vpextrb $7,%xmm4,%ecx
vpextrb $7,%xmm4,(%rcx)
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb $7,%ecx,%xmm4,%xmm6
vpinsrb $7,(%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm, regq
vpextrw $7,%xmm4,%rcx
# Tests for op imm8, xmm, regq/mem8
vpextrb $7,%xmm4,%rcx
vpextrb $7,%xmm4,(%rcx)
# Tests for op xmm, xmm
vmaskmovdqu %xmm4,%xmm6
vmovq %xmm4,%xmm6
# Tests for op xmm, regl
vmovmskpd %xmm4,%ecx
vmovmskps %xmm4,%ecx
vpmovmskb %xmm4,%ecx
# Tests for op xmm, xmm, xmm
vmovhlps %xmm4,%xmm6,%xmm2
vmovlhps %xmm4,%xmm6,%xmm2
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
# Tests for op imm8, xmm, xmm
vpslld $7,%xmm4,%xmm6
vpslldq $7,%xmm4,%xmm6
vpsllq $7,%xmm4,%xmm6
vpsllw $7,%xmm4,%xmm6
vpsrad $7,%xmm4,%xmm6
vpsraw $7,%xmm4,%xmm6
vpsrld $7,%xmm4,%xmm6
vpsrldq $7,%xmm4,%xmm6
vpsrlq $7,%xmm4,%xmm6
vpsrlw $7,%xmm4,%xmm6
# Tests for op imm8, xmm, regl
vpextrw $7,%xmm4,%ecx
# Tests for op ymm, regl
vmovmskpd %ymm4,%ecx
vmovmskps %ymm4,%ecx
# Tests for op ymm, regq
vmovmskpd %ymm4,%rcx
vmovmskps %ymm4,%rcx
# Default instructions without suffixes.
# (no x/y suffix is given: gas infers the vector length — the VEX.L bit —
# from the %xmm/%ymm source register operand, so the suffix is optional
# for these narrowing conversions when the source is a register)
vcvtpd2dq %xmm4,%xmm6
vcvtpd2dq %ymm4,%xmm6
vcvtpd2ps %xmm4,%xmm6
vcvtpd2ps %ymm4,%xmm6
vcvttpd2dq %xmm4,%xmm6
vcvttpd2dq %ymm4,%xmm6
# Tests with different memory and register operands.
vldmxcsr 0x12345678
vmovdqa 0x12345678,%xmm8
vmovdqa %xmm8,0x12345678
vmovd %xmm8,0x12345678
vcvtsd2si 0x12345678,%r8d
vcvtdq2pd 0x12345678,%ymm8
vcvtpd2psy 0x12345678,%xmm8
vpavgb 0x12345678,%xmm8,%xmm15
vaeskeygenassist $7,0x12345678,%xmm8
vpextrb $7,%xmm8,0x12345678
vcvtsi2sdl 0x12345678,%xmm8,%xmm15
vpclmulqdq $7,0x12345678,%xmm8,%xmm15
vblendvps %xmm8,0x12345678,%xmm12,%xmm14
vpinsrb $7,0x12345678,%xmm8,%xmm15
vmovdqa 0x12345678,%ymm8
vmovdqa %ymm8,0x12345678
vpermilpd 0x12345678,%ymm8,%ymm15
vroundpd $7,0x12345678,%ymm8
vextractf128 $7,%ymm8,0x12345678
vperm2f128 $7,0x12345678,%ymm8,%ymm15
vblendvpd %ymm8,0x12345678,%ymm12,%ymm14
vldmxcsr (%rbp)
vmovdqa (%rbp),%xmm8
vmovdqa %xmm8,(%rbp)
vmovd %xmm8,(%rbp)
vcvtsd2si (%rbp),%r8d
vcvtdq2pd (%rbp),%ymm8
vcvtpd2psy (%rbp),%xmm8
vpavgb (%rbp),%xmm8,%xmm15
vaeskeygenassist $7,(%rbp),%xmm8
vpextrb $7,%xmm8,(%rbp)
vcvtsi2sdl (%rbp),%xmm8,%xmm15
vpclmulqdq $7,(%rbp),%xmm8,%xmm15
vblendvps %xmm8,(%rbp),%xmm12,%xmm14
vpinsrb $7,(%rbp),%xmm8,%xmm15
vmovdqa (%rbp),%ymm8
vmovdqa %ymm8,(%rbp)
vpermilpd (%rbp),%ymm8,%ymm15
vroundpd $7,(%rbp),%ymm8
vextractf128 $7,%ymm8,(%rbp)
vperm2f128 $7,(%rbp),%ymm8,%ymm15
vblendvpd %ymm8,(%rbp),%ymm12,%ymm14
vldmxcsr (%rsp)
vmovdqa (%rsp),%xmm8
vmovdqa %xmm8,(%rsp)
vmovd %xmm8,(%rsp)
vcvtsd2si (%rsp),%r8d
vcvtdq2pd (%rsp),%ymm8
vcvtpd2psy (%rsp),%xmm8
vpavgb (%rsp),%xmm8,%xmm15
vaeskeygenassist $7,(%rsp),%xmm8
vpextrb $7,%xmm8,(%rsp)
vcvtsi2sdl (%rsp),%xmm8,%xmm15
vpclmulqdq $7,(%rsp),%xmm8,%xmm15
vblendvps %xmm8,(%rsp),%xmm12,%xmm14
vpinsrb $7,(%rsp),%xmm8,%xmm15
vmovdqa (%rsp),%ymm8
vmovdqa %ymm8,(%rsp)
vpermilpd (%rsp),%ymm8,%ymm15
vroundpd $7,(%rsp),%ymm8
vextractf128 $7,%ymm8,(%rsp)
vperm2f128 $7,(%rsp),%ymm8,%ymm15
vblendvpd %ymm8,(%rsp),%ymm12,%ymm14
vldmxcsr 0x99(%rbp)
vmovdqa 0x99(%rbp),%xmm8
vmovdqa %xmm8,0x99(%rbp)
vmovd %xmm8,0x99(%rbp)
vcvtsd2si 0x99(%rbp),%r8d
vcvtdq2pd 0x99(%rbp),%ymm8
vcvtpd2psy 0x99(%rbp),%xmm8
vpavgb 0x99(%rbp),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%rbp),%xmm8
vpextrb $7,%xmm8,0x99(%rbp)
vcvtsi2sdl 0x99(%rbp),%xmm8,%xmm15
vpclmulqdq $7,0x99(%rbp),%xmm8,%xmm15
vblendvps %xmm8,0x99(%rbp),%xmm12,%xmm14
vpinsrb $7,0x99(%rbp),%xmm8,%xmm15
vmovdqa 0x99(%rbp),%ymm8
vmovdqa %ymm8,0x99(%rbp)
vpermilpd 0x99(%rbp),%ymm8,%ymm15
vroundpd $7,0x99(%rbp),%ymm8
vextractf128 $7,%ymm8,0x99(%rbp)
vperm2f128 $7,0x99(%rbp),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%rbp),%ymm12,%ymm14
vldmxcsr 0x99(%r15)
vmovdqa 0x99(%r15),%xmm8
vmovdqa %xmm8,0x99(%r15)
vmovd %xmm8,0x99(%r15)
vcvtsd2si 0x99(%r15),%r8d
vcvtdq2pd 0x99(%r15),%ymm8
vcvtpd2psy 0x99(%r15),%xmm8
vpavgb 0x99(%r15),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%r15),%xmm8
vpextrb $7,%xmm8,0x99(%r15)
vcvtsi2sdl 0x99(%r15),%xmm8,%xmm15
vpclmulqdq $7,0x99(%r15),%xmm8,%xmm15
vblendvps %xmm8,0x99(%r15),%xmm12,%xmm14
vpinsrb $7,0x99(%r15),%xmm8,%xmm15
vmovdqa 0x99(%r15),%ymm8
vmovdqa %ymm8,0x99(%r15)
vpermilpd 0x99(%r15),%ymm8,%ymm15
vroundpd $7,0x99(%r15),%ymm8
vextractf128 $7,%ymm8,0x99(%r15)
vperm2f128 $7,0x99(%r15),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%r15),%ymm12,%ymm14
vldmxcsr 0x99(%rip)
vmovdqa 0x99(%rip),%xmm8
vmovdqa %xmm8,0x99(%rip)
vmovd %xmm8,0x99(%rip)
vcvtsd2si 0x99(%rip),%r8d
vcvtdq2pd 0x99(%rip),%ymm8
vcvtpd2psy 0x99(%rip),%xmm8
vpavgb 0x99(%rip),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%rip),%xmm8
vpextrb $7,%xmm8,0x99(%rip)
vcvtsi2sdl 0x99(%rip),%xmm8,%xmm15
vpclmulqdq $7,0x99(%rip),%xmm8,%xmm15
vblendvps %xmm8,0x99(%rip),%xmm12,%xmm14
vpinsrb $7,0x99(%rip),%xmm8,%xmm15
vmovdqa 0x99(%rip),%ymm8
vmovdqa %ymm8,0x99(%rip)
vpermilpd 0x99(%rip),%ymm8,%ymm15
vroundpd $7,0x99(%rip),%ymm8
vextractf128 $7,%ymm8,0x99(%rip)
vperm2f128 $7,0x99(%rip),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%rip),%ymm12,%ymm14
vldmxcsr 0x99(%rsp)
vmovdqa 0x99(%rsp),%xmm8
vmovdqa %xmm8,0x99(%rsp)
vmovd %xmm8,0x99(%rsp)
vcvtsd2si 0x99(%rsp),%r8d
vcvtdq2pd 0x99(%rsp),%ymm8
vcvtpd2psy 0x99(%rsp),%xmm8
vpavgb 0x99(%rsp),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%rsp),%xmm8
vpextrb $7,%xmm8,0x99(%rsp)
vcvtsi2sdl 0x99(%rsp),%xmm8,%xmm15
vpclmulqdq $7,0x99(%rsp),%xmm8,%xmm15
vblendvps %xmm8,0x99(%rsp),%xmm12,%xmm14
vpinsrb $7,0x99(%rsp),%xmm8,%xmm15
vmovdqa 0x99(%rsp),%ymm8
vmovdqa %ymm8,0x99(%rsp)
vpermilpd 0x99(%rsp),%ymm8,%ymm15
vroundpd $7,0x99(%rsp),%ymm8
vextractf128 $7,%ymm8,0x99(%rsp)
vperm2f128 $7,0x99(%rsp),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%rsp),%ymm12,%ymm14
vldmxcsr 0x99(%r12)
vmovdqa 0x99(%r12),%xmm8
vmovdqa %xmm8,0x99(%r12)
vmovd %xmm8,0x99(%r12)
vcvtsd2si 0x99(%r12),%r8d
vcvtdq2pd 0x99(%r12),%ymm8
vcvtpd2psy 0x99(%r12),%xmm8
vpavgb 0x99(%r12),%xmm8,%xmm15
vaeskeygenassist $7,0x99(%r12),%xmm8
vpextrb $7,%xmm8,0x99(%r12)
vcvtsi2sdl 0x99(%r12),%xmm8,%xmm15
vpclmulqdq $7,0x99(%r12),%xmm8,%xmm15
vblendvps %xmm8,0x99(%r12),%xmm12,%xmm14
vpinsrb $7,0x99(%r12),%xmm8,%xmm15
vmovdqa 0x99(%r12),%ymm8
vmovdqa %ymm8,0x99(%r12)
vpermilpd 0x99(%r12),%ymm8,%ymm15
vroundpd $7,0x99(%r12),%ymm8
vextractf128 $7,%ymm8,0x99(%r12)
vperm2f128 $7,0x99(%r12),%ymm8,%ymm15
vblendvpd %ymm8,0x99(%r12),%ymm12,%ymm14
vldmxcsr -0x99(,%riz)
vmovdqa -0x99(,%riz),%xmm8
vmovdqa %xmm8,-0x99(,%riz)
vmovd %xmm8,-0x99(,%riz)
vcvtsd2si -0x99(,%riz),%r8d
vcvtdq2pd -0x99(,%riz),%ymm8
vcvtpd2psy -0x99(,%riz),%xmm8
vpavgb -0x99(,%riz),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(,%riz),%xmm8
vpextrb $7,%xmm8,-0x99(,%riz)
vcvtsi2sdl -0x99(,%riz),%xmm8,%xmm15
vpclmulqdq $7,-0x99(,%riz),%xmm8,%xmm15
vblendvps %xmm8,-0x99(,%riz),%xmm12,%xmm14
vpinsrb $7,-0x99(,%riz),%xmm8,%xmm15
vmovdqa -0x99(,%riz),%ymm8
vmovdqa %ymm8,-0x99(,%riz)
vpermilpd -0x99(,%riz),%ymm8,%ymm15
vroundpd $7,-0x99(,%riz),%ymm8
vextractf128 $7,%ymm8,-0x99(,%riz)
vperm2f128 $7,-0x99(,%riz),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(,%riz),%ymm12,%ymm14
vldmxcsr -0x99(,%riz,2)
vmovdqa -0x99(,%riz,2),%xmm8
vmovdqa %xmm8,-0x99(,%riz,2)
vmovd %xmm8,-0x99(,%riz,2)
vcvtsd2si -0x99(,%riz,2),%r8d
vcvtdq2pd -0x99(,%riz,2),%ymm8
vcvtpd2psy -0x99(,%riz,2),%xmm8
vpavgb -0x99(,%riz,2),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(,%riz,2),%xmm8
vpextrb $7,%xmm8,-0x99(,%riz,2)
vcvtsi2sdl -0x99(,%riz,2),%xmm8,%xmm15
vpclmulqdq $7,-0x99(,%riz,2),%xmm8,%xmm15
vblendvps %xmm8,-0x99(,%riz,2),%xmm12,%xmm14
vpinsrb $7,-0x99(,%riz,2),%xmm8,%xmm15
vmovdqa -0x99(,%riz,2),%ymm8
vmovdqa %ymm8,-0x99(,%riz,2)
vpermilpd -0x99(,%riz,2),%ymm8,%ymm15
vroundpd $7,-0x99(,%riz,2),%ymm8
vextractf128 $7,%ymm8,-0x99(,%riz,2)
vperm2f128 $7,-0x99(,%riz,2),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(,%riz,2),%ymm12,%ymm14
vldmxcsr -0x99(%rbx,%riz)
vmovdqa -0x99(%rbx,%riz),%xmm8
vmovdqa %xmm8,-0x99(%rbx,%riz)
vmovd %xmm8,-0x99(%rbx,%riz)
vcvtsd2si -0x99(%rbx,%riz),%r8d
vcvtdq2pd -0x99(%rbx,%riz),%ymm8
vcvtpd2psy -0x99(%rbx,%riz),%xmm8
vpavgb -0x99(%rbx,%riz),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rbx,%riz),%xmm8
vpextrb $7,%xmm8,-0x99(%rbx,%riz)
vcvtsi2sdl -0x99(%rbx,%riz),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rbx,%riz),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rbx,%riz),%xmm12,%xmm14
vpinsrb $7,-0x99(%rbx,%riz),%xmm8,%xmm15
vmovdqa -0x99(%rbx,%riz),%ymm8
vmovdqa %ymm8,-0x99(%rbx,%riz)
vpermilpd -0x99(%rbx,%riz),%ymm8,%ymm15
vroundpd $7,-0x99(%rbx,%riz),%ymm8
vextractf128 $7,%ymm8,-0x99(%rbx,%riz)
vperm2f128 $7,-0x99(%rbx,%riz),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rbx,%riz),%ymm12,%ymm14
vldmxcsr -0x99(%rbx,%riz,2)
vmovdqa -0x99(%rbx,%riz,2),%xmm8
vmovdqa %xmm8,-0x99(%rbx,%riz,2)
vmovd %xmm8,-0x99(%rbx,%riz,2)
vcvtsd2si -0x99(%rbx,%riz,2),%r8d
vcvtdq2pd -0x99(%rbx,%riz,2),%ymm8
vcvtpd2psy -0x99(%rbx,%riz,2),%xmm8
vpavgb -0x99(%rbx,%riz,2),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rbx,%riz,2),%xmm8
vpextrb $7,%xmm8,-0x99(%rbx,%riz,2)
vcvtsi2sdl -0x99(%rbx,%riz,2),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rbx,%riz,2),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rbx,%riz,2),%xmm12,%xmm14
vpinsrb $7,-0x99(%rbx,%riz,2),%xmm8,%xmm15
vmovdqa -0x99(%rbx,%riz,2),%ymm8
vmovdqa %ymm8,-0x99(%rbx,%riz,2)
vpermilpd -0x99(%rbx,%riz,2),%ymm8,%ymm15
vroundpd $7,-0x99(%rbx,%riz,2),%ymm8
vextractf128 $7,%ymm8,-0x99(%rbx,%riz,2)
vperm2f128 $7,-0x99(%rbx,%riz,2),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rbx,%riz,2),%ymm12,%ymm14
vldmxcsr -0x99(%r12,%r15,4)
vmovdqa -0x99(%r12,%r15,4),%xmm8
vmovdqa %xmm8,-0x99(%r12,%r15,4)
vmovd %xmm8,-0x99(%r12,%r15,4)
vcvtsd2si -0x99(%r12,%r15,4),%r8d
vcvtdq2pd -0x99(%r12,%r15,4),%ymm8
vcvtpd2psy -0x99(%r12,%r15,4),%xmm8
vpavgb -0x99(%r12,%r15,4),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%r12,%r15,4),%xmm8
vpextrb $7,%xmm8,-0x99(%r12,%r15,4)
vcvtsi2sdl -0x99(%r12,%r15,4),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%r12,%r15,4),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%r12,%r15,4),%xmm12,%xmm14
vpinsrb $7,-0x99(%r12,%r15,4),%xmm8,%xmm15
vmovdqa -0x99(%r12,%r15,4),%ymm8
vmovdqa %ymm8,-0x99(%r12,%r15,4)
vpermilpd -0x99(%r12,%r15,4),%ymm8,%ymm15
vroundpd $7,-0x99(%r12,%r15,4),%ymm8
vextractf128 $7,%ymm8,-0x99(%r12,%r15,4)
vperm2f128 $7,-0x99(%r12,%r15,4),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%r12,%r15,4),%ymm12,%ymm14
vldmxcsr -0x99(%r8,%r15,8)
vmovdqa -0x99(%r8,%r15,8),%xmm8
vmovdqa %xmm8,-0x99(%r8,%r15,8)
vmovd %xmm8,-0x99(%r8,%r15,8)
vcvtsd2si -0x99(%r8,%r15,8),%r8d
vcvtdq2pd -0x99(%r8,%r15,8),%ymm8
vcvtpd2psy -0x99(%r8,%r15,8),%xmm8
vpavgb -0x99(%r8,%r15,8),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%r8,%r15,8),%xmm8
vpextrb $7,%xmm8,-0x99(%r8,%r15,8)
vcvtsi2sdl -0x99(%r8,%r15,8),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%r8,%r15,8),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%r8,%r15,8),%xmm12,%xmm14
vpinsrb $7,-0x99(%r8,%r15,8),%xmm8,%xmm15
vmovdqa -0x99(%r8,%r15,8),%ymm8
vmovdqa %ymm8,-0x99(%r8,%r15,8)
vpermilpd -0x99(%r8,%r15,8),%ymm8,%ymm15
vroundpd $7,-0x99(%r8,%r15,8),%ymm8
vextractf128 $7,%ymm8,-0x99(%r8,%r15,8)
vperm2f128 $7,-0x99(%r8,%r15,8),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%r8,%r15,8),%ymm12,%ymm14
vldmxcsr -0x99(%rbp,%r13,4)
vmovdqa -0x99(%rbp,%r13,4),%xmm8
vmovdqa %xmm8,-0x99(%rbp,%r13,4)
vmovd %xmm8,-0x99(%rbp,%r13,4)
vcvtsd2si -0x99(%rbp,%r13,4),%r8d
vcvtdq2pd -0x99(%rbp,%r13,4),%ymm8
vcvtpd2psy -0x99(%rbp,%r13,4),%xmm8
vpavgb -0x99(%rbp,%r13,4),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rbp,%r13,4),%xmm8
vpextrb $7,%xmm8,-0x99(%rbp,%r13,4)
vcvtsi2sdl -0x99(%rbp,%r13,4),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rbp,%r13,4),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rbp,%r13,4),%xmm12,%xmm14
vpinsrb $7,-0x99(%rbp,%r13,4),%xmm8,%xmm15
vmovdqa -0x99(%rbp,%r13,4),%ymm8
vmovdqa %ymm8,-0x99(%rbp,%r13,4)
vpermilpd -0x99(%rbp,%r13,4),%ymm8,%ymm15
vroundpd $7,-0x99(%rbp,%r13,4),%ymm8
vextractf128 $7,%ymm8,-0x99(%rbp,%r13,4)
vperm2f128 $7,-0x99(%rbp,%r13,4),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rbp,%r13,4),%ymm12,%ymm14
vldmxcsr -0x99(%rsp,%r12,1)
vmovdqa -0x99(%rsp,%r12,1),%xmm8
vmovdqa %xmm8,-0x99(%rsp,%r12,1)
vmovd %xmm8,-0x99(%rsp,%r12,1)
vcvtsd2si -0x99(%rsp,%r12,1),%r8d
vcvtdq2pd -0x99(%rsp,%r12,1),%ymm8
vcvtpd2psy -0x99(%rsp,%r12,1),%xmm8
vpavgb -0x99(%rsp,%r12,1),%xmm8,%xmm15
vaeskeygenassist $7,-0x99(%rsp,%r12,1),%xmm8
vpextrb $7,%xmm8,-0x99(%rsp,%r12,1)
vcvtsi2sdl -0x99(%rsp,%r12,1),%xmm8,%xmm15
vpclmulqdq $7,-0x99(%rsp,%r12,1),%xmm8,%xmm15
vblendvps %xmm8,-0x99(%rsp,%r12,1),%xmm12,%xmm14
vpinsrb $7,-0x99(%rsp,%r12,1),%xmm8,%xmm15
vmovdqa -0x99(%rsp,%r12,1),%ymm8
vmovdqa %ymm8,-0x99(%rsp,%r12,1)
vpermilpd -0x99(%rsp,%r12,1),%ymm8,%ymm15
vroundpd $7,-0x99(%rsp,%r12,1),%ymm8
vextractf128 $7,%ymm8,-0x99(%rsp,%r12,1)
vperm2f128 $7,-0x99(%rsp,%r12,1),%ymm8,%ymm15
vblendvpd %ymm8,-0x99(%rsp,%r12,1),%ymm12,%ymm14
# Tests for all register operands.
# AT&T syntax.  Every instruction below uses only register operands
# (ModRM.mod == 3), covering one representative of each WIG encoding
# class already exercised above with memory forms: VEX.128/VEX.256,
# 0F / 0F38 / 0F3A maps, with and without an imm8, plus the
# register-in-imm8 form (vblendv*).
vmovmskpd %xmm8,%r8d
vpslld $7,%xmm8,%xmm15
vmovmskps %ymm8,%r8d
vmovdqa %xmm8,%xmm15
vmovd %xmm8,%r8d
vcvtsd2si %xmm8,%r8d
vcvtdq2pd %xmm8,%ymm8
vcvtpd2psy %ymm8,%xmm8
vaeskeygenassist $7,%xmm8,%xmm15
vpextrb $7,%xmm8,%r8d
vcvtsi2sdl %r8d,%xmm8,%xmm15
vpclmulqdq $7,%xmm8,%xmm15,%xmm12
vblendvps %xmm8,%xmm8,%xmm12,%xmm14
vpinsrb $7,%r8d,%xmm8,%xmm15
vmovdqa %ymm8,%ymm15
vpermilpd %ymm8,%ymm15,%ymm12
vroundpd $7,%ymm8,%ymm15
vextractf128 $7,%ymm8,%xmm8
vperm2f128 $7,%ymm8,%ymm15,%ymm12
vblendvpd %ymm8,%ymm15,%ymm12,%ymm14
vinsertf128 $7,%xmm8,%ymm8,%ymm15
# Tests for different memory/register operands.
# These pick 64-bit GPR destinations/sources (%r8), unlike the 32-bit
# forms above — NOTE(review): with an r64 operand these encode VEX.W=1,
# so they test the W-dependent variants alongside the WIG ones; confirm
# against the paired expected-dump file.
vcvtsd2si (%rcx),%r8
vextractps $10,%xmm8,%r8
vcvtss2si (%rcx),%r8
vpinsrw $7,%r8,%xmm15,%xmm8
# Switch to Intel syntax for the remainder of the file; each test is
# written both with an explicit size qualifier and without one, to
# check that gas infers the operand size from the instruction.
.intel_syntax noprefix
# Tests for op mem64
# NOTE(review): the operands below are DWORD PTR — MXCSR is a 32-bit
# register — so "mem64" in the legacy section label looks like a
# copy/paste leftover; the encodings themselves are unaffected.
vldmxcsr DWORD PTR [rcx]
vldmxcsr [rcx]
vstmxcsr DWORD PTR [rcx]
vstmxcsr [rcx]
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
# vmaskmov{pd,ps} are tested in both directions (masked load and
# masked store), each with and without the YMMWORD PTR qualifier.
vmaskmovpd ymm6,ymm4,YMMWORD PTR [rcx]
vmaskmovpd YMMWORD PTR [rcx],ymm6,ymm4
vmaskmovpd ymm6,ymm4,[rcx]
vmaskmovpd [rcx],ymm6,ymm4
vmaskmovps ymm6,ymm4,YMMWORD PTR [rcx]
vmaskmovps YMMWORD PTR [rcx],ymm6,ymm4
vmaskmovps ymm6,ymm4,[rcx]
vmaskmovps [rcx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermilpd ymm2,ymm6,7
vpermilpd ymm6,YMMWORD PTR [rcx],7
vpermilpd ymm6,[rcx],7
vpermilps ymm2,ymm6,7
vpermilps ymm6,YMMWORD PTR [rcx],7
vpermilps ymm6,[rcx],7
vroundpd ymm2,ymm6,7
vroundpd ymm6,YMMWORD PTR [rcx],7
vroundpd ymm6,[rcx],7
vroundps ymm2,ymm6,7
vroundps ymm6,YMMWORD PTR [rcx],7
vroundps ymm6,[rcx],7
# Tests for op ymm/mem256, ymm, ymm
vaddpd ymm2,ymm6,ymm4
vaddpd ymm2,ymm6,YMMWORD PTR [rcx]
vaddpd ymm2,ymm6,[rcx]
vaddps ymm2,ymm6,ymm4
vaddps ymm2,ymm6,YMMWORD PTR [rcx]
vaddps ymm2,ymm6,[rcx]
vaddsubpd ymm2,ymm6,ymm4
vaddsubpd ymm2,ymm6,YMMWORD PTR [rcx]
vaddsubpd ymm2,ymm6,[rcx]
vaddsubps ymm2,ymm6,ymm4
vaddsubps ymm2,ymm6,YMMWORD PTR [rcx]
vaddsubps ymm2,ymm6,[rcx]
vandnpd ymm2,ymm6,ymm4
vandnpd ymm2,ymm6,YMMWORD PTR [rcx]
vandnpd ymm2,ymm6,[rcx]
vandnps ymm2,ymm6,ymm4
vandnps ymm2,ymm6,YMMWORD PTR [rcx]
vandnps ymm2,ymm6,[rcx]
vandpd ymm2,ymm6,ymm4
vandpd ymm2,ymm6,YMMWORD PTR [rcx]
vandpd ymm2,ymm6,[rcx]
vandps ymm2,ymm6,ymm4
vandps ymm2,ymm6,YMMWORD PTR [rcx]
vandps ymm2,ymm6,[rcx]
vdivpd ymm2,ymm6,ymm4
vdivpd ymm2,ymm6,YMMWORD PTR [rcx]
vdivpd ymm2,ymm6,[rcx]
vdivps ymm2,ymm6,ymm4
vdivps ymm2,ymm6,YMMWORD PTR [rcx]
vdivps ymm2,ymm6,[rcx]
vhaddpd ymm2,ymm6,ymm4
vhaddpd ymm2,ymm6,YMMWORD PTR [rcx]
vhaddpd ymm2,ymm6,[rcx]
vhaddps ymm2,ymm6,ymm4
vhaddps ymm2,ymm6,YMMWORD PTR [rcx]
vhaddps ymm2,ymm6,[rcx]
vhsubpd ymm2,ymm6,ymm4
vhsubpd ymm2,ymm6,YMMWORD PTR [rcx]
vhsubpd ymm2,ymm6,[rcx]
vhsubps ymm2,ymm6,ymm4
vhsubps ymm2,ymm6,YMMWORD PTR [rcx]
vhsubps ymm2,ymm6,[rcx]
vmaxpd ymm2,ymm6,ymm4
vmaxpd ymm2,ymm6,YMMWORD PTR [rcx]
vmaxpd ymm2,ymm6,[rcx]
vmaxps ymm2,ymm6,ymm4
vmaxps ymm2,ymm6,YMMWORD PTR [rcx]
vmaxps ymm2,ymm6,[rcx]
vminpd ymm2,ymm6,ymm4
vminpd ymm2,ymm6,YMMWORD PTR [rcx]
vminpd ymm2,ymm6,[rcx]
vminps ymm2,ymm6,ymm4
vminps ymm2,ymm6,YMMWORD PTR [rcx]
vminps ymm2,ymm6,[rcx]
vmulpd ymm2,ymm6,ymm4
vmulpd ymm2,ymm6,YMMWORD PTR [rcx]
vmulpd ymm2,ymm6,[rcx]
vmulps ymm2,ymm6,ymm4
vmulps ymm2,ymm6,YMMWORD PTR [rcx]
vmulps ymm2,ymm6,[rcx]
vorpd ymm2,ymm6,ymm4
vorpd ymm2,ymm6,YMMWORD PTR [rcx]
vorpd ymm2,ymm6,[rcx]
vorps ymm2,ymm6,ymm4
vorps ymm2,ymm6,YMMWORD PTR [rcx]
vorps ymm2,ymm6,[rcx]
vpermilpd ymm2,ymm6,ymm4
vpermilpd ymm2,ymm6,YMMWORD PTR [rcx]
vpermilpd ymm2,ymm6,[rcx]
vpermilps ymm2,ymm6,ymm4
vpermilps ymm2,ymm6,YMMWORD PTR [rcx]
vpermilps ymm2,ymm6,[rcx]
vsubpd ymm2,ymm6,ymm4
vsubpd ymm2,ymm6,YMMWORD PTR [rcx]
vsubpd ymm2,ymm6,[rcx]
vsubps ymm2,ymm6,ymm4
vsubps ymm2,ymm6,YMMWORD PTR [rcx]
vsubps ymm2,ymm6,[rcx]
vunpckhpd ymm2,ymm6,ymm4
vunpckhpd ymm2,ymm6,YMMWORD PTR [rcx]
vunpckhpd ymm2,ymm6,[rcx]
vunpckhps ymm2,ymm6,ymm4
vunpckhps ymm2,ymm6,YMMWORD PTR [rcx]
vunpckhps ymm2,ymm6,[rcx]
vunpcklpd ymm2,ymm6,ymm4
vunpcklpd ymm2,ymm6,YMMWORD PTR [rcx]
vunpcklpd ymm2,ymm6,[rcx]
vunpcklps ymm2,ymm6,ymm4
vunpcklps ymm2,ymm6,YMMWORD PTR [rcx]
vunpcklps ymm2,ymm6,[rcx]
vxorpd ymm2,ymm6,ymm4
vxorpd ymm2,ymm6,YMMWORD PTR [rcx]
vxorpd ymm2,ymm6,[rcx]
vxorps ymm2,ymm6,ymm4
vxorps ymm2,ymm6,YMMWORD PTR [rcx]
vxorps ymm2,ymm6,[rcx]
vcmpeqpd ymm2,ymm6,ymm4
vcmpeqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeqpd ymm2,ymm6,[rcx]
vcmpltpd ymm2,ymm6,ymm4
vcmpltpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpltpd ymm2,ymm6,[rcx]
vcmplepd ymm2,ymm6,ymm4
vcmplepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmplepd ymm2,ymm6,[rcx]
vcmpunordpd ymm2,ymm6,ymm4
vcmpunordpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunordpd ymm2,ymm6,[rcx]
vcmpneqpd ymm2,ymm6,ymm4
vcmpneqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneqpd ymm2,ymm6,[rcx]
vcmpnltpd ymm2,ymm6,ymm4
vcmpnltpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnltpd ymm2,ymm6,[rcx]
vcmpnlepd ymm2,ymm6,ymm4
vcmpnlepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnlepd ymm2,ymm6,[rcx]
vcmpordpd ymm2,ymm6,ymm4
vcmpordpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpordpd ymm2,ymm6,[rcx]
vcmpeq_uqpd ymm2,ymm6,ymm4
vcmpeq_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_uqpd ymm2,ymm6,[rcx]
vcmpngepd ymm2,ymm6,ymm4
vcmpngepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngepd ymm2,ymm6,[rcx]
vcmpngtpd ymm2,ymm6,ymm4
vcmpngtpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngtpd ymm2,ymm6,[rcx]
vcmpfalsepd ymm2,ymm6,ymm4
vcmpfalsepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalsepd ymm2,ymm6,[rcx]
vcmpneq_oqpd ymm2,ymm6,ymm4
vcmpneq_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_oqpd ymm2,ymm6,[rcx]
vcmpgepd ymm2,ymm6,ymm4
vcmpgepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgepd ymm2,ymm6,[rcx]
vcmpgtpd ymm2,ymm6,ymm4
vcmpgtpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgtpd ymm2,ymm6,[rcx]
vcmptruepd ymm2,ymm6,ymm4
vcmptruepd ymm2,ymm6,YMMWORD PTR [rcx]
vcmptruepd ymm2,ymm6,[rcx]
vcmpeq_ospd ymm2,ymm6,ymm4
vcmpeq_ospd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_ospd ymm2,ymm6,[rcx]
vcmplt_oqpd ymm2,ymm6,ymm4
vcmplt_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmplt_oqpd ymm2,ymm6,[rcx]
vcmple_oqpd ymm2,ymm6,ymm4
vcmple_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmple_oqpd ymm2,ymm6,[rcx]
vcmpunord_spd ymm2,ymm6,ymm4
vcmpunord_spd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunord_spd ymm2,ymm6,[rcx]
vcmpneq_uspd ymm2,ymm6,ymm4
vcmpneq_uspd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_uspd ymm2,ymm6,[rcx]
vcmpnlt_uqpd ymm2,ymm6,ymm4
vcmpnlt_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnlt_uqpd ymm2,ymm6,[rcx]
vcmpnle_uqpd ymm2,ymm6,ymm4
vcmpnle_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnle_uqpd ymm2,ymm6,[rcx]
vcmpord_spd ymm2,ymm6,ymm4
vcmpord_spd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpord_spd ymm2,ymm6,[rcx]
vcmpeq_uspd ymm2,ymm6,ymm4
vcmpeq_uspd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_uspd ymm2,ymm6,[rcx]
vcmpnge_uqpd ymm2,ymm6,ymm4
vcmpnge_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnge_uqpd ymm2,ymm6,[rcx]
vcmpngt_uqpd ymm2,ymm6,ymm4
vcmpngt_uqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngt_uqpd ymm2,ymm6,[rcx]
vcmpfalse_ospd ymm2,ymm6,ymm4
vcmpfalse_ospd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalse_ospd ymm2,ymm6,[rcx]
vcmpneq_ospd ymm2,ymm6,ymm4
vcmpneq_ospd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_ospd ymm2,ymm6,[rcx]
vcmpge_oqpd ymm2,ymm6,ymm4
vcmpge_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpge_oqpd ymm2,ymm6,[rcx]
vcmpgt_oqpd ymm2,ymm6,ymm4
vcmpgt_oqpd ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgt_oqpd ymm2,ymm6,[rcx]
vcmptrue_uspd ymm2,ymm6,ymm4
vcmptrue_uspd ymm2,ymm6,YMMWORD PTR [rcx]
vcmptrue_uspd ymm2,ymm6,[rcx]
vcmpeqps ymm2,ymm6,ymm4
vcmpeqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeqps ymm2,ymm6,[rcx]
vcmpltps ymm2,ymm6,ymm4
vcmpltps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpltps ymm2,ymm6,[rcx]
vcmpleps ymm2,ymm6,ymm4
vcmpleps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpleps ymm2,ymm6,[rcx]
vcmpunordps ymm2,ymm6,ymm4
vcmpunordps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunordps ymm2,ymm6,[rcx]
vcmpneqps ymm2,ymm6,ymm4
vcmpneqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneqps ymm2,ymm6,[rcx]
vcmpnltps ymm2,ymm6,ymm4
vcmpnltps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnltps ymm2,ymm6,[rcx]
vcmpnleps ymm2,ymm6,ymm4
vcmpnleps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnleps ymm2,ymm6,[rcx]
vcmpordps ymm2,ymm6,ymm4
vcmpordps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpordps ymm2,ymm6,[rcx]
vcmpeq_uqps ymm2,ymm6,ymm4
vcmpeq_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_uqps ymm2,ymm6,[rcx]
vcmpngeps ymm2,ymm6,ymm4
vcmpngeps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngeps ymm2,ymm6,[rcx]
vcmpngtps ymm2,ymm6,ymm4
vcmpngtps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngtps ymm2,ymm6,[rcx]
vcmpfalseps ymm2,ymm6,ymm4
vcmpfalseps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalseps ymm2,ymm6,[rcx]
vcmpneq_oqps ymm2,ymm6,ymm4
vcmpneq_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_oqps ymm2,ymm6,[rcx]
vcmpgeps ymm2,ymm6,ymm4
vcmpgeps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgeps ymm2,ymm6,[rcx]
vcmpgtps ymm2,ymm6,ymm4
vcmpgtps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgtps ymm2,ymm6,[rcx]
vcmptrueps ymm2,ymm6,ymm4
vcmptrueps ymm2,ymm6,YMMWORD PTR [rcx]
vcmptrueps ymm2,ymm6,[rcx]
vcmpeq_osps ymm2,ymm6,ymm4
vcmpeq_osps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_osps ymm2,ymm6,[rcx]
vcmplt_oqps ymm2,ymm6,ymm4
vcmplt_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmplt_oqps ymm2,ymm6,[rcx]
vcmple_oqps ymm2,ymm6,ymm4
vcmple_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmple_oqps ymm2,ymm6,[rcx]
vcmpunord_sps ymm2,ymm6,ymm4
vcmpunord_sps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpunord_sps ymm2,ymm6,[rcx]
vcmpneq_usps ymm2,ymm6,ymm4
vcmpneq_usps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_usps ymm2,ymm6,[rcx]
vcmpnlt_uqps ymm2,ymm6,ymm4
vcmpnlt_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnlt_uqps ymm2,ymm6,[rcx]
vcmpnle_uqps ymm2,ymm6,ymm4
vcmpnle_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnle_uqps ymm2,ymm6,[rcx]
vcmpord_sps ymm2,ymm6,ymm4
vcmpord_sps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpord_sps ymm2,ymm6,[rcx]
vcmpeq_usps ymm2,ymm6,ymm4
vcmpeq_usps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpeq_usps ymm2,ymm6,[rcx]
vcmpnge_uqps ymm2,ymm6,ymm4
vcmpnge_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpnge_uqps ymm2,ymm6,[rcx]
vcmpngt_uqps ymm2,ymm6,ymm4
vcmpngt_uqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpngt_uqps ymm2,ymm6,[rcx]
vcmpfalse_osps ymm2,ymm6,ymm4
vcmpfalse_osps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpfalse_osps ymm2,ymm6,[rcx]
vcmpneq_osps ymm2,ymm6,ymm4
vcmpneq_osps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpneq_osps ymm2,ymm6,[rcx]
vcmpge_oqps ymm2,ymm6,ymm4
vcmpge_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpge_oqps ymm2,ymm6,[rcx]
vcmpgt_oqps ymm2,ymm6,ymm4
vcmpgt_oqps ymm2,ymm6,YMMWORD PTR [rcx]
vcmpgt_oqps ymm2,ymm6,[rcx]
vcmptrue_usps ymm2,ymm6,ymm4
vcmptrue_usps ymm2,ymm6,YMMWORD PTR [rcx]
vcmptrue_usps ymm2,ymm6,[rcx]
vgf2p8mulb ymm6, ymm5, ymm4
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rcx]
vgf2p8mulb ymm6, ymm5, [rcx]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx+4064]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx+4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx-4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [rdx-4128]
# Tests for op ymm/mem256, xmm
vcvtpd2dq xmm4,ymm4
vcvtpd2dq xmm4,YMMWORD PTR [rcx]
vcvtpd2ps xmm4,ymm4
vcvtpd2ps xmm4,YMMWORD PTR [rcx]
vcvttpd2dq xmm4,ymm4
vcvttpd2dq xmm4,YMMWORD PTR [rcx]
# Tests for op ymm/mem256, ymm
vcvtdq2ps ymm6,ymm4
vcvtdq2ps ymm4,YMMWORD PTR [rcx]
vcvtdq2ps ymm4,[rcx]
vcvtps2dq ymm6,ymm4
vcvtps2dq ymm4,YMMWORD PTR [rcx]
vcvtps2dq ymm4,[rcx]
vcvttps2dq ymm6,ymm4
vcvttps2dq ymm4,YMMWORD PTR [rcx]
vcvttps2dq ymm4,[rcx]
vmovapd ymm6,ymm4
vmovapd ymm4,YMMWORD PTR [rcx]
vmovapd ymm4,[rcx]
vmovaps ymm6,ymm4
vmovaps ymm4,YMMWORD PTR [rcx]
vmovaps ymm4,[rcx]
vmovdqa ymm6,ymm4
vmovdqa ymm4,YMMWORD PTR [rcx]
vmovdqa ymm4,[rcx]
vmovdqu ymm6,ymm4
vmovdqu ymm4,YMMWORD PTR [rcx]
vmovdqu ymm4,[rcx]
vmovddup ymm6,ymm4
vmovddup ymm4,YMMWORD PTR [rcx]
vmovddup ymm4,[rcx]
vmovshdup ymm6,ymm4
vmovshdup ymm4,YMMWORD PTR [rcx]
vmovshdup ymm4,[rcx]
vmovsldup ymm6,ymm4
vmovsldup ymm4,YMMWORD PTR [rcx]
vmovsldup ymm4,[rcx]
vmovupd ymm6,ymm4
vmovupd ymm4,YMMWORD PTR [rcx]
vmovupd ymm4,[rcx]
vmovups ymm6,ymm4
vmovups ymm4,YMMWORD PTR [rcx]
vmovups ymm4,[rcx]
vptest ymm6,ymm4
vptest ymm4,YMMWORD PTR [rcx]
vptest ymm4,[rcx]
vrcpps ymm6,ymm4
vrcpps ymm4,YMMWORD PTR [rcx]
vrcpps ymm4,[rcx]
vrsqrtps ymm6,ymm4
vrsqrtps ymm4,YMMWORD PTR [rcx]
vrsqrtps ymm4,[rcx]
vsqrtpd ymm6,ymm4
vsqrtpd ymm4,YMMWORD PTR [rcx]
vsqrtpd ymm4,[rcx]
vsqrtps ymm6,ymm4
vsqrtps ymm4,YMMWORD PTR [rcx]
vsqrtps ymm4,[rcx]
vtestpd ymm6,ymm4
vtestpd ymm4,YMMWORD PTR [rcx]
vtestpd ymm4,[rcx]
vtestps ymm6,ymm4
vtestps ymm4,YMMWORD PTR [rcx]
vtestps ymm4,[rcx]
# Tests for op ymm, ymm/mem256
vmovapd ymm6,ymm4
vmovapd YMMWORD PTR [rcx],ymm4
vmovapd [rcx],ymm4
vmovaps ymm6,ymm4
vmovaps YMMWORD PTR [rcx],ymm4
vmovaps [rcx],ymm4
vmovdqa ymm6,ymm4
vmovdqa YMMWORD PTR [rcx],ymm4
vmovdqa [rcx],ymm4
vmovdqu ymm6,ymm4
vmovdqu YMMWORD PTR [rcx],ymm4
vmovdqu [rcx],ymm4
vmovupd ymm6,ymm4
vmovupd YMMWORD PTR [rcx],ymm4
vmovupd [rcx],ymm4
vmovups ymm6,ymm4
vmovups YMMWORD PTR [rcx],ymm4
vmovups [rcx],ymm4
# Tests for op mem256, ymm
vlddqu ymm4,YMMWORD PTR [rcx]
vlddqu ymm4,[rcx]
# Tests for op ymm, mem256
vmovntdq YMMWORD PTR [rcx],ymm4
vmovntdq [rcx],ymm4
vmovntpd YMMWORD PTR [rcx],ymm4
vmovntpd [rcx],ymm4
vmovntps YMMWORD PTR [rcx],ymm4
vmovntps [rcx],ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd ymm2,ymm6,ymm4,7
vblendpd ymm2,ymm6,YMMWORD PTR [rcx],7
vblendpd ymm2,ymm6,[rcx],7
vblendps ymm2,ymm6,ymm4,7
vblendps ymm2,ymm6,YMMWORD PTR [rcx],7
vblendps ymm2,ymm6,[rcx],7
vcmppd ymm2,ymm6,ymm4,7
vcmppd ymm2,ymm6,YMMWORD PTR [rcx],7
vcmppd ymm2,ymm6,[rcx],7
vcmpps ymm2,ymm6,ymm4,7
vcmpps ymm2,ymm6,YMMWORD PTR [rcx],7
vcmpps ymm2,ymm6,[rcx],7
vdpps ymm2,ymm6,ymm4,7
vdpps ymm2,ymm6,YMMWORD PTR [rcx],7
vdpps ymm2,ymm6,[rcx],7
vperm2f128 ymm2,ymm6,ymm4,7
vperm2f128 ymm2,ymm6,YMMWORD PTR [rcx],7
vperm2f128 ymm2,ymm6,[rcx],7
vshufpd ymm2,ymm6,ymm4,7
vshufpd ymm2,ymm6,YMMWORD PTR [rcx],7
vshufpd ymm2,ymm6,[rcx],7
vshufps ymm2,ymm6,ymm4,7
vshufps ymm2,ymm6,YMMWORD PTR [rcx],7
vshufps ymm2,ymm6,[rcx],7
vgf2p8affineqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineqb ymm6, ymm5, ymm4, 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rcx], 123
vgf2p8affineqb ymm6, ymm5, [rcx], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx+4064], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx+4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx-4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [rdx-4128], 123
vgf2p8affineinvqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineinvqb ymm6, ymm5, ymm4, 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rcx], 123
vgf2p8affineinvqb ymm6, ymm5, [rcx], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx+4064], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx+4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx-4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [rdx-4128], 123
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd ymm7,ymm2,ymm6,ymm4
vblendvpd ymm7,ymm2,YMMWORD PTR [rcx],ymm4
vblendvpd ymm7,ymm2,[rcx],ymm4
vblendvps ymm7,ymm2,ymm6,ymm4
vblendvps ymm7,ymm2,YMMWORD PTR [rcx],ymm4
vblendvps ymm7,ymm2,[rcx],ymm4
# Tests for op imm8, xmm/mem128, ymm, ymm
# 128-bit lane insert into a 256-bit destination.
vinsertf128 ymm6,ymm4,xmm4,7
vinsertf128 ymm6,ymm4,XMMWORD PTR [rcx],7
vinsertf128 ymm6,ymm4,[rcx],7
# Tests for op imm8, ymm, xmm/mem128
# 128-bit lane extract to register or memory.
vextractf128 xmm4,ymm4,7
vextractf128 XMMWORD PTR [rcx],ymm4,7
vextractf128 [rcx],ymm4,7
# Tests for op mem128, ymm
# Memory-only source: vbroadcastf128 has no register-source form.
vbroadcastf128 ymm4,XMMWORD PTR [rcx]
vbroadcastf128 ymm4,[rcx]
# Tests for op xmm/mem128, xmm
vcvtdq2ps xmm6,xmm4
vcvtdq2ps xmm4,XMMWORD PTR [rcx]
vcvtdq2ps xmm4,[rcx]
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm4,XMMWORD PTR [rcx]
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm4,XMMWORD PTR [rcx]
vcvtps2dq xmm6,xmm4
vcvtps2dq xmm4,XMMWORD PTR [rcx]
vcvtps2dq xmm4,[rcx]
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm4,XMMWORD PTR [rcx]
vcvttps2dq xmm6,xmm4
vcvttps2dq xmm4,XMMWORD PTR [rcx]
vcvttps2dq xmm4,[rcx]
vmovapd xmm6,xmm4
vmovapd xmm4,XMMWORD PTR [rcx]
vmovapd xmm4,[rcx]
vmovaps xmm6,xmm4
vmovaps xmm4,XMMWORD PTR [rcx]
vmovaps xmm4,[rcx]
vmovdqa xmm6,xmm4
vmovdqa xmm4,XMMWORD PTR [rcx]
vmovdqa xmm4,[rcx]
vmovdqu xmm6,xmm4
vmovdqu xmm4,XMMWORD PTR [rcx]
vmovdqu xmm4,[rcx]
vmovshdup xmm6,xmm4
vmovshdup xmm4,XMMWORD PTR [rcx]
vmovshdup xmm4,[rcx]
vmovsldup xmm6,xmm4
vmovsldup xmm4,XMMWORD PTR [rcx]
vmovsldup xmm4,[rcx]
vmovupd xmm6,xmm4
vmovupd xmm4,XMMWORD PTR [rcx]
vmovupd xmm4,[rcx]
vmovups xmm6,xmm4
vmovups xmm4,XMMWORD PTR [rcx]
vmovups xmm4,[rcx]
vpabsb xmm6,xmm4
vpabsb xmm4,XMMWORD PTR [rcx]
vpabsb xmm4,[rcx]
vpabsw xmm6,xmm4
vpabsw xmm4,XMMWORD PTR [rcx]
vpabsw xmm4,[rcx]
vpabsd xmm6,xmm4
vpabsd xmm4,XMMWORD PTR [rcx]
vpabsd xmm4,[rcx]
vphminposuw xmm6,xmm4
vphminposuw xmm4,XMMWORD PTR [rcx]
vphminposuw xmm4,[rcx]
vptest xmm6,xmm4
vptest xmm4,XMMWORD PTR [rcx]
vptest xmm4,[rcx]
vtestps xmm6,xmm4
vtestps xmm4,XMMWORD PTR [rcx]
vtestps xmm4,[rcx]
vtestpd xmm6,xmm4
vtestpd xmm4,XMMWORD PTR [rcx]
vtestpd xmm4,[rcx]
vrcpps xmm6,xmm4
vrcpps xmm4,XMMWORD PTR [rcx]
vrcpps xmm4,[rcx]
vrsqrtps xmm6,xmm4
vrsqrtps xmm4,XMMWORD PTR [rcx]
vrsqrtps xmm4,[rcx]
vsqrtpd xmm6,xmm4
vsqrtpd xmm4,XMMWORD PTR [rcx]
vsqrtpd xmm4,[rcx]
vsqrtps xmm6,xmm4
vsqrtps xmm4,XMMWORD PTR [rcx]
vsqrtps xmm4,[rcx]
vaesimc xmm6,xmm4
vaesimc xmm4,XMMWORD PTR [rcx]
vaesimc xmm4,[rcx]
# Tests for op xmm, xmm/mem128
vmovapd xmm6,xmm4
vmovapd XMMWORD PTR [rcx],xmm4
vmovapd [rcx],xmm4
vmovaps xmm6,xmm4
vmovaps XMMWORD PTR [rcx],xmm4
vmovaps [rcx],xmm4
vmovdqa xmm6,xmm4
vmovdqa XMMWORD PTR [rcx],xmm4
vmovdqa [rcx],xmm4
vmovdqu xmm6,xmm4
vmovdqu XMMWORD PTR [rcx],xmm4
vmovdqu [rcx],xmm4
vmovupd xmm6,xmm4
vmovupd XMMWORD PTR [rcx],xmm4
vmovupd [rcx],xmm4
vmovups xmm6,xmm4
vmovups XMMWORD PTR [rcx],xmm4
vmovups [rcx],xmm4
# Tests for op mem128, xmm
# Memory-only loads: vlddqu and the non-temporal vmovntdqa have no
# register-source forms.
vlddqu xmm4,XMMWORD PTR [rcx]
vlddqu xmm4,[rcx]
vmovntdqa xmm4,XMMWORD PTR [rcx]
vmovntdqa xmm4,[rcx]
# Tests for op xmm, mem128
# Memory-only non-temporal stores.
vmovntdq XMMWORD PTR [rcx],xmm4
vmovntdq [rcx],xmm4
vmovntpd XMMWORD PTR [rcx],xmm4
vmovntpd [rcx],xmm4
vmovntps XMMWORD PTR [rcx],xmm4
vmovntps [rcx],xmm4
# Tests for op xmm/mem128, ymm
vcvtdq2pd ymm4,xmm4
vcvtdq2pd ymm4,XMMWORD PTR [rcx]
vcvtdq2pd ymm4,[rcx]
vcvtps2pd ymm4,xmm4
vcvtps2pd ymm4,XMMWORD PTR [rcx]
vcvtps2pd ymm4,[rcx]
# Tests for op xmm/mem128, xmm, xmm
vaddpd xmm2,xmm6,xmm4
vaddpd xmm7,xmm6,XMMWORD PTR [rcx]
vaddpd xmm7,xmm6,[rcx]
vaddps xmm2,xmm6,xmm4
vaddps xmm7,xmm6,XMMWORD PTR [rcx]
vaddps xmm7,xmm6,[rcx]
vaddsubpd xmm2,xmm6,xmm4
vaddsubpd xmm7,xmm6,XMMWORD PTR [rcx]
vaddsubpd xmm7,xmm6,[rcx]
vaddsubps xmm2,xmm6,xmm4
vaddsubps xmm7,xmm6,XMMWORD PTR [rcx]
vaddsubps xmm7,xmm6,[rcx]
vandnpd xmm2,xmm6,xmm4
vandnpd xmm7,xmm6,XMMWORD PTR [rcx]
vandnpd xmm7,xmm6,[rcx]
vandnps xmm2,xmm6,xmm4
vandnps xmm7,xmm6,XMMWORD PTR [rcx]
vandnps xmm7,xmm6,[rcx]
vandpd xmm2,xmm6,xmm4
vandpd xmm7,xmm6,XMMWORD PTR [rcx]
vandpd xmm7,xmm6,[rcx]
vandps xmm2,xmm6,xmm4
vandps xmm7,xmm6,XMMWORD PTR [rcx]
vandps xmm7,xmm6,[rcx]
vdivpd xmm2,xmm6,xmm4
vdivpd xmm7,xmm6,XMMWORD PTR [rcx]
vdivpd xmm7,xmm6,[rcx]
vdivps xmm2,xmm6,xmm4
vdivps xmm7,xmm6,XMMWORD PTR [rcx]
vdivps xmm7,xmm6,[rcx]
vhaddpd xmm2,xmm6,xmm4
vhaddpd xmm7,xmm6,XMMWORD PTR [rcx]
vhaddpd xmm7,xmm6,[rcx]
vhaddps xmm2,xmm6,xmm4
vhaddps xmm7,xmm6,XMMWORD PTR [rcx]
vhaddps xmm7,xmm6,[rcx]
vhsubpd xmm2,xmm6,xmm4
vhsubpd xmm7,xmm6,XMMWORD PTR [rcx]
vhsubpd xmm7,xmm6,[rcx]
vhsubps xmm2,xmm6,xmm4
vhsubps xmm7,xmm6,XMMWORD PTR [rcx]
vhsubps xmm7,xmm6,[rcx]
vmaxpd xmm2,xmm6,xmm4
vmaxpd xmm7,xmm6,XMMWORD PTR [rcx]
vmaxpd xmm7,xmm6,[rcx]
vmaxps xmm2,xmm6,xmm4
vmaxps xmm7,xmm6,XMMWORD PTR [rcx]
vmaxps xmm7,xmm6,[rcx]
vminpd xmm2,xmm6,xmm4
vminpd xmm7,xmm6,XMMWORD PTR [rcx]
vminpd xmm7,xmm6,[rcx]
vminps xmm2,xmm6,xmm4
vminps xmm7,xmm6,XMMWORD PTR [rcx]
vminps xmm7,xmm6,[rcx]
vmulpd xmm2,xmm6,xmm4
vmulpd xmm7,xmm6,XMMWORD PTR [rcx]
vmulpd xmm7,xmm6,[rcx]
vmulps xmm2,xmm6,xmm4
vmulps xmm7,xmm6,XMMWORD PTR [rcx]
vmulps xmm7,xmm6,[rcx]
vorpd xmm2,xmm6,xmm4
vorpd xmm7,xmm6,XMMWORD PTR [rcx]
vorpd xmm7,xmm6,[rcx]
vorps xmm2,xmm6,xmm4
vorps xmm7,xmm6,XMMWORD PTR [rcx]
vorps xmm7,xmm6,[rcx]
vpacksswb xmm2,xmm6,xmm4
vpacksswb xmm7,xmm6,XMMWORD PTR [rcx]
vpacksswb xmm7,xmm6,[rcx]
vpackssdw xmm2,xmm6,xmm4
vpackssdw xmm7,xmm6,XMMWORD PTR [rcx]
vpackssdw xmm7,xmm6,[rcx]
vpackuswb xmm2,xmm6,xmm4
vpackuswb xmm7,xmm6,XMMWORD PTR [rcx]
vpackuswb xmm7,xmm6,[rcx]
vpackusdw xmm2,xmm6,xmm4
vpackusdw xmm7,xmm6,XMMWORD PTR [rcx]
vpackusdw xmm7,xmm6,[rcx]
vpaddb xmm2,xmm6,xmm4
vpaddb xmm7,xmm6,XMMWORD PTR [rcx]
vpaddb xmm7,xmm6,[rcx]
vpaddw xmm2,xmm6,xmm4
vpaddw xmm7,xmm6,XMMWORD PTR [rcx]
vpaddw xmm7,xmm6,[rcx]
vpaddd xmm2,xmm6,xmm4
vpaddd xmm7,xmm6,XMMWORD PTR [rcx]
vpaddd xmm7,xmm6,[rcx]
vpaddq xmm2,xmm6,xmm4
vpaddq xmm7,xmm6,XMMWORD PTR [rcx]
vpaddq xmm7,xmm6,[rcx]
vpaddsb xmm2,xmm6,xmm4
vpaddsb xmm7,xmm6,XMMWORD PTR [rcx]
vpaddsb xmm7,xmm6,[rcx]
vpaddsw xmm2,xmm6,xmm4
vpaddsw xmm7,xmm6,XMMWORD PTR [rcx]
vpaddsw xmm7,xmm6,[rcx]
vpaddusb xmm2,xmm6,xmm4
vpaddusb xmm7,xmm6,XMMWORD PTR [rcx]
vpaddusb xmm7,xmm6,[rcx]
vpaddusw xmm2,xmm6,xmm4
vpaddusw xmm7,xmm6,XMMWORD PTR [rcx]
vpaddusw xmm7,xmm6,[rcx]
vpand xmm2,xmm6,xmm4
vpand xmm7,xmm6,XMMWORD PTR [rcx]
vpand xmm7,xmm6,[rcx]
vpandn xmm2,xmm6,xmm4
vpandn xmm7,xmm6,XMMWORD PTR [rcx]
vpandn xmm7,xmm6,[rcx]
vpavgb xmm2,xmm6,xmm4
vpavgb xmm7,xmm6,XMMWORD PTR [rcx]
vpavgb xmm7,xmm6,[rcx]
vpavgw xmm2,xmm6,xmm4
vpavgw xmm7,xmm6,XMMWORD PTR [rcx]
vpavgw xmm7,xmm6,[rcx]
vpclmullqlqdq xmm2,xmm6,xmm4
vpclmullqlqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmullqlqdq xmm7,xmm6,[rcx]
vpclmulhqlqdq xmm2,xmm6,xmm4
vpclmulhqlqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmulhqlqdq xmm7,xmm6,[rcx]
vpclmullqhqdq xmm2,xmm6,xmm4
vpclmullqhqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmullqhqdq xmm7,xmm6,[rcx]
vpclmulhqhqdq xmm2,xmm6,xmm4
vpclmulhqhqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpclmulhqhqdq xmm7,xmm6,[rcx]
vpcmpeqb xmm2,xmm6,xmm4
vpcmpeqb xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqb xmm7,xmm6,[rcx]
vpcmpeqw xmm2,xmm6,xmm4
vpcmpeqw xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqw xmm7,xmm6,[rcx]
vpcmpeqd xmm2,xmm6,xmm4
vpcmpeqd xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqd xmm7,xmm6,[rcx]
vpcmpeqq xmm2,xmm6,xmm4
vpcmpeqq xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpeqq xmm7,xmm6,[rcx]
vpcmpgtb xmm2,xmm6,xmm4
vpcmpgtb xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtb xmm7,xmm6,[rcx]
vpcmpgtw xmm2,xmm6,xmm4
vpcmpgtw xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtw xmm7,xmm6,[rcx]
vpcmpgtd xmm2,xmm6,xmm4
vpcmpgtd xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtd xmm7,xmm6,[rcx]
vpcmpgtq xmm2,xmm6,xmm4
vpcmpgtq xmm7,xmm6,XMMWORD PTR [rcx]
vpcmpgtq xmm7,xmm6,[rcx]
vpermilpd xmm2,xmm6,xmm4
vpermilpd xmm7,xmm6,XMMWORD PTR [rcx]
vpermilpd xmm7,xmm6,[rcx]
vpermilps xmm2,xmm6,xmm4
vpermilps xmm7,xmm6,XMMWORD PTR [rcx]
vpermilps xmm7,xmm6,[rcx]
vphaddw xmm2,xmm6,xmm4
vphaddw xmm7,xmm6,XMMWORD PTR [rcx]
vphaddw xmm7,xmm6,[rcx]
vphaddd xmm2,xmm6,xmm4
vphaddd xmm7,xmm6,XMMWORD PTR [rcx]
vphaddd xmm7,xmm6,[rcx]
vphaddsw xmm2,xmm6,xmm4
vphaddsw xmm7,xmm6,XMMWORD PTR [rcx]
vphaddsw xmm7,xmm6,[rcx]
vphsubw xmm2,xmm6,xmm4
vphsubw xmm7,xmm6,XMMWORD PTR [rcx]
vphsubw xmm7,xmm6,[rcx]
vphsubd xmm2,xmm6,xmm4
vphsubd xmm7,xmm6,XMMWORD PTR [rcx]
vphsubd xmm7,xmm6,[rcx]
vphsubsw xmm2,xmm6,xmm4
vphsubsw xmm7,xmm6,XMMWORD PTR [rcx]
vphsubsw xmm7,xmm6,[rcx]
vpmaddwd xmm2,xmm6,xmm4
vpmaddwd xmm7,xmm6,XMMWORD PTR [rcx]
vpmaddwd xmm7,xmm6,[rcx]
vpmaddubsw xmm2,xmm6,xmm4
vpmaddubsw xmm7,xmm6,XMMWORD PTR [rcx]
vpmaddubsw xmm7,xmm6,[rcx]
vpmaxsb xmm2,xmm6,xmm4
vpmaxsb xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxsb xmm7,xmm6,[rcx]
vpmaxsw xmm2,xmm6,xmm4
vpmaxsw xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxsw xmm7,xmm6,[rcx]
vpmaxsd xmm2,xmm6,xmm4
vpmaxsd xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxsd xmm7,xmm6,[rcx]
vpmaxub xmm2,xmm6,xmm4
vpmaxub xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxub xmm7,xmm6,[rcx]
vpmaxuw xmm2,xmm6,xmm4
vpmaxuw xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxuw xmm7,xmm6,[rcx]
vpmaxud xmm2,xmm6,xmm4
vpmaxud xmm7,xmm6,XMMWORD PTR [rcx]
vpmaxud xmm7,xmm6,[rcx]
vpminsb xmm2,xmm6,xmm4
vpminsb xmm7,xmm6,XMMWORD PTR [rcx]
vpminsb xmm7,xmm6,[rcx]
vpminsw xmm2,xmm6,xmm4
vpminsw xmm7,xmm6,XMMWORD PTR [rcx]
vpminsw xmm7,xmm6,[rcx]
vpminsd xmm2,xmm6,xmm4
vpminsd xmm7,xmm6,XMMWORD PTR [rcx]
vpminsd xmm7,xmm6,[rcx]
vpminub xmm2,xmm6,xmm4
vpminub xmm7,xmm6,XMMWORD PTR [rcx]
vpminub xmm7,xmm6,[rcx]
vpminuw xmm2,xmm6,xmm4
vpminuw xmm7,xmm6,XMMWORD PTR [rcx]
vpminuw xmm7,xmm6,[rcx]
vpminud xmm2,xmm6,xmm4
vpminud xmm7,xmm6,XMMWORD PTR [rcx]
vpminud xmm7,xmm6,[rcx]
vpmulhuw xmm2,xmm6,xmm4
vpmulhuw xmm7,xmm6,XMMWORD PTR [rcx]
vpmulhuw xmm7,xmm6,[rcx]
vpmulhrsw xmm2,xmm6,xmm4
vpmulhrsw xmm7,xmm6,XMMWORD PTR [rcx]
vpmulhrsw xmm7,xmm6,[rcx]
vpmulhw xmm2,xmm6,xmm4
vpmulhw xmm7,xmm6,XMMWORD PTR [rcx]
vpmulhw xmm7,xmm6,[rcx]
vpmullw xmm2,xmm6,xmm4
vpmullw xmm7,xmm6,XMMWORD PTR [rcx]
vpmullw xmm7,xmm6,[rcx]
vpmulld xmm2,xmm6,xmm4
vpmulld xmm7,xmm6,XMMWORD PTR [rcx]
vpmulld xmm7,xmm6,[rcx]
vpmuludq xmm2,xmm6,xmm4
vpmuludq xmm7,xmm6,XMMWORD PTR [rcx]
vpmuludq xmm7,xmm6,[rcx]
vpmuldq xmm2,xmm6,xmm4
vpmuldq xmm7,xmm6,XMMWORD PTR [rcx]
vpmuldq xmm7,xmm6,[rcx]
vpor xmm2,xmm6,xmm4
vpor xmm7,xmm6,XMMWORD PTR [rcx]
vpor xmm7,xmm6,[rcx]
vpsadbw xmm2,xmm6,xmm4
vpsadbw xmm7,xmm6,XMMWORD PTR [rcx]
vpsadbw xmm7,xmm6,[rcx]
vpshufb xmm2,xmm6,xmm4
vpshufb xmm7,xmm6,XMMWORD PTR [rcx]
vpshufb xmm7,xmm6,[rcx]
vpsignb xmm2,xmm6,xmm4
vpsignb xmm7,xmm6,XMMWORD PTR [rcx]
vpsignb xmm7,xmm6,[rcx]
vpsignw xmm2,xmm6,xmm4
vpsignw xmm7,xmm6,XMMWORD PTR [rcx]
vpsignw xmm7,xmm6,[rcx]
vpsignd xmm2,xmm6,xmm4
vpsignd xmm7,xmm6,XMMWORD PTR [rcx]
vpsignd xmm7,xmm6,[rcx]
vpsllw xmm2,xmm6,xmm4
vpsllw xmm7,xmm6,XMMWORD PTR [rcx]
vpsllw xmm7,xmm6,[rcx]
vpslld xmm2,xmm6,xmm4
vpslld xmm7,xmm6,XMMWORD PTR [rcx]
vpslld xmm7,xmm6,[rcx]
vpsllq xmm2,xmm6,xmm4
vpsllq xmm7,xmm6,XMMWORD PTR [rcx]
vpsllq xmm7,xmm6,[rcx]
vpsraw xmm2,xmm6,xmm4
vpsraw xmm7,xmm6,XMMWORD PTR [rcx]
vpsraw xmm7,xmm6,[rcx]
vpsrad xmm2,xmm6,xmm4
vpsrad xmm7,xmm6,XMMWORD PTR [rcx]
vpsrad xmm7,xmm6,[rcx]
vpsrlw xmm2,xmm6,xmm4
vpsrlw xmm7,xmm6,XMMWORD PTR [rcx]
vpsrlw xmm7,xmm6,[rcx]
vpsrld xmm2,xmm6,xmm4
vpsrld xmm7,xmm6,XMMWORD PTR [rcx]
vpsrld xmm7,xmm6,[rcx]
vpsrlq xmm2,xmm6,xmm4
vpsrlq xmm7,xmm6,XMMWORD PTR [rcx]
vpsrlq xmm7,xmm6,[rcx]
vpsubb xmm2,xmm6,xmm4
vpsubb xmm7,xmm6,XMMWORD PTR [rcx]
vpsubb xmm7,xmm6,[rcx]
vpsubw xmm2,xmm6,xmm4
vpsubw xmm7,xmm6,XMMWORD PTR [rcx]
vpsubw xmm7,xmm6,[rcx]
vpsubd xmm2,xmm6,xmm4
vpsubd xmm7,xmm6,XMMWORD PTR [rcx]
vpsubd xmm7,xmm6,[rcx]
vpsubq xmm2,xmm6,xmm4
vpsubq xmm7,xmm6,XMMWORD PTR [rcx]
vpsubq xmm7,xmm6,[rcx]
vpsubsb xmm2,xmm6,xmm4
vpsubsb xmm7,xmm6,XMMWORD PTR [rcx]
vpsubsb xmm7,xmm6,[rcx]
vpsubsw xmm2,xmm6,xmm4
vpsubsw xmm7,xmm6,XMMWORD PTR [rcx]
vpsubsw xmm7,xmm6,[rcx]
vpsubusb xmm2,xmm6,xmm4
vpsubusb xmm7,xmm6,XMMWORD PTR [rcx]
vpsubusb xmm7,xmm6,[rcx]
vpsubusw xmm2,xmm6,xmm4
vpsubusw xmm7,xmm6,XMMWORD PTR [rcx]
vpsubusw xmm7,xmm6,[rcx]
vpunpckhbw xmm2,xmm6,xmm4
vpunpckhbw xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhbw xmm7,xmm6,[rcx]
vpunpckhwd xmm2,xmm6,xmm4
vpunpckhwd xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhwd xmm7,xmm6,[rcx]
vpunpckhdq xmm2,xmm6,xmm4
vpunpckhdq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhdq xmm7,xmm6,[rcx]
vpunpckhqdq xmm2,xmm6,xmm4
vpunpckhqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckhqdq xmm7,xmm6,[rcx]
vpunpcklbw xmm2,xmm6,xmm4
vpunpcklbw xmm7,xmm6,XMMWORD PTR [rcx]
vpunpcklbw xmm7,xmm6,[rcx]
vpunpcklwd xmm2,xmm6,xmm4
vpunpcklwd xmm7,xmm6,XMMWORD PTR [rcx]
vpunpcklwd xmm7,xmm6,[rcx]
vpunpckldq xmm2,xmm6,xmm4
vpunpckldq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpckldq xmm7,xmm6,[rcx]
vpunpcklqdq xmm2,xmm6,xmm4
vpunpcklqdq xmm7,xmm6,XMMWORD PTR [rcx]
vpunpcklqdq xmm7,xmm6,[rcx]
vpxor xmm2,xmm6,xmm4
vpxor xmm7,xmm6,XMMWORD PTR [rcx]
vpxor xmm7,xmm6,[rcx]
vsubpd xmm2,xmm6,xmm4
vsubpd xmm7,xmm6,XMMWORD PTR [rcx]
vsubpd xmm7,xmm6,[rcx]
vsubps xmm2,xmm6,xmm4
vsubps xmm7,xmm6,XMMWORD PTR [rcx]
vsubps xmm7,xmm6,[rcx]
vunpckhpd xmm2,xmm6,xmm4
vunpckhpd xmm7,xmm6,XMMWORD PTR [rcx]
vunpckhpd xmm7,xmm6,[rcx]
vunpckhps xmm2,xmm6,xmm4
vunpckhps xmm7,xmm6,XMMWORD PTR [rcx]
vunpckhps xmm7,xmm6,[rcx]
vunpcklpd xmm2,xmm6,xmm4
vunpcklpd xmm7,xmm6,XMMWORD PTR [rcx]
vunpcklpd xmm7,xmm6,[rcx]
vunpcklps xmm2,xmm6,xmm4
vunpcklps xmm7,xmm6,XMMWORD PTR [rcx]
vunpcklps xmm7,xmm6,[rcx]
vxorpd xmm2,xmm6,xmm4
vxorpd xmm7,xmm6,XMMWORD PTR [rcx]
vxorpd xmm7,xmm6,[rcx]
vxorps xmm2,xmm6,xmm4
vxorps xmm7,xmm6,XMMWORD PTR [rcx]
vxorps xmm7,xmm6,[rcx]
vaesenc xmm2,xmm6,xmm4
vaesenc xmm7,xmm6,XMMWORD PTR [rcx]
vaesenc xmm7,xmm6,[rcx]
vaesenclast xmm2,xmm6,xmm4
vaesenclast xmm7,xmm6,XMMWORD PTR [rcx]
vaesenclast xmm7,xmm6,[rcx]
vaesdec xmm2,xmm6,xmm4
vaesdec xmm7,xmm6,XMMWORD PTR [rcx]
vaesdec xmm7,xmm6,[rcx]
vaesdeclast xmm2,xmm6,xmm4
vaesdeclast xmm7,xmm6,XMMWORD PTR [rcx]
vaesdeclast xmm7,xmm6,[rcx]
vcmpeqpd xmm2,xmm6,xmm4
vcmpeqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeqpd xmm7,xmm6,[rcx]
vcmpltpd xmm2,xmm6,xmm4
vcmpltpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpltpd xmm7,xmm6,[rcx]
vcmplepd xmm2,xmm6,xmm4
vcmplepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmplepd xmm7,xmm6,[rcx]
vcmpunordpd xmm2,xmm6,xmm4
vcmpunordpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunordpd xmm7,xmm6,[rcx]
vcmpneqpd xmm2,xmm6,xmm4
vcmpneqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneqpd xmm7,xmm6,[rcx]
vcmpnltpd xmm2,xmm6,xmm4
vcmpnltpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnltpd xmm7,xmm6,[rcx]
vcmpnlepd xmm2,xmm6,xmm4
vcmpnlepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnlepd xmm7,xmm6,[rcx]
vcmpordpd xmm2,xmm6,xmm4
vcmpordpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpordpd xmm7,xmm6,[rcx]
vcmpeq_uqpd xmm2,xmm6,xmm4
vcmpeq_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_uqpd xmm7,xmm6,[rcx]
vcmpngepd xmm2,xmm6,xmm4
vcmpngepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngepd xmm7,xmm6,[rcx]
vcmpngtpd xmm2,xmm6,xmm4
vcmpngtpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngtpd xmm7,xmm6,[rcx]
vcmpfalsepd xmm2,xmm6,xmm4
vcmpfalsepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalsepd xmm7,xmm6,[rcx]
vcmpneq_oqpd xmm2,xmm6,xmm4
vcmpneq_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_oqpd xmm7,xmm6,[rcx]
vcmpgepd xmm2,xmm6,xmm4
vcmpgepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgepd xmm7,xmm6,[rcx]
vcmpgtpd xmm2,xmm6,xmm4
vcmpgtpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgtpd xmm7,xmm6,[rcx]
vcmptruepd xmm2,xmm6,xmm4
vcmptruepd xmm7,xmm6,XMMWORD PTR [rcx]
vcmptruepd xmm7,xmm6,[rcx]
vcmpeq_ospd xmm2,xmm6,xmm4
vcmpeq_ospd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_ospd xmm7,xmm6,[rcx]
vcmplt_oqpd xmm2,xmm6,xmm4
vcmplt_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmplt_oqpd xmm7,xmm6,[rcx]
vcmple_oqpd xmm2,xmm6,xmm4
vcmple_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmple_oqpd xmm7,xmm6,[rcx]
vcmpunord_spd xmm2,xmm6,xmm4
vcmpunord_spd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunord_spd xmm7,xmm6,[rcx]
vcmpneq_uspd xmm2,xmm6,xmm4
vcmpneq_uspd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_uspd xmm7,xmm6,[rcx]
vcmpnlt_uqpd xmm2,xmm6,xmm4
vcmpnlt_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnlt_uqpd xmm7,xmm6,[rcx]
vcmpnle_uqpd xmm2,xmm6,xmm4
vcmpnle_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnle_uqpd xmm7,xmm6,[rcx]
vcmpord_spd xmm2,xmm6,xmm4
vcmpord_spd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpord_spd xmm7,xmm6,[rcx]
vcmpeq_uspd xmm2,xmm6,xmm4
vcmpeq_uspd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_uspd xmm7,xmm6,[rcx]
vcmpnge_uqpd xmm2,xmm6,xmm4
vcmpnge_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnge_uqpd xmm7,xmm6,[rcx]
vcmpngt_uqpd xmm2,xmm6,xmm4
vcmpngt_uqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngt_uqpd xmm7,xmm6,[rcx]
vcmpfalse_ospd xmm2,xmm6,xmm4
vcmpfalse_ospd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalse_ospd xmm7,xmm6,[rcx]
vcmpneq_ospd xmm2,xmm6,xmm4
vcmpneq_ospd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_ospd xmm7,xmm6,[rcx]
vcmpge_oqpd xmm2,xmm6,xmm4
vcmpge_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpge_oqpd xmm7,xmm6,[rcx]
vcmpgt_oqpd xmm2,xmm6,xmm4
vcmpgt_oqpd xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgt_oqpd xmm7,xmm6,[rcx]
vcmptrue_uspd xmm2,xmm6,xmm4
vcmptrue_uspd xmm7,xmm6,XMMWORD PTR [rcx]
vcmptrue_uspd xmm7,xmm6,[rcx]
vcmpeqps xmm2,xmm6,xmm4
vcmpeqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeqps xmm7,xmm6,[rcx]
vcmpltps xmm2,xmm6,xmm4
vcmpltps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpltps xmm7,xmm6,[rcx]
vcmpleps xmm2,xmm6,xmm4
vcmpleps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpleps xmm7,xmm6,[rcx]
vcmpunordps xmm2,xmm6,xmm4
vcmpunordps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunordps xmm7,xmm6,[rcx]
vcmpneqps xmm2,xmm6,xmm4
vcmpneqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneqps xmm7,xmm6,[rcx]
vcmpnltps xmm2,xmm6,xmm4
vcmpnltps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnltps xmm7,xmm6,[rcx]
vcmpnleps xmm2,xmm6,xmm4
vcmpnleps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnleps xmm7,xmm6,[rcx]
vcmpordps xmm2,xmm6,xmm4
vcmpordps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpordps xmm7,xmm6,[rcx]
vcmpeq_uqps xmm2,xmm6,xmm4
vcmpeq_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_uqps xmm7,xmm6,[rcx]
vcmpngeps xmm2,xmm6,xmm4
vcmpngeps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngeps xmm7,xmm6,[rcx]
vcmpngtps xmm2,xmm6,xmm4
vcmpngtps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngtps xmm7,xmm6,[rcx]
vcmpfalseps xmm2,xmm6,xmm4
vcmpfalseps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalseps xmm7,xmm6,[rcx]
vcmpneq_oqps xmm2,xmm6,xmm4
vcmpneq_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_oqps xmm7,xmm6,[rcx]
vcmpgeps xmm2,xmm6,xmm4
vcmpgeps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgeps xmm7,xmm6,[rcx]
vcmpgtps xmm2,xmm6,xmm4
vcmpgtps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgtps xmm7,xmm6,[rcx]
vcmptrueps xmm2,xmm6,xmm4
vcmptrueps xmm7,xmm6,XMMWORD PTR [rcx]
vcmptrueps xmm7,xmm6,[rcx]
vcmpeq_osps xmm2,xmm6,xmm4
vcmpeq_osps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_osps xmm7,xmm6,[rcx]
vcmplt_oqps xmm2,xmm6,xmm4
vcmplt_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmplt_oqps xmm7,xmm6,[rcx]
vcmple_oqps xmm2,xmm6,xmm4
vcmple_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmple_oqps xmm7,xmm6,[rcx]
vcmpunord_sps xmm2,xmm6,xmm4
vcmpunord_sps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpunord_sps xmm7,xmm6,[rcx]
vcmpneq_usps xmm2,xmm6,xmm4
vcmpneq_usps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_usps xmm7,xmm6,[rcx]
vcmpnlt_uqps xmm2,xmm6,xmm4
vcmpnlt_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnlt_uqps xmm7,xmm6,[rcx]
vcmpnle_uqps xmm2,xmm6,xmm4
vcmpnle_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnle_uqps xmm7,xmm6,[rcx]
vcmpord_sps xmm2,xmm6,xmm4
vcmpord_sps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpord_sps xmm7,xmm6,[rcx]
vcmpeq_usps xmm2,xmm6,xmm4
vcmpeq_usps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpeq_usps xmm7,xmm6,[rcx]
vcmpnge_uqps xmm2,xmm6,xmm4
vcmpnge_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpnge_uqps xmm7,xmm6,[rcx]
vcmpngt_uqps xmm2,xmm6,xmm4
vcmpngt_uqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpngt_uqps xmm7,xmm6,[rcx]
vcmpfalse_osps xmm2,xmm6,xmm4
vcmpfalse_osps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpfalse_osps xmm7,xmm6,[rcx]
vcmpneq_osps xmm2,xmm6,xmm4
vcmpneq_osps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpneq_osps xmm7,xmm6,[rcx]
vcmpge_oqps xmm2,xmm6,xmm4
vcmpge_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpge_oqps xmm7,xmm6,[rcx]
vcmpgt_oqps xmm2,xmm6,xmm4
vcmpgt_oqps xmm7,xmm6,XMMWORD PTR [rcx]
vcmpgt_oqps xmm7,xmm6,[rcx]
vcmptrue_usps xmm2,xmm6,xmm4
vcmptrue_usps xmm7,xmm6,XMMWORD PTR [rcx]
vcmptrue_usps xmm7,xmm6,[rcx]
vgf2p8mulb xmm6, xmm5, xmm4
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rcx]
vgf2p8mulb xmm6, xmm5, [rcx]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx+2032]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx+2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx-2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [rdx-2064]
# Tests for op mem128, xmm, xmm
vmaskmovps xmm6,xmm4,XMMWORD PTR [rcx]
vmaskmovps xmm6,xmm4,[rcx]
vmaskmovpd xmm6,xmm4,XMMWORD PTR [rcx]
vmaskmovpd xmm6,xmm4,[rcx]
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist xmm6,xmm4,7
vaeskeygenassist xmm6,XMMWORD PTR [rcx],7
vaeskeygenassist xmm6,[rcx],7
vpcmpestri xmm6,xmm4,7
vpcmpestri xmm6,XMMWORD PTR [rcx],7
vpcmpestri xmm6,[rcx],7
vpcmpestrm xmm6,xmm4,7
vpcmpestrm xmm6,XMMWORD PTR [rcx],7
vpcmpestrm xmm6,[rcx],7
vpcmpistri xmm6,xmm4,7
vpcmpistri xmm6,XMMWORD PTR [rcx],7
vpcmpistri xmm6,[rcx],7
vpcmpistrm xmm6,xmm4,7
vpcmpistrm xmm6,XMMWORD PTR [rcx],7
vpcmpistrm xmm6,[rcx],7
vpermilpd xmm6,xmm4,7
vpermilpd xmm6,XMMWORD PTR [rcx],7
vpermilpd xmm6,[rcx],7
vpermilps xmm6,xmm4,7
vpermilps xmm6,XMMWORD PTR [rcx],7
vpermilps xmm6,[rcx],7
vpshufd xmm6,xmm4,7
vpshufd xmm6,XMMWORD PTR [rcx],7
vpshufd xmm6,[rcx],7
vpshufhw xmm6,xmm4,7
vpshufhw xmm6,XMMWORD PTR [rcx],7
vpshufhw xmm6,[rcx],7
vpshuflw xmm6,xmm4,7
vpshuflw xmm6,XMMWORD PTR [rcx],7
vpshuflw xmm6,[rcx],7
vroundpd xmm6,xmm4,7
vroundpd xmm6,XMMWORD PTR [rcx],7
vroundpd xmm6,[rcx],7
vroundps xmm6,xmm4,7
vroundps xmm6,XMMWORD PTR [rcx],7
vroundps xmm6,[rcx],7
# Tests for op xmm, xmm, mem128
vmaskmovps XMMWORD PTR [rcx],xmm6,xmm4
vmaskmovps [rcx],xmm6,xmm4
vmaskmovpd XMMWORD PTR [rcx],xmm6,xmm4
vmaskmovpd [rcx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd xmm2,xmm6,xmm4,7
vblendpd xmm2,xmm6,XMMWORD PTR [rcx],7
vblendpd xmm2,xmm6,[rcx],7
vblendps xmm2,xmm6,xmm4,7
vblendps xmm2,xmm6,XMMWORD PTR [rcx],7
vblendps xmm2,xmm6,[rcx],7
vcmppd xmm2,xmm6,xmm4,7
vcmppd xmm2,xmm6,XMMWORD PTR [rcx],7
vcmppd xmm2,xmm6,[rcx],7
vcmpps xmm2,xmm6,xmm4,7
vcmpps xmm2,xmm6,XMMWORD PTR [rcx],7
vcmpps xmm2,xmm6,[rcx],7
vdppd xmm2,xmm6,xmm4,7
vdppd xmm2,xmm6,XMMWORD PTR [rcx],7
vdppd xmm2,xmm6,[rcx],7
vdpps xmm2,xmm6,xmm4,7
vdpps xmm2,xmm6,XMMWORD PTR [rcx],7
vdpps xmm2,xmm6,[rcx],7
vmpsadbw xmm2,xmm6,xmm4,7
vmpsadbw xmm2,xmm6,XMMWORD PTR [rcx],7
vmpsadbw xmm2,xmm6,[rcx],7
vpalignr xmm2,xmm6,xmm4,7
vpalignr xmm2,xmm6,XMMWORD PTR [rcx],7
vpalignr xmm2,xmm6,[rcx],7
vpblendw xmm2,xmm6,xmm4,7
vpblendw xmm2,xmm6,XMMWORD PTR [rcx],7
vpblendw xmm2,xmm6,[rcx],7
vpclmulqdq xmm2,xmm6,xmm4,7
vpclmulqdq xmm2,xmm6,XMMWORD PTR [rcx],7
vpclmulqdq xmm2,xmm6,[rcx],7
vshufpd xmm2,xmm6,xmm4,7
vshufpd xmm2,xmm6,XMMWORD PTR [rcx],7
vshufpd xmm2,xmm6,[rcx],7
vshufps xmm2,xmm6,xmm4,7
vshufps xmm2,xmm6,XMMWORD PTR [rcx],7
vshufps xmm2,xmm6,[rcx],7
vgf2p8affineqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineqb xmm6, xmm5, xmm4, 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rcx], 123
vgf2p8affineqb xmm6, xmm5, [rcx], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx+2032], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx+2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx-2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [rdx-2064], 123
vgf2p8affineinvqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineinvqb xmm6, xmm5, xmm4, 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rcx], 123
vgf2p8affineinvqb xmm6, xmm5, [rcx], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rax+r14*8-123456], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx+2032], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx+2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx-2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [rdx-2064], 123
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd xmm7,xmm2,xmm6,xmm4
vblendvpd xmm7,xmm2,XMMWORD PTR [rcx],xmm4
vblendvpd xmm7,xmm2,[rcx],xmm4
vblendvps xmm7,xmm2,xmm6,xmm4
vblendvps xmm7,xmm2,XMMWORD PTR [rcx],xmm4
vblendvps xmm7,xmm2,[rcx],xmm4
vpblendvb xmm7,xmm2,xmm6,xmm4
vpblendvb xmm7,xmm2,XMMWORD PTR [rcx],xmm4
vpblendvb xmm7,xmm2,[rcx],xmm4
# Tests for op mem64, ymm
vbroadcastsd ymm4,QWORD PTR [rcx]
vbroadcastsd ymm4,[rcx]
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [rcx]
vcomisd xmm4,[rcx]
vcvtdq2pd xmm6,xmm4
vcvtdq2pd xmm4,QWORD PTR [rcx]
vcvtdq2pd xmm4,[rcx]
vcvtps2pd xmm6,xmm4
vcvtps2pd xmm4,QWORD PTR [rcx]
vcvtps2pd xmm4,[rcx]
vmovddup xmm6,xmm4
vmovddup xmm4,QWORD PTR [rcx]
vmovddup xmm4,[rcx]
vpmovsxbw xmm6,xmm4
vpmovsxbw xmm4,QWORD PTR [rcx]
vpmovsxbw xmm4,[rcx]
vpmovsxwd xmm6,xmm4
vpmovsxwd xmm4,QWORD PTR [rcx]
vpmovsxwd xmm4,[rcx]
vpmovsxdq xmm6,xmm4
vpmovsxdq xmm4,QWORD PTR [rcx]
vpmovsxdq xmm4,[rcx]
vpmovzxbw xmm6,xmm4
vpmovzxbw xmm4,QWORD PTR [rcx]
vpmovzxbw xmm4,[rcx]
vpmovzxwd xmm6,xmm4
vpmovzxwd xmm4,QWORD PTR [rcx]
vpmovzxwd xmm4,[rcx]
vpmovzxdq xmm6,xmm4
vpmovzxdq xmm4,QWORD PTR [rcx]
vpmovzxdq xmm4,[rcx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [rcx]
vucomisd xmm4,[rcx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [rcx]
vmovsd xmm4,[rcx]
# Tests for op xmm, mem64
vmovlpd QWORD PTR [rcx],xmm4
vmovlpd [rcx],xmm4
vmovlps QWORD PTR [rcx],xmm4
vmovlps [rcx],xmm4
vmovhpd QWORD PTR [rcx],xmm4
vmovhpd [rcx],xmm4
vmovhps QWORD PTR [rcx],xmm4
vmovhps [rcx],xmm4
vmovsd QWORD PTR [rcx],xmm4
vmovsd [rcx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovd rcx,xmm4
vmovd xmm4,rcx
vmovd [rcx],xmm4
vmovd xmm4,[rcx]
vmovq rcx,xmm4
vmovq xmm4,rcx
vmovq QWORD PTR [rcx],xmm4
vmovq xmm4,QWORD PTR [rcx]
vmovq [rcx],xmm4
vmovq xmm4,[rcx]
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [rcx]
vcvtsd2si ecx,[rcx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [rcx]
vcvttsd2si ecx,[rcx]
# Tests for op xmm/mem64, regq
vcvtsd2si rcx,xmm4
vcvtsd2si rcx,QWORD PTR [rcx]
vcvtsd2si rcx,[rcx]
vcvttsd2si rcx,xmm4
vcvttsd2si rcx,QWORD PTR [rcx]
vcvttsd2si rcx,[rcx]
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq xmm6,xmm4,rcx
vcvtsi2sdq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2sdq xmm6,xmm4,[rcx]
vcvtsi2ssq xmm6,xmm4,rcx
vcvtsi2ssq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2ssq xmm6,xmm4,[rcx]
# Tests for op imm8, regq/mem64, xmm, xmm
vpinsrq xmm6,xmm4,rcx,7
vpinsrq xmm6,xmm4,QWORD PTR [rcx],7
vpinsrq xmm6,xmm4,[rcx],7
# Tests for op imm8, xmm, regq/mem64
vpextrq rcx,xmm4,7
vpextrq QWORD PTR [rcx],xmm4,7
vpextrq [rcx],xmm4,7
# Tests for op mem64, xmm, xmm
vmovlpd xmm6,xmm4,QWORD PTR [rcx]
vmovlpd xmm6,xmm4,[rcx]
vmovlps xmm6,xmm4,QWORD PTR [rcx]
vmovlps xmm6,xmm4,[rcx]
vmovhpd xmm6,xmm4,QWORD PTR [rcx]
vmovhpd xmm6,xmm4,[rcx]
vmovhps xmm6,xmm4,QWORD PTR [rcx]
vmovhps xmm6,xmm4,[rcx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [rcx],7
vcmpsd xmm2,xmm6,[rcx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [rcx],7
vroundsd xmm2,xmm6,[rcx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [rcx]
vaddsd xmm2,xmm6,[rcx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [rcx]
vcvtsd2ss xmm2,xmm6,[rcx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [rcx]
vdivsd xmm2,xmm6,[rcx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [rcx]
vmaxsd xmm2,xmm6,[rcx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [rcx]
vminsd xmm2,xmm6,[rcx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [rcx]
vmulsd xmm2,xmm6,[rcx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [rcx]
vsqrtsd xmm2,xmm6,[rcx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [rcx]
vsubsd xmm2,xmm6,[rcx]
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeqsd xmm2,xmm6,[rcx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpltsd xmm2,xmm6,[rcx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [rcx]
vcmplesd xmm2,xmm6,[rcx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpunordsd xmm2,xmm6,[rcx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneqsd xmm2,xmm6,[rcx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnltsd xmm2,xmm6,[rcx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlesd xmm2,xmm6,[rcx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpordsd xmm2,xmm6,[rcx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_uqsd xmm2,xmm6,[rcx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [rcx]
vcmpngesd xmm2,xmm6,[rcx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngtsd xmm2,xmm6,[rcx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalsesd xmm2,xmm6,[rcx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_oqsd xmm2,xmm6,[rcx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [rcx]
vcmpgesd xmm2,xmm6,[rcx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgtsd xmm2,xmm6,[rcx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [rcx]
vcmptruesd xmm2,xmm6,[rcx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ossd xmm2,xmm6,[rcx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmplt_oqsd xmm2,xmm6,[rcx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmple_oqsd xmm2,xmm6,[rcx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpunord_ssd xmm2,xmm6,[rcx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ussd xmm2,xmm6,[rcx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlt_uqsd xmm2,xmm6,[rcx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnle_uqsd xmm2,xmm6,[rcx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpord_ssd xmm2,xmm6,[rcx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ussd xmm2,xmm6,[rcx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnge_uqsd xmm2,xmm6,[rcx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngt_uqsd xmm2,xmm6,[rcx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalse_ossd xmm2,xmm6,[rcx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ossd xmm2,xmm6,[rcx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpge_oqsd xmm2,xmm6,[rcx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgt_oqsd xmm2,xmm6,[rcx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmptrue_ussd xmm2,xmm6,[rcx]
# Tests for op mem32
vldmxcsr DWORD PTR [rcx]
vldmxcsr [rcx]
vstmxcsr DWORD PTR [rcx]
vstmxcsr [rcx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [rcx]
vaddss xmm2,xmm6,[rcx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [rcx]
vcvtss2sd xmm2,xmm6,[rcx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [rcx]
vdivss xmm2,xmm6,[rcx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [rcx]
vmaxss xmm2,xmm6,[rcx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [rcx]
vminss xmm2,xmm6,[rcx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [rcx]
vmulss xmm2,xmm6,[rcx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [rcx]
vrcpss xmm2,xmm6,[rcx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [rcx]
vrsqrtss xmm2,xmm6,[rcx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [rcx]
vsqrtss xmm2,xmm6,[rcx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [rcx]
vsubss xmm2,xmm6,[rcx]
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeqss xmm2,xmm6,[rcx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [rcx]
vcmpltss xmm2,xmm6,[rcx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [rcx]
vcmpless xmm2,xmm6,[rcx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [rcx]
vcmpunordss xmm2,xmm6,[rcx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneqss xmm2,xmm6,[rcx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [rcx]
vcmpnltss xmm2,xmm6,[rcx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [rcx]
vcmpnless xmm2,xmm6,[rcx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [rcx]
vcmpordss xmm2,xmm6,[rcx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_uqss xmm2,xmm6,[rcx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [rcx]
vcmpngess xmm2,xmm6,[rcx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [rcx]
vcmpngtss xmm2,xmm6,[rcx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [rcx]
vcmpfalsess xmm2,xmm6,[rcx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_oqss xmm2,xmm6,[rcx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [rcx]
vcmpgess xmm2,xmm6,[rcx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [rcx]
vcmpgtss xmm2,xmm6,[rcx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [rcx]
vcmptruess xmm2,xmm6,[rcx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_osss xmm2,xmm6,[rcx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmplt_oqss xmm2,xmm6,[rcx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmple_oqss xmm2,xmm6,[rcx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpunord_sss xmm2,xmm6,[rcx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_usss xmm2,xmm6,[rcx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnlt_uqss xmm2,xmm6,[rcx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnle_uqss xmm2,xmm6,[rcx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpord_sss xmm2,xmm6,[rcx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_usss xmm2,xmm6,[rcx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnge_uqss xmm2,xmm6,[rcx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpngt_uqss xmm2,xmm6,[rcx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpfalse_osss xmm2,xmm6,[rcx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_osss xmm2,xmm6,[rcx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpge_oqss xmm2,xmm6,[rcx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpgt_oqss xmm2,xmm6,[rcx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [rcx]
vcmptrue_usss xmm2,xmm6,[rcx]
# Tests for op mem32, ymm
vbroadcastss ymm4,DWORD PTR [rcx]
vbroadcastss ymm4,[rcx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [rcx]
vcomiss xmm4,[rcx]
vpmovsxbd xmm6,xmm4
vpmovsxbd xmm4,DWORD PTR [rcx]
vpmovsxbd xmm4,[rcx]
vpmovsxwq xmm6,xmm4
vpmovsxwq xmm4,DWORD PTR [rcx]
vpmovsxwq xmm4,[rcx]
vpmovzxbd xmm6,xmm4
vpmovzxbd xmm4,DWORD PTR [rcx]
vpmovzxbd xmm4,[rcx]
vpmovzxwq xmm6,xmm4
vpmovzxwq xmm4,DWORD PTR [rcx]
vpmovzxwq xmm4,[rcx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [rcx]
vucomiss xmm4,[rcx]
# Tests for op mem32, xmm
vbroadcastss xmm4,DWORD PTR [rcx]
vbroadcastss xmm4,[rcx]
vmovss xmm4,DWORD PTR [rcx]
vmovss xmm4,[rcx]
# Tests for op xmm, mem32
vmovss DWORD PTR [rcx],xmm4
vmovss [rcx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd ecx,xmm4
vmovd DWORD PTR [rcx],xmm4
vmovd xmm4,ecx
vmovd xmm4,DWORD PTR [rcx]
vmovd [rcx],xmm4
vmovd xmm4,[rcx]
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [rcx]
vcvtss2si ecx,[rcx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [rcx]
vcvttss2si ecx,[rcx]
# Tests for op xmm/mem32, regq
vcvtss2si rcx,xmm4
vcvtss2si rcx,DWORD PTR [rcx]
vcvtss2si rcx,[rcx]
vcvttss2si rcx,xmm4
vcvttss2si rcx,DWORD PTR [rcx]
vcvttss2si rcx,[rcx]
# Tests for op xmm, regq
vmovmskpd rcx,xmm4
vmovmskps rcx,xmm4
vpmovmskb rcx,xmm4
# Tests for op imm8, xmm, regq/mem32
vextractps rcx,xmm4,7
vextractps DWORD PTR [rcx],xmm4,7
vextractps [rcx],xmm4,7
# Tests for op imm8, xmm, regl/mem32
vpextrd ecx,xmm4,7
vpextrd DWORD PTR [rcx],xmm4,7
vpextrd [rcx],xmm4,7
vextractps ecx,xmm4,7
vextractps DWORD PTR [rcx],xmm4,7
vextractps [rcx],xmm4,7
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd xmm6,xmm4,ecx,7
vpinsrd xmm6,xmm4,DWORD PTR [rcx],7
vpinsrd xmm6,xmm4,[rcx],7
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [rcx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [rcx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [rcx],7
vcmpss xmm2,xmm6,[rcx],7
vinsertps xmm2,xmm6,xmm4,7
vinsertps xmm2,xmm6,DWORD PTR [rcx],7
vinsertps xmm2,xmm6,[rcx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [rcx],7
vroundss xmm2,xmm6,[rcx],7
# Tests for op xmm/m16, xmm
vpmovsxbq xmm6,xmm4
vpmovsxbq xmm4,WORD PTR [rcx]
vpmovsxbq xmm4,[rcx]
vpmovzxbq xmm6,xmm4
vpmovzxbq xmm4,WORD PTR [rcx]
vpmovzxbq xmm4,[rcx]
# Tests for op imm8, xmm, regl/mem16
vpextrw ecx,xmm4,7
vpextrw WORD PTR [rcx],xmm4,7
vpextrw [rcx],xmm4,7
# Tests for op imm8, xmm, regq/mem16
vpextrw rcx,xmm4,7
vpextrw WORD PTR [rcx],xmm4,7
vpextrw [rcx],xmm4,7
# Tests for op imm8, regl/mem16, xmm, xmm
vpinsrw xmm6,xmm4,ecx,7
vpinsrw xmm6,xmm4,WORD PTR [rcx],7
vpinsrw xmm6,xmm4,[rcx],7
vpinsrw xmm6,xmm4,rcx,7
vpinsrw xmm6,xmm4,WORD PTR [rcx],7
vpinsrw xmm6,xmm4,[rcx],7
# Tests for op imm8, xmm, regl/mem8
vpextrb ecx,xmm4,7
vpextrb BYTE PTR [rcx],xmm4,7
vpextrb [rcx],xmm4,7
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb xmm6,xmm4,ecx,7
vpinsrb xmm6,xmm4,BYTE PTR [rcx],7
vpinsrb xmm6,xmm4,[rcx],7
# Tests for op imm8, xmm, regq
vpextrw rcx,xmm4,7
# Tests for op imm8, xmm, regq/mem8
vpextrb rcx,xmm4,7
vpextrb BYTE PTR [rcx],xmm4,7
vpextrb [rcx],xmm4,7
# Tests for op xmm, xmm
vmaskmovdqu xmm6,xmm4
vmovq xmm6,xmm4
# Tests for op xmm, regl
vmovmskpd ecx,xmm4
vmovmskps ecx,xmm4
vpmovmskb ecx,xmm4
# Tests for op xmm, xmm, xmm
vmovhlps xmm2,xmm6,xmm4
vmovlhps xmm2,xmm6,xmm4
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
# Tests for op imm8, xmm, xmm
vpslld xmm6,xmm4,7
vpslldq xmm6,xmm4,7
vpsllq xmm6,xmm4,7
vpsllw xmm6,xmm4,7
vpsrad xmm6,xmm4,7
vpsraw xmm6,xmm4,7
vpsrld xmm6,xmm4,7
vpsrldq xmm6,xmm4,7
vpsrlq xmm6,xmm4,7
vpsrlw xmm6,xmm4,7
# Tests for op imm8, xmm, regl
vpextrw ecx,xmm4,7
# Tests for op ymm, regl
vmovmskpd ecx,ymm4
vmovmskps ecx,ymm4
# Tests for op ymm, regq
vmovmskpd rcx,ymm4
vmovmskps rcx,ymm4
# Default instructions without suffixes.
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm6,ymm4
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm6,ymm4
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm6,ymm4
# Tests with different memory and register operands.
vldmxcsr DWORD PTR ds:0x12345678
vmovdqa xmm8,XMMWORD PTR ds:0x12345678
vmovdqa XMMWORD PTR ds:0x12345678,xmm8
vmovd DWORD PTR ds:0x12345678,xmm8
vcvtsd2si r8d,QWORD PTR ds:0x12345678
vcvtdq2pd ymm8,XMMWORD PTR ds:0x12345678
vcvtpd2ps xmm8,YMMWORD PTR ds:0x12345678
vpavgb xmm15,xmm8,XMMWORD PTR ds:0x12345678
vaeskeygenassist xmm8,XMMWORD PTR ds:0x12345678,7
vpextrb ds:0x12345678,xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR ds:0x12345678
vpclmulqdq xmm15,xmm8,XMMWORD PTR ds:0x12345678,7
vblendvps xmm14,xmm12,XMMWORD PTR ds:0x12345678,xmm8
vpinsrb xmm15,xmm8,ds:0x12345678,7
vmovdqa ymm8,YMMWORD PTR ds:0x12345678
vmovdqa YMMWORD PTR ds:0x12345678,ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR ds:0x12345678
vroundpd ymm8,YMMWORD PTR ds:0x12345678,7
vextractf128 XMMWORD PTR ds:0x12345678,ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR ds:0x12345678,7
vblendvpd ymm14,ymm12,YMMWORD PTR ds:0x12345678,ymm8
vldmxcsr DWORD PTR [rbp]
vmovdqa xmm8,XMMWORD PTR [rbp]
vmovdqa XMMWORD PTR [rbp],xmm8
vmovd DWORD PTR [rbp],xmm8
vcvtsd2si r8d,QWORD PTR [rbp]
vcvtdq2pd ymm8,XMMWORD PTR [rbp]
vcvtpd2ps xmm8,YMMWORD PTR [rbp]
vpavgb xmm15,xmm8,XMMWORD PTR [rbp]
vaeskeygenassist xmm8,XMMWORD PTR [rbp],7
vpextrb [rbp],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbp],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbp],xmm8
vpinsrb xmm15,xmm8,[rbp],7
vmovdqa ymm8,YMMWORD PTR [rbp]
vmovdqa YMMWORD PTR [rbp],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbp]
vroundpd ymm8,YMMWORD PTR [rbp],7
vextractf128 XMMWORD PTR [rbp],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbp],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbp],ymm8
vldmxcsr DWORD PTR [rbp+0x99]
vmovdqa xmm8,XMMWORD PTR [rbp+0x99]
vmovdqa XMMWORD PTR [rbp+0x99],xmm8
vmovd DWORD PTR [rbp+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbp+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbp+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbp+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbp+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbp+0x99],7
vpextrb [rbp+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbp+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbp+0x99],xmm8
vpinsrb xmm15,xmm8,[rbp+0x99],7
vmovdqa ymm8,YMMWORD PTR [rbp+0x99]
vmovdqa YMMWORD PTR [rbp+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbp+0x99]
vroundpd ymm8,YMMWORD PTR [rbp+0x99],7
vextractf128 XMMWORD PTR [rbp+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbp+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbp+0x99],ymm8
vldmxcsr DWORD PTR [r15+0x99]
vmovdqa xmm8,XMMWORD PTR [r15+0x99]
vmovdqa XMMWORD PTR [r15+0x99],xmm8
vmovd DWORD PTR [r15+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r15+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r15+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r15+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r15+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r15+0x99],7
vpextrb [r15+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r15+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r15+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r15+0x99],xmm8
vpinsrb xmm15,xmm8,[r15+0x99],7
vmovdqa ymm8,YMMWORD PTR [r15+0x99]
vmovdqa YMMWORD PTR [r15+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r15+0x99]
vroundpd ymm8,YMMWORD PTR [r15+0x99],7
vextractf128 XMMWORD PTR [r15+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r15+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r15+0x99],ymm8
vldmxcsr DWORD PTR [rip+0x99]
vmovdqa xmm8,XMMWORD PTR [rip+0x99]
vmovdqa XMMWORD PTR [rip+0x99],xmm8
vmovd DWORD PTR [rip+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rip+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rip+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rip+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rip+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rip+0x99],7
vpextrb [rip+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rip+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rip+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rip+0x99],xmm8
vpinsrb xmm15,xmm8,[rip+0x99],7
vmovdqa ymm8,YMMWORD PTR [rip+0x99]
vmovdqa YMMWORD PTR [rip+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rip+0x99]
vroundpd ymm8,YMMWORD PTR [rip+0x99],7
vextractf128 XMMWORD PTR [rip+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rip+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rip+0x99],ymm8
vldmxcsr DWORD PTR [rsp+0x99]
vmovdqa xmm8,XMMWORD PTR [rsp+0x99]
vmovdqa XMMWORD PTR [rsp+0x99],xmm8
vmovd DWORD PTR [rsp+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rsp+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rsp+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rsp+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rsp+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rsp+0x99],7
vpextrb [rsp+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rsp+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rsp+0x99],xmm8
vpinsrb xmm15,xmm8,[rsp+0x99],7
vmovdqa ymm8,YMMWORD PTR [rsp+0x99]
vmovdqa YMMWORD PTR [rsp+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rsp+0x99]
vroundpd ymm8,YMMWORD PTR [rsp+0x99],7
vextractf128 XMMWORD PTR [rsp+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rsp+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rsp+0x99],ymm8
vldmxcsr DWORD PTR [r12+0x99]
vmovdqa xmm8,XMMWORD PTR [r12+0x99]
vmovdqa XMMWORD PTR [r12+0x99],xmm8
vmovd DWORD PTR [r12+0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r12+0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r12+0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r12+0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r12+0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r12+0x99],7
vpextrb [r12+0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r12+0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r12+0x99],xmm8
vpinsrb xmm15,xmm8,[r12+0x99],7
vmovdqa ymm8,YMMWORD PTR [r12+0x99]
vmovdqa YMMWORD PTR [r12+0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r12+0x99]
vroundpd ymm8,YMMWORD PTR [r12+0x99],7
vextractf128 XMMWORD PTR [r12+0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r12+0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r12+0x99],ymm8
vldmxcsr DWORD PTR [riz*1-0x99]
vmovdqa xmm8,XMMWORD PTR [riz*1-0x99]
vmovdqa XMMWORD PTR [riz*1-0x99],xmm8
vmovd DWORD PTR [riz*1-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [riz*1-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [riz*1-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [riz*1-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [riz*1-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [riz*1-0x99],7
vpextrb [riz*1-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*1-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [riz*1-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [riz*1-0x99],xmm8
vpinsrb xmm15,xmm8,[riz*1-0x99],7
vmovdqa ymm8,YMMWORD PTR [riz*1-0x99]
vmovdqa YMMWORD PTR [riz*1-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [riz*1-0x99]
vroundpd ymm8,YMMWORD PTR [riz*1-0x99],7
vextractf128 XMMWORD PTR [riz*1-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [riz*1-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [riz*1-0x99],ymm8
vldmxcsr DWORD PTR [riz*2-0x99]
vmovdqa xmm8,XMMWORD PTR [riz*2-0x99]
vmovdqa XMMWORD PTR [riz*2-0x99],xmm8
vmovd DWORD PTR [riz*2-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [riz*2-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [riz*2-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [riz*2-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [riz*2-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [riz*2-0x99],7
vpextrb [riz*2-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*2-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [riz*2-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [riz*2-0x99],xmm8
vpinsrb xmm15,xmm8,[riz*2-0x99],7
vmovdqa ymm8,YMMWORD PTR [riz*2-0x99]
vmovdqa YMMWORD PTR [riz*2-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [riz*2-0x99]
vroundpd ymm8,YMMWORD PTR [riz*2-0x99],7
vextractf128 XMMWORD PTR [riz*2-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [riz*2-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [riz*2-0x99],ymm8
vldmxcsr DWORD PTR [rbx+riz*1-0x99]
vmovdqa xmm8,XMMWORD PTR [rbx+riz*1-0x99]
vmovdqa XMMWORD PTR [rbx+riz*1-0x99],xmm8
vmovd DWORD PTR [rbx+riz*1-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbx+riz*1-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbx+riz*1-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbx+riz*1-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbx+riz*1-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbx+riz*1-0x99],7
vpextrb [rbx+riz*1-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*1-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbx+riz*1-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbx+riz*1-0x99],xmm8
vpinsrb xmm15,xmm8,[rbx+riz*1-0x99],7
vmovdqa ymm8,YMMWORD PTR [rbx+riz*1-0x99]
vmovdqa YMMWORD PTR [rbx+riz*1-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbx+riz*1-0x99]
vroundpd ymm8,YMMWORD PTR [rbx+riz*1-0x99],7
vextractf128 XMMWORD PTR [rbx+riz*1-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbx+riz*1-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbx+riz*1-0x99],ymm8
vldmxcsr DWORD PTR [rbx+riz*2-0x99]
vmovdqa xmm8,XMMWORD PTR [rbx+riz*2-0x99]
vmovdqa XMMWORD PTR [rbx+riz*2-0x99],xmm8
vmovd DWORD PTR [rbx+riz*2-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbx+riz*2-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbx+riz*2-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbx+riz*2-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbx+riz*2-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbx+riz*2-0x99],7
vpextrb [rbx+riz*2-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*2-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbx+riz*2-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbx+riz*2-0x99],xmm8
vpinsrb xmm15,xmm8,[rbx+riz*2-0x99],7
vmovdqa ymm8,YMMWORD PTR [rbx+riz*2-0x99]
vmovdqa YMMWORD PTR [rbx+riz*2-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbx+riz*2-0x99]
vroundpd ymm8,YMMWORD PTR [rbx+riz*2-0x99],7
vextractf128 XMMWORD PTR [rbx+riz*2-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbx+riz*2-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbx+riz*2-0x99],ymm8
vldmxcsr DWORD PTR [r12+r15*4-0x99]
vmovdqa xmm8,XMMWORD PTR [r12+r15*4-0x99]
vmovdqa XMMWORD PTR [r12+r15*4-0x99],xmm8
vmovd DWORD PTR [r12+r15*4-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r12+r15*4-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r12+r15*4-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r12+r15*4-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r12+r15*4-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r12+r15*4-0x99],7
vpextrb [r12+r15*4-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+r15*4-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r12+r15*4-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r12+r15*4-0x99],xmm8
vpinsrb xmm15,xmm8,[r12+r15*4-0x99],7
vmovdqa ymm8,YMMWORD PTR [r12+r15*4-0x99]
vmovdqa YMMWORD PTR [r12+r15*4-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r12+r15*4-0x99]
vroundpd ymm8,YMMWORD PTR [r12+r15*4-0x99],7
vextractf128 XMMWORD PTR [r12+r15*4-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r12+r15*4-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r12+r15*4-0x99],ymm8
vldmxcsr DWORD PTR [r8+r15*8-0x99]
vmovdqa xmm8,XMMWORD PTR [r8+r15*8-0x99]
vmovdqa XMMWORD PTR [r8+r15*8-0x99],xmm8
vmovd DWORD PTR [r8+r15*8-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [r8+r15*8-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [r8+r15*8-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [r8+r15*8-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [r8+r15*8-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [r8+r15*8-0x99],7
vpextrb [r8+r15*8-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [r8+r15*8-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [r8+r15*8-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [r8+r15*8-0x99],xmm8
vpinsrb xmm15,xmm8,[r8+r15*8-0x99],7
vmovdqa ymm8,YMMWORD PTR [r8+r15*8-0x99]
vmovdqa YMMWORD PTR [r8+r15*8-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [r8+r15*8-0x99]
vroundpd ymm8,YMMWORD PTR [r8+r15*8-0x99],7
vextractf128 XMMWORD PTR [r8+r15*8-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [r8+r15*8-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [r8+r15*8-0x99],ymm8
vldmxcsr DWORD PTR [rbp+r12*4-0x99]
vmovdqa xmm8,XMMWORD PTR [rbp+r12*4-0x99]
vmovdqa XMMWORD PTR [rbp+r12*4-0x99],xmm8
vmovd DWORD PTR [rbp+r12*4-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rbp+r12*4-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rbp+r12*4-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rbp+r12*4-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rbp+r12*4-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rbp+r12*4-0x99],7
vpextrb [rbp+r12*4-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+r12*4-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rbp+r12*4-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rbp+r12*4-0x99],xmm8
vpinsrb xmm15,xmm8,[rbp+r12*4-0x99],7
vmovdqa ymm8,YMMWORD PTR [rbp+r12*4-0x99]
vmovdqa YMMWORD PTR [rbp+r12*4-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rbp+r12*4-0x99]
vroundpd ymm8,YMMWORD PTR [rbp+r12*4-0x99],7
vextractf128 XMMWORD PTR [rbp+r12*4-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rbp+r12*4-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rbp+r12*4-0x99],ymm8
vldmxcsr DWORD PTR [rsp+r13*1-0x99]
vmovdqa xmm8,XMMWORD PTR [rsp+r13*1-0x99]
vmovdqa XMMWORD PTR [rsp+r13*1-0x99],xmm8
vmovd DWORD PTR [rsp+r13*1-0x99],xmm8
vcvtsd2si r8d,QWORD PTR [rsp+r13*1-0x99]
vcvtdq2pd ymm8,XMMWORD PTR [rsp+r13*1-0x99]
vcvtpd2ps xmm8,YMMWORD PTR [rsp+r13*1-0x99]
vpavgb xmm15,xmm8,XMMWORD PTR [rsp+r13*1-0x99]
vaeskeygenassist xmm8,XMMWORD PTR [rsp+r13*1-0x99],7
vpextrb [rsp+r13*1-0x99],xmm8,7
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+r13*1-0x99]
vpclmulqdq xmm15,xmm8,XMMWORD PTR [rsp+r13*1-0x99],7
vblendvps xmm14,xmm12,XMMWORD PTR [rsp+r13*1-0x99],xmm8
vpinsrb xmm15,xmm8,[rsp+r13*1-0x99],7
vmovdqa ymm8,YMMWORD PTR [rsp+r13*1-0x99]
vmovdqa YMMWORD PTR [rsp+r13*1-0x99],ymm8
vpermilpd ymm15,ymm8,YMMWORD PTR [rsp+r13*1-0x99]
vroundpd ymm8,YMMWORD PTR [rsp+r13*1-0x99],7
vextractf128 XMMWORD PTR [rsp+r13*1-0x99],ymm8,7
vperm2f128 ymm15,ymm8,YMMWORD PTR [rsp+r13*1-0x99],7
vblendvpd ymm14,ymm12,YMMWORD PTR [rsp+r13*1-0x99],ymm8
# Tests for all register operands.
vmovmskpd r8d,xmm8
vpslld xmm15,xmm8,7
vmovmskps r8d,ymm8
vmovdqa xmm15,xmm8
vmovd r8d,xmm8
vcvtsd2si r8d,xmm8
vcvtdq2pd ymm8,xmm8
vcvtpd2ps xmm8,ymm8
vaeskeygenassist xmm15,xmm8,7
vpextrb r8d,xmm8,7
vcvtsi2sd xmm15,xmm8,r8d
vpclmulqdq xmm12,xmm15,xmm8,7
vblendvps xmm14,xmm12,xmm8,xmm8
vpinsrb xmm15,xmm8,r8d,7
vmovdqa ymm15,ymm8
vpermilpd ymm12,ymm15,ymm8
vroundpd ymm15,ymm8,7
vextractf128 xmm8,ymm8,7
vperm2f128 ymm12,ymm15,ymm8,7
vblendvpd ymm14,ymm12,ymm15,ymm8
vinsertf128 ymm15,ymm8,xmm8,7
# Tests for different memory/register operand
vcvtsd2si r8,QWORD PTR [rcx]
vextractps r8,xmm8,10
vcvtss2si r8,DWORD PTR [rcx]
vpinsrw xmm8,xmm15,r8,7
|
stsp/binutils-ia16
| 10,578
|
gas/testsuite/gas/i386/x86-64-avx512vbmi2.s
|
# Check 64bit AVX512VBMI2 instructions
#
# Assembler test input (no runtime behavior): exercises the AVX512-VBMI2
# instruction group across its operand forms — plain register, {k7}
# merge-masking, {k7}{z} zero-masking, memory operands (including the
# Disp8*N compressed-displacement cases marked "Disp8"), and {1toN}
# embedded broadcasts where the instruction supports them.
# The first half is AT&T syntax; the identical coverage is then repeated
# in Intel syntax after the .intel_syntax noprefix switch.
.allow_index_reg
.text
_start:
# --- byte/word compress and expand (vpcompress*/vpexpand*) ---
vpcompressb %zmm30, (%rcx){%k7} # AVX512VBMI2
vpcompressb %zmm30, 0x123(%rax,%r14,8) # AVX512VBMI2
vpcompressb %zmm30, 126(%rdx) # AVX512VBMI2 Disp8
vpcompressb %zmm29, %zmm30 # AVX512VBMI2
vpcompressb %zmm29, %zmm30{%k7} # AVX512VBMI2
vpcompressb %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpcompressw %zmm30, (%rcx){%k7} # AVX512VBMI2
vpcompressw %zmm30, 0x123(%rax,%r14,8) # AVX512VBMI2
vpcompressw %zmm30, 254(%rdx) # AVX512VBMI2 Disp8
vpcompressw %zmm29, %zmm30 # AVX512VBMI2
vpcompressw %zmm29, %zmm30{%k7} # AVX512VBMI2
vpcompressw %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpexpandb (%rcx), %zmm30{%k7} # AVX512VBMI2
vpexpandb (%rcx), %zmm30{%k7}{z} # AVX512VBMI2
vpexpandb 0x123(%rax,%r14,8), %zmm30 # AVX512VBMI2
vpexpandb 126(%rdx), %zmm30 # AVX512VBMI2 Disp8
vpexpandb %zmm29, %zmm30 # AVX512VBMI2
vpexpandb %zmm29, %zmm30{%k7} # AVX512VBMI2
vpexpandb %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpexpandw (%rcx), %zmm30{%k7} # AVX512VBMI2
vpexpandw (%rcx), %zmm30{%k7}{z} # AVX512VBMI2
vpexpandw 0x123(%rax,%r14,8), %zmm30 # AVX512VBMI2
vpexpandw 254(%rdx), %zmm30 # AVX512VBMI2 Disp8
vpexpandw %zmm29, %zmm30 # AVX512VBMI2
vpexpandw %zmm29, %zmm30{%k7} # AVX512VBMI2
vpexpandw %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
# --- concatenate-and-shift, variable count (vpshldv*/vpshrdv*) ---
vpshldvw %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshldvw %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshldvw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshldvw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshldvw 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshldvd %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshldvd %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshldvd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshldvd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshldvd 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshldvd 508(%rdx){1to16}, %zmm29, %zmm30 # AVX512VBMI2 Disp8
vpshldvq %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshldvq %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshldvq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshldvq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshldvq 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshldvq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512VBMI2 Disp8
vpshrdvw %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshrdvw %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshrdvw %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshrdvw 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshrdvw 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshrdvd %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshrdvd %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshrdvd %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshrdvd 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshrdvd 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshrdvq %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshrdvq %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshrdvq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshrdvq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshrdvq 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
# --- concatenate-and-shift, immediate count (vpshld*/vpshrd*) ---
vpshldw $0xab, %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshldw $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshldw $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshldw $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshldw $123, 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshldd $0xab, %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshldd $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshldd $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshldd $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshldd $123, 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshldq $0xab, %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshldq $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshldq $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshldq $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshldq $123, 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshldq $123, 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512VBMI2
vpshrdw $0xab, %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshrdw $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshrdw $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshrdw $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshrdw $123, 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshrdd $0xab, %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshrdd $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshrdd $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshrdd $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshrdd $123, (%rcx){1to16}, %zmm29, %zmm30 # AVX512VBMI2
vpshrdd $123, 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
vpshrdq $0xab, %zmm28, %zmm29, %zmm30 # AVX512VBMI2
vpshrdq $0xab, %zmm28, %zmm29, %zmm30{%k7} # AVX512VBMI2
vpshrdq $0xab, %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512VBMI2
vpshrdq $123, 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512VBMI2
vpshrdq $123, (%rcx){1to8}, %zmm29, %zmm30 # AVX512VBMI2
vpshrdq $123, 8128(%rdx), %zmm29, %zmm30 # AVX512VBMI2
# --- the same coverage repeated in Intel syntax ---
.intel_syntax noprefix
vpcompressb ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512VBMI2
vpcompressb ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512VBMI2
vpcompressb ZMMWORD PTR [rdx+126], zmm30 # AVX512VBMI2 Disp8
vpcompressb zmm30, zmm29 # AVX512VBMI2
vpcompressb zmm30{k7}, zmm29 # AVX512VBMI2
vpcompressb zmm30{k7}{z}, zmm29 # AVX512VBMI2
vpcompressw ZMMWORD PTR [rcx]{k7}, zmm30 # AVX512VBMI2
vpcompressw ZMMWORD PTR [rax+r14*8+0x1234], zmm30 # AVX512VBMI2
vpcompressw ZMMWORD PTR [rdx+254], zmm30 # AVX512VBMI2 Disp8
vpcompressw zmm30, zmm29 # AVX512VBMI2
vpcompressw zmm30{k7}, zmm29 # AVX512VBMI2
vpcompressw zmm30{k7}{z}, zmm29 # AVX512VBMI2
vpexpandb zmm30{k7}, ZMMWORD PTR [rcx] # AVX512VBMI2
vpexpandb zmm30{k7}{z}, ZMMWORD PTR [rcx] # AVX512VBMI2
vpexpandb zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpexpandb zmm30, ZMMWORD PTR [rdx+126] # AVX512VBMI2 Disp8
vpexpandb zmm30, zmm29 # AVX512VBMI2
vpexpandb zmm30{k7}, zmm29 # AVX512VBMI2
vpexpandb zmm30{k7}{z}, zmm29 # AVX512VBMI2
vpexpandw zmm30{k7}, ZMMWORD PTR [rcx] # AVX512VBMI2
vpexpandw zmm30{k7}{z}, ZMMWORD PTR [rcx] # AVX512VBMI2
vpexpandw zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpexpandw zmm30, ZMMWORD PTR [rdx+254] # AVX512VBMI2 Disp8
vpexpandw zmm30, zmm29 # AVX512VBMI2
vpexpandw zmm30{k7}, zmm29 # AVX512VBMI2
vpexpandw zmm30{k7}{z}, zmm29 # AVX512VBMI2
vpshldvw zmm30, zmm29, zmm28 # AVX512VBMI2
vpshldvw zmm30{k7}, zmm29, zmm28 # AVX512VBMI2
vpshldvw zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI2
vpshldvw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpshldvw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI2 Disp8
vpshldvd zmm30, zmm29, zmm28 # AVX512VBMI2
vpshldvd zmm30{k7}, zmm29, zmm28 # AVX512VBMI2
vpshldvd zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI2
vpshldvd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpshldvd zmm30, zmm29, [rcx]{1to16} # AVX512VBMI2
vpshldvd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI2 Disp8
vpshldvd zmm30, zmm29, [rdx+508]{1to16} # AVX512VBMI2 Disp8
vpshldvq zmm30, zmm29, zmm28 # AVX512VBMI2
vpshldvq zmm30{k7}, zmm29, zmm28 # AVX512VBMI2
vpshldvq zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI2
vpshldvq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpshldvq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI2 Disp8
vpshldvq zmm30, zmm29, [rdx+1016]{1to8} # AVX512VBMI2 Disp8
vpshrdvw zmm30, zmm29, zmm28 # AVX512VBMI2
vpshrdvw zmm30{k7}, zmm29, zmm28 # AVX512VBMI2
vpshrdvw zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI2
vpshrdvw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpshrdvw zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI2 Disp8
vpshrdvd zmm30, zmm29, zmm28 # AVX512VBMI2
vpshrdvd zmm30{k7}, zmm29, zmm28 # AVX512VBMI2
vpshrdvd zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI2
vpshrdvd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpshrdvd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI2 Disp8
vpshrdvd zmm30, zmm29, [rdx+508]{1to16} # AVX512VBMI2 Disp8
vpshrdvq zmm30, zmm29, zmm28 # AVX512VBMI2
vpshrdvq zmm30{k7}, zmm29, zmm28 # AVX512VBMI2
vpshrdvq zmm30{k7}{z}, zmm29, zmm28 # AVX512VBMI2
vpshrdvq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512VBMI2
vpshrdvq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512VBMI2 Disp8
vpshrdvq zmm30, zmm29, [rdx+1016]{1to8} # AVX512VBMI2 Disp8
vpshldw zmm30, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldw zmm30{k7}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldw zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VBMI2
vpshldw zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512VBMI2 Disp8
vpshldd zmm30, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldd zmm30{k7}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldd zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VBMI2
vpshldd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512VBMI2 Disp8
vpshldd zmm30, zmm29, [rdx+508]{1to16}, 123 # AVX512VBMI2 Disp8
vpshldq zmm30, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldq zmm30{k7}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldq zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshldq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VBMI2
vpshldq zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512VBMI2 Disp8
vpshldq zmm30, zmm29, [rdx+1016]{1to8}, 123 # AVX512VBMI2 Disp8
vpshrdw zmm30, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdw zmm30{k7}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdw zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdw zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VBMI2
vpshrdw zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512VBMI2 Disp8
vpshrdd zmm30, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdd zmm30{k7}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdd zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VBMI2
vpshrdd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512VBMI2 Disp8
vpshrdd zmm30, zmm29, [rdx+508]{1to16}, 123 # AVX512VBMI2 Disp8
vpshrdq zmm30, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdq zmm30{k7}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdq zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512VBMI2
vpshrdq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512VBMI2
vpshrdq zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512VBMI2 Disp8
vpshrdq zmm30, zmm29, [rdx+1016]{1to8}, 123 # AVX512VBMI2 Disp8
|
stsp/binutils-ia16
| 4,117
|
gas/testsuite/gas/i386/x86-64-fma-scalar.s
|
# Check 64bit AVX scalar instructions
#
# Assembler test input (no runtime behavior): scalar FMA3 forms
# (vfmadd/vfmsub/vfnmadd/vfnmsub with 132/213/231 operand orderings)
# for both double (sd, mem64) and single (ss, mem32) precision.
# Each mnemonic is exercised with a register source and a memory source;
# the Intel-syntax half additionally covers sized (QWORD/DWORD PTR) and
# unsized memory operands.
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%rcx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%rcx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%rcx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%rcx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%rcx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%rcx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%rcx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%rcx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%rcx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%rcx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%rcx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%rcx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%rcx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%rcx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%rcx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%rcx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%rcx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%rcx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%rcx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%rcx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%rcx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%rcx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%rcx),%xmm6,%xmm2
# --- same coverage repeated in Intel syntax ---
.intel_syntax noprefix
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd132sd xmm2,xmm6,[rcx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd213sd xmm2,xmm6,[rcx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd231sd xmm2,xmm6,[rcx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub132sd xmm2,xmm6,[rcx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub213sd xmm2,xmm6,[rcx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub231sd xmm2,xmm6,[rcx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd132sd xmm2,xmm6,[rcx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd213sd xmm2,xmm6,[rcx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd231sd xmm2,xmm6,[rcx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub132sd xmm2,xmm6,[rcx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub213sd xmm2,xmm6,[rcx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub231sd xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd132ss xmm2,xmm6,[rcx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd213ss xmm2,xmm6,[rcx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd231ss xmm2,xmm6,[rcx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub132ss xmm2,xmm6,[rcx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub213ss xmm2,xmm6,[rcx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub231ss xmm2,xmm6,[rcx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd132ss xmm2,xmm6,[rcx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd213ss xmm2,xmm6,[rcx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd231ss xmm2,xmm6,[rcx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub132ss xmm2,xmm6,[rcx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub213ss xmm2,xmm6,[rcx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub231ss xmm2,xmm6,[rcx]
|
stsp/binutils-ia16
| 16,330
|
gas/testsuite/gas/i386/x86-64-fma.s
|
# Check 64bit FMA instructions
#
# Assembler test input (no runtime behavior): packed and scalar FMA3
# forms (vfmadd/vfmsub/vfnmadd/vfnmsub plus vfmaddsub/vfmsubadd, each in
# 132/213/231 operand orderings) over ymm/mem256, xmm/mem128, xmm/mem64
# and xmm/mem32 operand shapes. Every mnemonic is exercised with a
# register source and a memory source; the Intel-syntax half additionally
# covers sized (YMMWORD/XMMWORD/QWORD/DWORD PTR) and unsized memory
# operands.
.allow_index_reg
.text
_start:
# Tests for op ymm/mem256, ymm, ymm
vfmadd132pd %ymm4,%ymm6,%ymm2
vfmadd132pd (%rcx),%ymm6,%ymm2
vfmadd132ps %ymm4,%ymm6,%ymm2
vfmadd132ps (%rcx),%ymm6,%ymm2
vfmadd213pd %ymm4,%ymm6,%ymm2
vfmadd213pd (%rcx),%ymm6,%ymm2
vfmadd213ps %ymm4,%ymm6,%ymm2
vfmadd213ps (%rcx),%ymm6,%ymm2
vfmadd231pd %ymm4,%ymm6,%ymm2
vfmadd231pd (%rcx),%ymm6,%ymm2
vfmadd231ps %ymm4,%ymm6,%ymm2
vfmadd231ps (%rcx),%ymm6,%ymm2
vfmaddsub132pd %ymm4,%ymm6,%ymm2
vfmaddsub132pd (%rcx),%ymm6,%ymm2
vfmaddsub132ps %ymm4,%ymm6,%ymm2
vfmaddsub132ps (%rcx),%ymm6,%ymm2
vfmaddsub213pd %ymm4,%ymm6,%ymm2
vfmaddsub213pd (%rcx),%ymm6,%ymm2
vfmaddsub213ps %ymm4,%ymm6,%ymm2
vfmaddsub213ps (%rcx),%ymm6,%ymm2
vfmaddsub231pd %ymm4,%ymm6,%ymm2
vfmaddsub231pd (%rcx),%ymm6,%ymm2
vfmaddsub231ps %ymm4,%ymm6,%ymm2
vfmaddsub231ps (%rcx),%ymm6,%ymm2
vfmsubadd132pd %ymm4,%ymm6,%ymm2
vfmsubadd132pd (%rcx),%ymm6,%ymm2
vfmsubadd132ps %ymm4,%ymm6,%ymm2
vfmsubadd132ps (%rcx),%ymm6,%ymm2
vfmsubadd213pd %ymm4,%ymm6,%ymm2
vfmsubadd213pd (%rcx),%ymm6,%ymm2
vfmsubadd213ps %ymm4,%ymm6,%ymm2
vfmsubadd213ps (%rcx),%ymm6,%ymm2
vfmsubadd231pd %ymm4,%ymm6,%ymm2
vfmsubadd231pd (%rcx),%ymm6,%ymm2
vfmsubadd231ps %ymm4,%ymm6,%ymm2
vfmsubadd231ps (%rcx),%ymm6,%ymm2
vfmsub132pd %ymm4,%ymm6,%ymm2
vfmsub132pd (%rcx),%ymm6,%ymm2
vfmsub132ps %ymm4,%ymm6,%ymm2
vfmsub132ps (%rcx),%ymm6,%ymm2
vfmsub213pd %ymm4,%ymm6,%ymm2
vfmsub213pd (%rcx),%ymm6,%ymm2
vfmsub213ps %ymm4,%ymm6,%ymm2
vfmsub213ps (%rcx),%ymm6,%ymm2
vfmsub231pd %ymm4,%ymm6,%ymm2
vfmsub231pd (%rcx),%ymm6,%ymm2
vfmsub231ps %ymm4,%ymm6,%ymm2
vfmsub231ps (%rcx),%ymm6,%ymm2
vfnmadd132pd %ymm4,%ymm6,%ymm2
vfnmadd132pd (%rcx),%ymm6,%ymm2
vfnmadd132ps %ymm4,%ymm6,%ymm2
vfnmadd132ps (%rcx),%ymm6,%ymm2
vfnmadd213pd %ymm4,%ymm6,%ymm2
vfnmadd213pd (%rcx),%ymm6,%ymm2
vfnmadd213ps %ymm4,%ymm6,%ymm2
vfnmadd213ps (%rcx),%ymm6,%ymm2
vfnmadd231pd %ymm4,%ymm6,%ymm2
vfnmadd231pd (%rcx),%ymm6,%ymm2
vfnmadd231ps %ymm4,%ymm6,%ymm2
vfnmadd231ps (%rcx),%ymm6,%ymm2
vfnmsub132pd %ymm4,%ymm6,%ymm2
vfnmsub132pd (%rcx),%ymm6,%ymm2
vfnmsub132ps %ymm4,%ymm6,%ymm2
vfnmsub132ps (%rcx),%ymm6,%ymm2
vfnmsub213pd %ymm4,%ymm6,%ymm2
vfnmsub213pd (%rcx),%ymm6,%ymm2
vfnmsub213ps %ymm4,%ymm6,%ymm2
vfnmsub213ps (%rcx),%ymm6,%ymm2
vfnmsub231pd %ymm4,%ymm6,%ymm2
vfnmsub231pd (%rcx),%ymm6,%ymm2
vfnmsub231ps %ymm4,%ymm6,%ymm2
vfnmsub231ps (%rcx),%ymm6,%ymm2
# Tests for op xmm/mem128, xmm, xmm
# (memory forms use xmm7 as destination to vary the ModRM reg field)
vfmadd132pd %xmm4,%xmm6,%xmm2
vfmadd132pd (%rcx),%xmm6,%xmm7
vfmadd132ps %xmm4,%xmm6,%xmm2
vfmadd132ps (%rcx),%xmm6,%xmm7
vfmadd213pd %xmm4,%xmm6,%xmm2
vfmadd213pd (%rcx),%xmm6,%xmm7
vfmadd213ps %xmm4,%xmm6,%xmm2
vfmadd213ps (%rcx),%xmm6,%xmm7
vfmadd231pd %xmm4,%xmm6,%xmm2
vfmadd231pd (%rcx),%xmm6,%xmm7
vfmadd231ps %xmm4,%xmm6,%xmm2
vfmadd231ps (%rcx),%xmm6,%xmm7
vfmaddsub132pd %xmm4,%xmm6,%xmm2
vfmaddsub132pd (%rcx),%xmm6,%xmm7
vfmaddsub132ps %xmm4,%xmm6,%xmm2
vfmaddsub132ps (%rcx),%xmm6,%xmm7
vfmaddsub213pd %xmm4,%xmm6,%xmm2
vfmaddsub213pd (%rcx),%xmm6,%xmm7
vfmaddsub213ps %xmm4,%xmm6,%xmm2
vfmaddsub213ps (%rcx),%xmm6,%xmm7
vfmaddsub231pd %xmm4,%xmm6,%xmm2
vfmaddsub231pd (%rcx),%xmm6,%xmm7
vfmaddsub231ps %xmm4,%xmm6,%xmm2
vfmaddsub231ps (%rcx),%xmm6,%xmm7
vfmsubadd132pd %xmm4,%xmm6,%xmm2
vfmsubadd132pd (%rcx),%xmm6,%xmm7
vfmsubadd132ps %xmm4,%xmm6,%xmm2
vfmsubadd132ps (%rcx),%xmm6,%xmm7
vfmsubadd213pd %xmm4,%xmm6,%xmm2
vfmsubadd213pd (%rcx),%xmm6,%xmm7
vfmsubadd213ps %xmm4,%xmm6,%xmm2
vfmsubadd213ps (%rcx),%xmm6,%xmm7
vfmsubadd231pd %xmm4,%xmm6,%xmm2
vfmsubadd231pd (%rcx),%xmm6,%xmm7
vfmsubadd231ps %xmm4,%xmm6,%xmm2
vfmsubadd231ps (%rcx),%xmm6,%xmm7
vfmsub132pd %xmm4,%xmm6,%xmm2
vfmsub132pd (%rcx),%xmm6,%xmm7
vfmsub132ps %xmm4,%xmm6,%xmm2
vfmsub132ps (%rcx),%xmm6,%xmm7
vfmsub213pd %xmm4,%xmm6,%xmm2
vfmsub213pd (%rcx),%xmm6,%xmm7
vfmsub213ps %xmm4,%xmm6,%xmm2
vfmsub213ps (%rcx),%xmm6,%xmm7
vfmsub231pd %xmm4,%xmm6,%xmm2
vfmsub231pd (%rcx),%xmm6,%xmm7
vfmsub231ps %xmm4,%xmm6,%xmm2
vfmsub231ps (%rcx),%xmm6,%xmm7
vfnmadd132pd %xmm4,%xmm6,%xmm2
vfnmadd132pd (%rcx),%xmm6,%xmm7
vfnmadd132ps %xmm4,%xmm6,%xmm2
vfnmadd132ps (%rcx),%xmm6,%xmm7
vfnmadd213pd %xmm4,%xmm6,%xmm2
vfnmadd213pd (%rcx),%xmm6,%xmm7
vfnmadd213ps %xmm4,%xmm6,%xmm2
vfnmadd213ps (%rcx),%xmm6,%xmm7
vfnmadd231pd %xmm4,%xmm6,%xmm2
vfnmadd231pd (%rcx),%xmm6,%xmm7
vfnmadd231ps %xmm4,%xmm6,%xmm2
vfnmadd231ps (%rcx),%xmm6,%xmm7
vfnmsub132pd %xmm4,%xmm6,%xmm2
vfnmsub132pd (%rcx),%xmm6,%xmm7
vfnmsub132ps %xmm4,%xmm6,%xmm2
vfnmsub132ps (%rcx),%xmm6,%xmm7
vfnmsub213pd %xmm4,%xmm6,%xmm2
vfnmsub213pd (%rcx),%xmm6,%xmm7
vfnmsub213ps %xmm4,%xmm6,%xmm2
vfnmsub213ps (%rcx),%xmm6,%xmm7
vfnmsub231pd %xmm4,%xmm6,%xmm2
vfnmsub231pd (%rcx),%xmm6,%xmm7
vfnmsub231ps %xmm4,%xmm6,%xmm2
vfnmsub231ps (%rcx),%xmm6,%xmm7
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%rcx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%rcx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%rcx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%rcx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%rcx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%rcx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%rcx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%rcx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%rcx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%rcx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%rcx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%rcx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%rcx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%rcx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%rcx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%rcx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%rcx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%rcx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%rcx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%rcx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%rcx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%rcx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%rcx),%xmm6,%xmm2
# --- same coverage repeated in Intel syntax ---
.intel_syntax noprefix
# Tests for op ymm/mem256, ymm, ymm
vfmadd132pd ymm2,ymm6,ymm4
vfmadd132pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmadd132pd ymm2,ymm6,[rcx]
vfmadd132ps ymm2,ymm6,ymm4
vfmadd132ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmadd132ps ymm2,ymm6,[rcx]
vfmadd213pd ymm2,ymm6,ymm4
vfmadd213pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmadd213pd ymm2,ymm6,[rcx]
vfmadd213ps ymm2,ymm6,ymm4
vfmadd213ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmadd213ps ymm2,ymm6,[rcx]
vfmadd231pd ymm2,ymm6,ymm4
vfmadd231pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmadd231pd ymm2,ymm6,[rcx]
vfmadd231ps ymm2,ymm6,ymm4
vfmadd231ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmadd231ps ymm2,ymm6,[rcx]
vfmaddsub132pd ymm2,ymm6,ymm4
vfmaddsub132pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmaddsub132pd ymm2,ymm6,[rcx]
vfmaddsub132ps ymm2,ymm6,ymm4
vfmaddsub132ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmaddsub132ps ymm2,ymm6,[rcx]
vfmaddsub213pd ymm2,ymm6,ymm4
vfmaddsub213pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmaddsub213pd ymm2,ymm6,[rcx]
vfmaddsub213ps ymm2,ymm6,ymm4
vfmaddsub213ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmaddsub213ps ymm2,ymm6,[rcx]
vfmaddsub231pd ymm2,ymm6,ymm4
vfmaddsub231pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmaddsub231pd ymm2,ymm6,[rcx]
vfmaddsub231ps ymm2,ymm6,ymm4
vfmaddsub231ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmaddsub231ps ymm2,ymm6,[rcx]
vfmsubadd132pd ymm2,ymm6,ymm4
vfmsubadd132pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmsubadd132pd ymm2,ymm6,[rcx]
vfmsubadd132ps ymm2,ymm6,ymm4
vfmsubadd132ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmsubadd132ps ymm2,ymm6,[rcx]
vfmsubadd213pd ymm2,ymm6,ymm4
vfmsubadd213pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmsubadd213pd ymm2,ymm6,[rcx]
vfmsubadd213ps ymm2,ymm6,ymm4
vfmsubadd213ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmsubadd213ps ymm2,ymm6,[rcx]
vfmsubadd231pd ymm2,ymm6,ymm4
vfmsubadd231pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmsubadd231pd ymm2,ymm6,[rcx]
vfmsubadd231ps ymm2,ymm6,ymm4
vfmsubadd231ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmsubadd231ps ymm2,ymm6,[rcx]
vfmsub132pd ymm2,ymm6,ymm4
vfmsub132pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmsub132pd ymm2,ymm6,[rcx]
vfmsub132ps ymm2,ymm6,ymm4
vfmsub132ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmsub132ps ymm2,ymm6,[rcx]
vfmsub213pd ymm2,ymm6,ymm4
vfmsub213pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmsub213pd ymm2,ymm6,[rcx]
vfmsub213ps ymm2,ymm6,ymm4
vfmsub213ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmsub213ps ymm2,ymm6,[rcx]
vfmsub231pd ymm2,ymm6,ymm4
vfmsub231pd ymm2,ymm6,YMMWORD PTR [rcx]
vfmsub231pd ymm2,ymm6,[rcx]
vfmsub231ps ymm2,ymm6,ymm4
vfmsub231ps ymm2,ymm6,YMMWORD PTR [rcx]
vfmsub231ps ymm2,ymm6,[rcx]
vfnmadd132pd ymm2,ymm6,ymm4
vfnmadd132pd ymm2,ymm6,YMMWORD PTR [rcx]
vfnmadd132pd ymm2,ymm6,[rcx]
vfnmadd132ps ymm2,ymm6,ymm4
vfnmadd132ps ymm2,ymm6,YMMWORD PTR [rcx]
vfnmadd132ps ymm2,ymm6,[rcx]
vfnmadd213pd ymm2,ymm6,ymm4
vfnmadd213pd ymm2,ymm6,YMMWORD PTR [rcx]
vfnmadd213pd ymm2,ymm6,[rcx]
vfnmadd213ps ymm2,ymm6,ymm4
vfnmadd213ps ymm2,ymm6,YMMWORD PTR [rcx]
vfnmadd213ps ymm2,ymm6,[rcx]
vfnmadd231pd ymm2,ymm6,ymm4
vfnmadd231pd ymm2,ymm6,YMMWORD PTR [rcx]
vfnmadd231pd ymm2,ymm6,[rcx]
vfnmadd231ps ymm2,ymm6,ymm4
vfnmadd231ps ymm2,ymm6,YMMWORD PTR [rcx]
vfnmadd231ps ymm2,ymm6,[rcx]
vfnmsub132pd ymm2,ymm6,ymm4
vfnmsub132pd ymm2,ymm6,YMMWORD PTR [rcx]
vfnmsub132pd ymm2,ymm6,[rcx]
vfnmsub132ps ymm2,ymm6,ymm4
vfnmsub132ps ymm2,ymm6,YMMWORD PTR [rcx]
vfnmsub132ps ymm2,ymm6,[rcx]
vfnmsub213pd ymm2,ymm6,ymm4
vfnmsub213pd ymm2,ymm6,YMMWORD PTR [rcx]
vfnmsub213pd ymm2,ymm6,[rcx]
vfnmsub213ps ymm2,ymm6,ymm4
vfnmsub213ps ymm2,ymm6,YMMWORD PTR [rcx]
vfnmsub213ps ymm2,ymm6,[rcx]
vfnmsub231pd ymm2,ymm6,ymm4
vfnmsub231pd ymm2,ymm6,YMMWORD PTR [rcx]
vfnmsub231pd ymm2,ymm6,[rcx]
vfnmsub231ps ymm2,ymm6,ymm4
vfnmsub231ps ymm2,ymm6,YMMWORD PTR [rcx]
vfnmsub231ps ymm2,ymm6,[rcx]
# Tests for op xmm/mem128, xmm, xmm
# (memory forms use xmm7 as destination to vary the ModRM reg field)
vfmadd132pd xmm2,xmm6,xmm4
vfmadd132pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmadd132pd xmm7,xmm6,[rcx]
vfmadd132ps xmm2,xmm6,xmm4
vfmadd132ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmadd132ps xmm7,xmm6,[rcx]
vfmadd213pd xmm2,xmm6,xmm4
vfmadd213pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmadd213pd xmm7,xmm6,[rcx]
vfmadd213ps xmm2,xmm6,xmm4
vfmadd213ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmadd213ps xmm7,xmm6,[rcx]
vfmadd231pd xmm2,xmm6,xmm4
vfmadd231pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmadd231pd xmm7,xmm6,[rcx]
vfmadd231ps xmm2,xmm6,xmm4
vfmadd231ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmadd231ps xmm7,xmm6,[rcx]
vfmaddsub132pd xmm2,xmm6,xmm4
vfmaddsub132pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmaddsub132pd xmm7,xmm6,[rcx]
vfmaddsub132ps xmm2,xmm6,xmm4
vfmaddsub132ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmaddsub132ps xmm7,xmm6,[rcx]
vfmaddsub213pd xmm2,xmm6,xmm4
vfmaddsub213pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmaddsub213pd xmm7,xmm6,[rcx]
vfmaddsub213ps xmm2,xmm6,xmm4
vfmaddsub213ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmaddsub213ps xmm7,xmm6,[rcx]
vfmaddsub231pd xmm2,xmm6,xmm4
vfmaddsub231pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmaddsub231pd xmm7,xmm6,[rcx]
vfmaddsub231ps xmm2,xmm6,xmm4
vfmaddsub231ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmaddsub231ps xmm7,xmm6,[rcx]
vfmsubadd132pd xmm2,xmm6,xmm4
vfmsubadd132pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmsubadd132pd xmm7,xmm6,[rcx]
vfmsubadd132ps xmm2,xmm6,xmm4
vfmsubadd132ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmsubadd132ps xmm7,xmm6,[rcx]
vfmsubadd213pd xmm2,xmm6,xmm4
vfmsubadd213pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmsubadd213pd xmm7,xmm6,[rcx]
vfmsubadd213ps xmm2,xmm6,xmm4
vfmsubadd213ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmsubadd213ps xmm7,xmm6,[rcx]
vfmsubadd231pd xmm2,xmm6,xmm4
vfmsubadd231pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmsubadd231pd xmm7,xmm6,[rcx]
vfmsubadd231ps xmm2,xmm6,xmm4
vfmsubadd231ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmsubadd231ps xmm7,xmm6,[rcx]
vfmsub132pd xmm2,xmm6,xmm4
vfmsub132pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmsub132pd xmm7,xmm6,[rcx]
vfmsub132ps xmm2,xmm6,xmm4
vfmsub132ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmsub132ps xmm7,xmm6,[rcx]
vfmsub213pd xmm2,xmm6,xmm4
vfmsub213pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmsub213pd xmm7,xmm6,[rcx]
vfmsub213ps xmm2,xmm6,xmm4
vfmsub213ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmsub213ps xmm7,xmm6,[rcx]
vfmsub231pd xmm2,xmm6,xmm4
vfmsub231pd xmm7,xmm6,XMMWORD PTR [rcx]
vfmsub231pd xmm7,xmm6,[rcx]
vfmsub231ps xmm2,xmm6,xmm4
vfmsub231ps xmm7,xmm6,XMMWORD PTR [rcx]
vfmsub231ps xmm7,xmm6,[rcx]
vfnmadd132pd xmm2,xmm6,xmm4
vfnmadd132pd xmm7,xmm6,XMMWORD PTR [rcx]
vfnmadd132pd xmm7,xmm6,[rcx]
vfnmadd132ps xmm2,xmm6,xmm4
vfnmadd132ps xmm7,xmm6,XMMWORD PTR [rcx]
vfnmadd132ps xmm7,xmm6,[rcx]
vfnmadd213pd xmm2,xmm6,xmm4
vfnmadd213pd xmm7,xmm6,XMMWORD PTR [rcx]
vfnmadd213pd xmm7,xmm6,[rcx]
vfnmadd213ps xmm2,xmm6,xmm4
vfnmadd213ps xmm7,xmm6,XMMWORD PTR [rcx]
vfnmadd213ps xmm7,xmm6,[rcx]
vfnmadd231pd xmm2,xmm6,xmm4
vfnmadd231pd xmm7,xmm6,XMMWORD PTR [rcx]
vfnmadd231pd xmm7,xmm6,[rcx]
vfnmadd231ps xmm2,xmm6,xmm4
vfnmadd231ps xmm7,xmm6,XMMWORD PTR [rcx]
vfnmadd231ps xmm7,xmm6,[rcx]
vfnmsub132pd xmm2,xmm6,xmm4
vfnmsub132pd xmm7,xmm6,XMMWORD PTR [rcx]
vfnmsub132pd xmm7,xmm6,[rcx]
vfnmsub132ps xmm2,xmm6,xmm4
vfnmsub132ps xmm7,xmm6,XMMWORD PTR [rcx]
vfnmsub132ps xmm7,xmm6,[rcx]
vfnmsub213pd xmm2,xmm6,xmm4
vfnmsub213pd xmm7,xmm6,XMMWORD PTR [rcx]
vfnmsub213pd xmm7,xmm6,[rcx]
vfnmsub213ps xmm2,xmm6,xmm4
vfnmsub213ps xmm7,xmm6,XMMWORD PTR [rcx]
vfnmsub213ps xmm7,xmm6,[rcx]
vfnmsub231pd xmm2,xmm6,xmm4
vfnmsub231pd xmm7,xmm6,XMMWORD PTR [rcx]
vfnmsub231pd xmm7,xmm6,[rcx]
vfnmsub231ps xmm2,xmm6,xmm4
vfnmsub231ps xmm7,xmm6,XMMWORD PTR [rcx]
vfnmsub231ps xmm7,xmm6,[rcx]
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd132sd xmm2,xmm6,[rcx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd213sd xmm2,xmm6,[rcx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [rcx]
vfmadd231sd xmm2,xmm6,[rcx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub132sd xmm2,xmm6,[rcx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub213sd xmm2,xmm6,[rcx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [rcx]
vfmsub231sd xmm2,xmm6,[rcx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd132sd xmm2,xmm6,[rcx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd213sd xmm2,xmm6,[rcx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [rcx]
vfnmadd231sd xmm2,xmm6,[rcx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub132sd xmm2,xmm6,[rcx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub213sd xmm2,xmm6,[rcx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [rcx]
vfnmsub231sd xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd132ss xmm2,xmm6,[rcx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd213ss xmm2,xmm6,[rcx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [rcx]
vfmadd231ss xmm2,xmm6,[rcx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub132ss xmm2,xmm6,[rcx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub213ss xmm2,xmm6,[rcx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [rcx]
vfmsub231ss xmm2,xmm6,[rcx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd132ss xmm2,xmm6,[rcx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd213ss xmm2,xmm6,[rcx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [rcx]
vfnmadd231ss xmm2,xmm6,[rcx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub132ss xmm2,xmm6,[rcx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub213ss xmm2,xmm6,[rcx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [rcx]
vfnmsub231ss xmm2,xmm6,[rcx]
|
subhamb123/sel-embeddeddebugger
| 11,643
|
src/freertos_exception_startup.s
|
.section .text
.global freertos_exception_startup
.extern registers
// Helper macro: read AArch64 system register \reg_name into general
// register \dest via a single MRS instruction. Writes only \dest.
.macro MRS_REG reg_name, dest
MRS \dest, \reg_name
.endm
freertos_exception_startup:
// Load registers x29 and x30 from the stack
ldp x29, x30, [sp], #16
// Load registers x18 to x2 from the stack
ldp x18, x19, [sp], #16
ldp x16, x17, [sp], #16
ldp x14, x15, [sp], #16
ldp x12, x13, [sp], #16
ldp x10, x11, [sp], #16
ldp x8, x9, [sp], #16
ldp x6, x7, [sp], #16
ldp x4, x5, [sp], #16
ldp x2, x3, [sp], #16
// Load the address of the external register pointer (registers)
adrp x1, registersx // Load the page address of registersx into x1
add x1, x1, :lo12:registersx // Add the low 12 bits of registersx address to x1
// Calculate the memory address for each register and store its value in the corresponding element of the registers array
stp x29, x30, [x1, #29*8] // Write x29 and x30 to registers[29] and registers[30]
stp x27, x28, [x1, #27*8] // not from stack
stp x25, x26, [x1, #25*8] // not from stack
stp x23, x24, [x1, #23*8] // not from stack
stp x21, x22, [x1, #21*8] // not from stack
str x20, [x1, #20*8] // outlier
stp x18, x19, [x1, #18*8] // Write x18 and x19 to registers[18] and registers[19]
stp x16, x17, [x1, #16*8] // Write x16 and x17 to registers[16] and registers[17]
stp x14, x15, [x1, #14*8] // Write x14 and x15 to registers[14] and registers[15]
stp x12, x13, [x1, #12*8] // Write x12 and x13 to registers[12] and registers[13]
stp x10, x11, [x1, #10*8] // Write x10 and x11 to registers[10] and registers[11]
stp x8, x9, [x1, #8*8] // Write x8 and x9 to registers[8] and registers[9]
stp x6, x7, [x1, #6*8] // Write x6 and x7 to registers[6] and registers[7]
stp x4, x5, [x1, #4*8] // Write x4 and x5 to registers[4] and registers[5]
stp x2, x3, [x1, #2*8] // Write x2 and x3 to registers[2] and registers[3]
// Move the address of the external register pointer (registers) from x1 to x2
mov x2, x1
// Load registers x29 and x30 from the stack
ldp x0, x1, [sp], #16
// Store x0 and x1 to registers[0] and registers[1] using stp
stp x0, x1, [x2] // Write x0 and x1 to registers[0] and registers[1]
// Store pc
MRS_REG ELR_EL3, x0 // Get PC from elr_el3
str x0, [x2, #31*8]
// Store 32 bit system registers
// Load the address of the external register pointer (registers)
adrp x1, registers32 // Load the page address of registers32 into x1
add x1, x1, :lo12:registers32 // Add the low 12 bits of registers32 address to x1
// Read and store the values of the specified registers
MRS_REG FPCR, x0
STR x0, [x1, #0] // Store FPCR in the array
MRS_REG FPSR, x0
STR x0, [x1, #4] // Store FPSR in the array
MRS_REG MPIDR_EL1, x0
STR x0, [x1, #8] // Store MPIDR_EL1 in the array
MRS_REG IFSR32_EL2, x0
STR x0, [x1, #12] // Store IFSR32_EL2 in the array
MRS_REG ESR_EL1, x0
STR x0, [x1, #16] // Store ESR_EL1 in the array
MRS_REG ESR_EL2, x0
STR x0, [x1, #20] // Store ESR_EL2 in the array
MRS_REG ESR_EL3, x0
STR x0, [x1, #24] // Store ESR_EL3 in the array
MRS_REG ISR_EL1, x0
STR x0, [x1, #28] // Store ISR_EL1 in the array
MRS_REG SCTLR_EL3, x0
STR x0, [x1, #32] // Store SCTLR_EL3 in the array
MRS_REG TCR_EL3, x0
STR x0, [x1, #36] // Store TCR_EL3 in the array
MRS_REG CONTEXTIDR_EL1, x0
STR x0, [x1, #40] // Store CONTEXTIDR_EL1 in the array
MRS_REG CPACR_EL1, x0
STR x0, [x1, #44] // Store CPACR_EL1 in the array
MRS_REG ACTLR_EL3, x0
STR x0, [x1, #48] // Store ACTLR_EL3 in the array
MRS_REG PMCR_EL0, x0
STR x0, [x1, #52] // Store PMCR_EL0 in the array
MRS_REG PMCNTENSET_EL0, x0
STR x0, [x1, #56] // Store PMCNTENSET_EL0 in the array
MRS_REG PMOVSCLR_EL0, x0
STR x0, [x1, #60] // Store PMOVSCLR_EL0 in the array
MRS_REG PMUSERENR_EL0, x0
STR x0, [x1, #64] // Store PMUSERENR_EL0 in the array
MRS_REG PMINTENSET_EL1, x0
STR x0, [x1, #68] // Store PMINTENSET_EL1 in the array
MRS_REG PMEVCNTR0_EL0, x0
STR x0, [x1, #72] // Store PMEVCNTR0_EL0 in the array
MRS_REG PMEVCNTR1_EL0, x0
STR x0, [x1, #76] // Store PMEVCNTR1_EL0 in the array
MRS_REG PMEVCNTR2_EL0, x0
STR x0, [x1, #80] // Store PMEVCNTR2_EL0 in the array
MRS_REG PMEVCNTR3_EL0, x0
STR x0, [x1, #84] // Store PMEVCNTR3_EL0 in the array
MRS_REG PMEVCNTR4_EL0, x0
STR x0, [x1, #88] // Store PMEVCNTR4_EL0 in the array
MRS_REG PMEVCNTR5_EL0, x0
STR x0, [x1, #92] // Store PMEVCNTR5_EL0 in the array
MRS_REG PMEVTYPER0_EL0, x0
STR x0, [x1, #96] // Store PMEVTYPER0_EL0 in the array
MRS_REG PMEVTYPER1_EL0, x0
STR x0, [x1, #100] // Store PMEVTYPER1_EL0 in the array
MRS_REG PMEVTYPER2_EL0, x0
STR x0, [x1, #104] // Store PMEVTYPER2_EL0 in the array
MRS_REG PMEVTYPER3_EL0, x0
STR x0, [x1, #108] // Store PMEVTYPER3_EL0 in the array
MRS_REG PMEVTYPER4_EL0, x0
STR x0, [x1, #112] // Store PMEVTYPER4_EL0 in the array
MRS_REG PMEVTYPER5_EL0, x0
STR x0, [x1, #116] // Store PMEVTYPER5_EL0 in the array
MRS_REG PMCCFILTR_EL0, x0
STR x0, [x1, #120] // Store PMCCFILTR_EL0 in the array
MRS_REG SCR_EL3, x0
STR x0, [x1, #124] // Store SCR_EL3 in the array
MRS_REG CPTR_EL3, x0
STR x0, [x1, #128] // Store CPTR_EL3 in the array
MRS_REG MDCR_EL3, x0
STR x0, [x1, #132] // Store MDCR_EL3 in the array
MRS_REG CNTKCTL_EL1, x0
STR x0, [x1, #136] // Store CNTKCTL_EL1 in the array
MRS_REG CNTP_TVAL_EL0, x0
STR x0, [x1, #140] // Store CNTP_TVAL_EL0 in the array
MRS_REG CNTP_CTL_EL0, x0
STR x0, [x1, #144] // Store CNTP_CTL_EL0 in the array
MRS_REG CNTV_TVAL_EL0, x0
STR x0, [x1, #148] // Store CNTV_TVAL_EL0 in the array
MRS_REG CNTV_CTL_EL0, x0
STR x0, [x1, #152] // Store CNTV_CTL_EL0 in the array
MRS_REG CNTHCTL_EL2, x0
STR x0, [x1, #156] // Store CNTHCTL_EL2 in the array
MRS_REG CNTHP_TVAL_EL2, x0
STR x0, [x1, #160] // Store CNTHP_TVAL_EL2 in the array
MRS_REG CNTHP_CTL_EL2, x0
STR x0, [x1, #164] // Store CNTHP_CTL_EL2 in the array
MRS_REG CNTPS_TVAL_EL1, x0
STR x0, [x1, #168] // Store CNTPS_TVAL_EL1 in the array
// Store 64 bit system registers
// Load the address of the external register pointer (registers)
adrp x1, registers64 // Load the page address of registers64 into x1
add x1, x1, :lo12:registers64 // Add the low 12 bits of registers64 address to x1
// Read and store the values of the specified registers
MRS_REG FAR_EL3, x0
STR x0, [x1, #0] // Store FAR_EL3 in the array
MRS_REG VBAR_EL3, x0
STR x0, [x1, #8] // Store VBAR_EL3 in the array
MRS_REG TTBR0_EL3, x0
STR x0, [x1, #16] // Store TTBR0_EL3 in the array
MRS_REG MAIR_EL3, x0
STR x0, [x1, #24] // Store MAIR_EL3 in the array
MRS_REG AMAIR_EL3, x0
STR x0, [x1, #32] // Store AMAIR_EL3 in the array
MRS_REG PAR_EL1, x0
STR x0, [x1, #40] // Store PAR_EL1 in the array
MRS_REG TPIDR_EL0, x0
STR x0, [x1, #48] // Store TPIDR_EL0 in the array
MRS_REG TPIDRRO_EL0, x0
STR x0, [x1, #56] // Store TPIDRRO_EL0 in the array
MRS_REG TPIDR_EL1, x0
STR x0, [x1, #64] // Store TPIDR_EL1 in the array
MRS_REG TPIDR_EL3, x0
STR x0, [x1, #72] // Store TPIDR_EL3 in the array
MRS_REG RVBAR_EL3, x0
STR x0, [x1, #80] // Store RVBAR_EL3 in the array
MRS_REG RMR_EL3, x0
STR x0, [x1, #88] // Store RMR_EL3 in the array
MRS_REG SDER32_EL3, x0
STR x0, [x1, #96] // Store SDER32_EL3 in the array
MRS_REG CNTFRQ_EL0, x0
STR x0, [x1, #104] // Store CNTFRQ_EL0 in the array
MRS_REG CNTVCT_EL0, x0
STR x0, [x1, #112] // Store CNTVCT_EL0 in the array
MRS_REG CNTP_CVAL_EL0, x0
STR x0, [x1, #120] // Store CNTP_CVAL_EL0 in the array
MRS_REG CNTV_CVAL_EL0, x0
STR x0, [x1, #128] // Store CNTV_CVAL_EL0 in the array
MRS_REG CNTVOFF_EL2, x0
STR x0, [x1, #136] // Store CNTVOFF_EL2 in the array
MRS_REG CNTHP_CVAL_EL2, x0
STR x0, [x1, #144] // Store CNTHP_CVAL_EL2 in the array
MRS_REG CNTPS_CVAL_EL1, x0
STR x0, [x1, #152] // Store CNTPS_CVAL_EL1 in the array
MRS_REG CNTPS_CTL_EL1, x0
STR x0, [x1, #160] // Store CNTPS_CTL_EL1 in the array
MRS_REG ELR_EL1, x0
STR x0, [x1, #168] // Store ELR_EL1 in the array
MRS_REG ELR_EL2, x0
STR x0, [x1, #176] // Store ELR_EL2 in the array
MRS_REG ELR_EL3, x0
STR x0, [x1, #184] // Store ELR_EL3 in the array
// Store v registers
// Load the address of the external register pointer (registers)
adrp x1, registersv // Load the page address of registersv into x1
add x1, x1, :lo12:registersv // Add the low 12 bits of registersv address to x1
MOV x0, v0.d[0]
STR x0, [x1, #0]
MOV x0, v0.d[1]
STR x0, [x1, #8]
MOV x0, v1.d[0]
STR x0, [x1, #16]
MOV x0, v1.d[1]
STR x0, [x1, #24]
MOV x0, v2.d[0]
STR x0, [x1, #32]
MOV x0, v2.d[1]
STR x0, [x1, #40]
MOV x0, v3.d[0]
STR x0, [x1, #48]
MOV x0, v3.d[1]
STR x0, [x1, #56]
MOV x0, v4.d[0]
STR x0, [x1, #64]
MOV x0, v4.d[1]
STR x0, [x1, #72]
MOV x0, v5.d[0]
STR x0, [x1, #80]
MOV x0, v5.d[1]
STR x0, [x1, #88]
MOV x0, v6.d[0]
STR x0, [x1, #96]
MOV x0, v6.d[1]
STR x0, [x1, #104]
MOV x0, v7.d[0]
STR x0, [x1, #112]
MOV x0, v7.d[1]
STR x0, [x1, #120]
MOV x0, v8.d[0]
STR x0, [x1, #128]
MOV x0, v8.d[1]
STR x0, [x1, #136]
MOV x0, v9.d[0]
STR x0, [x1, #144]
MOV x0, v9.d[1]
STR x0, [x1, #152]
MOV x0, v10.d[0]
STR x0, [x1, #160]
MOV x0, v10.d[1]
STR x0, [x1, #168]
MOV x0, v11.d[0]
STR x0, [x1, #176]
MOV x0, v11.d[1]
STR x0, [x1, #184]
MOV x0, v12.d[0]
STR x0, [x1, #192]
MOV x0, v12.d[1]
STR x0, [x1, #200]
MOV x0, v13.d[0]
STR x0, [x1, #208]
MOV x0, v13.d[1]
STR x0, [x1, #216]
MOV x0, v14.d[0]
STR x0, [x1, #224]
MOV x0, v14.d[1]
STR x0, [x1, #232]
MOV x0, v15.d[0]
STR x0, [x1, #240]
MOV x0, v15.d[1]
STR x0, [x1, #248]
MOV x0, v16.d[0]
STR x0, [x1, #256]
MOV x0, v16.d[1]
STR x0, [x1, #264]
MOV x0, v17.d[0]
STR x0, [x1, #272]
MOV x0, v17.d[1]
STR x0, [x1, #280]
MOV x0, v18.d[0]
STR x0, [x1, #288]
MOV x0, v18.d[1]
STR x0, [x1, #296]
MOV x0, v19.d[0]
STR x0, [x1, #304]
MOV x0, v19.d[1]
STR x0, [x1, #312]
MOV x0, v20.d[0]
STR x0, [x1, #320]
MOV x0, v20.d[1]
STR x0, [x1, #328]
MOV x0, v21.d[0]
STR x0, [x1, #336]
MOV x0, v21.d[1]
STR x0, [x1, #344]
MOV x0, v22.d[0]
STR x0, [x1, #352]
MOV x0, v22.d[1]
STR x0, [x1, #360]
MOV x0, v23.d[0]
STR x0, [x1, #368]
MOV x0, v23.d[1]
STR x0, [x1, #376]
MOV x0, v24.d[0]
STR x0, [x1, #384]
MOV x0, v24.d[1]
STR x0, [x1, #392]
MOV x0, v25.d[0]
STR x0, [x1, #400]
MOV x0, v25.d[1]
STR x0, [x1, #408]
MOV x0, v26.d[0]
STR x0, [x1, #416]
MOV x0, v26.d[1]
STR x0, [x1, #424]
MOV x0, v27.d[0]
STR x0, [x1, #432]
MOV x0, v27.d[1]
STR x0, [x1, #440]
MOV x0, v28.d[0]
STR x0, [x1, #448]
MOV x0, v28.d[1]
STR x0, [x1, #456]
MOV x0, v29.d[0]
STR x0, [x1, #464]
MOV x0, v29.d[1]
STR x0, [x1, #472]
MOV x0, v30.d[0]
STR x0, [x1, #480]
MOV x0, v30.d[1]
STR x0, [x1, #488]
MOV x0, v31.d[0]
STR x0, [x1, #496]
MOV x0, v31.d[1]
STR x0, [x1, #504]
// Store GICD registers
// Branch to the exception_handler() function
adr x0, freertos_exception_handler // Load the address of exception_handler() into x0
b freertos_exception_handler // Branch with link to exception_handler()
|
stsp/binutils-ia16
| 67,144
|
gas/testsuite/gas/i386/avx512dq.s
|
# Check 32bit AVX512DQ instructions
.allow_index_reg
.text
_start:
vbroadcastf32x8 (%ecx), %zmm6 # AVX512DQ
vbroadcastf32x8 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcastf32x8 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcastf32x8 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcastf32x8 4064(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x8 4096(%edx), %zmm6 # AVX512DQ
vbroadcastf32x8 -4096(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x8 -4128(%edx), %zmm6 # AVX512DQ
vbroadcastf64x2 (%ecx), %zmm6 # AVX512DQ
vbroadcastf64x2 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcastf64x2 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcastf64x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcastf64x2 2032(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf64x2 2048(%edx), %zmm6 # AVX512DQ
vbroadcastf64x2 -2048(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf64x2 -2064(%edx), %zmm6 # AVX512DQ
vbroadcasti32x8 (%ecx), %zmm6 # AVX512DQ
vbroadcasti32x8 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcasti32x8 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcasti32x8 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcasti32x8 4064(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x8 4096(%edx), %zmm6 # AVX512DQ
vbroadcasti32x8 -4096(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x8 -4128(%edx), %zmm6 # AVX512DQ
vbroadcasti64x2 (%ecx), %zmm6 # AVX512DQ
vbroadcasti64x2 (%ecx), %zmm6{%k7} # AVX512DQ
vbroadcasti64x2 (%ecx), %zmm6{%k7}{z} # AVX512DQ
vbroadcasti64x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcasti64x2 2032(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti64x2 2048(%edx), %zmm6 # AVX512DQ
vbroadcasti64x2 -2048(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti64x2 -2064(%edx), %zmm6 # AVX512DQ
vbroadcastf32x2 %xmm7, %zmm6 # AVX512DQ
vbroadcastf32x2 %xmm7, %zmm6{%k7} # AVX512DQ
vbroadcastf32x2 %xmm7, %zmm6{%k7}{z} # AVX512DQ
vbroadcastf32x2 (%ecx), %zmm6 # AVX512DQ
vbroadcastf32x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcastf32x2 1016(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x2 1024(%edx), %zmm6 # AVX512DQ
vbroadcastf32x2 -1024(%edx), %zmm6 # AVX512DQ Disp8
vbroadcastf32x2 -1032(%edx), %zmm6 # AVX512DQ
vcvtpd2qq %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq %zmm5, %zmm6{%k7} # AVX512DQ
vcvtpd2qq %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtpd2qq {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2qq (%ecx), %zmm6 # AVX512DQ
vcvtpd2qq -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtpd2qq (%eax){1to8}, %zmm6 # AVX512DQ
vcvtpd2qq 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2qq 8192(%edx), %zmm6 # AVX512DQ
vcvtpd2qq -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2qq -8256(%edx), %zmm6 # AVX512DQ
vcvtpd2qq 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2qq 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtpd2qq -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2qq -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtpd2uqq %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq %zmm5, %zmm6{%k7} # AVX512DQ
vcvtpd2uqq %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtpd2uqq {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtpd2uqq (%ecx), %zmm6 # AVX512DQ
vcvtpd2uqq -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtpd2uqq (%eax){1to8}, %zmm6 # AVX512DQ
vcvtpd2uqq 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2uqq 8192(%edx), %zmm6 # AVX512DQ
vcvtpd2uqq -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtpd2uqq -8256(%edx), %zmm6 # AVX512DQ
vcvtpd2uqq 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2uqq 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtpd2uqq -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtpd2uqq -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtps2qq %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq %ymm5, %zmm6{%k7}{z} # AVX512DQ
vcvtps2qq {rn-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq {ru-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq {rd-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq {rz-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2qq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtps2qq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512DQ
vcvtps2qq (%eax){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2qq 4064(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq 4096(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2qq -4096(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq -4128(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2qq 508(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq 512(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2qq -512(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2qq -516(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2uqq %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq %ymm5, %zmm6{%k7}{z} # AVX512DQ
vcvtps2uqq {rn-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq {ru-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq {rd-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq {rz-sae}, %ymm5, %zmm6{%k7} # AVX512DQ
vcvtps2uqq (%ecx), %zmm6{%k7} # AVX512DQ
vcvtps2uqq -123456(%esp,%esi,8), %zmm6{%k7} # AVX512DQ
vcvtps2uqq (%eax){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2uqq 4064(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq 4096(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2uqq -4096(%edx), %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq -4128(%edx), %zmm6{%k7} # AVX512DQ
vcvtps2uqq 508(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq 512(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtps2uqq -512(%edx){1to8}, %zmm6{%k7} # AVX512DQ Disp8
vcvtps2uqq -516(%edx){1to8}, %zmm6{%k7} # AVX512DQ
vcvtqq2pd %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd %zmm5, %zmm6{%k7} # AVX512DQ
vcvtqq2pd %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtqq2pd {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtqq2pd (%ecx), %zmm6 # AVX512DQ
vcvtqq2pd -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtqq2pd (%eax){1to8}, %zmm6 # AVX512DQ
vcvtqq2pd 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtqq2pd 8192(%edx), %zmm6 # AVX512DQ
vcvtqq2pd -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtqq2pd -8256(%edx), %zmm6 # AVX512DQ
vcvtqq2pd 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtqq2pd 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtqq2pd -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtqq2pd -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtqq2ps %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps %zmm5, %ymm6{%k7}{z} # AVX512DQ
vcvtqq2ps {rn-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps {ru-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps {rd-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps {rz-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtqq2ps (%ecx), %ymm6{%k7} # AVX512DQ
vcvtqq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512DQ
vcvtqq2ps (%eax){1to8}, %ymm6{%k7} # AVX512DQ
vcvtqq2ps 8128(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps 8192(%edx), %ymm6{%k7} # AVX512DQ
vcvtqq2ps -8192(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps -8256(%edx), %ymm6{%k7} # AVX512DQ
vcvtqq2ps 1016(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps 1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vcvtqq2ps -1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtqq2ps -1032(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vcvtuqq2pd %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd %zmm5, %zmm6{%k7} # AVX512DQ
vcvtuqq2pd %zmm5, %zmm6{%k7}{z} # AVX512DQ
vcvtuqq2pd {rn-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd {ru-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd {rd-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd {rz-sae}, %zmm5, %zmm6 # AVX512DQ
vcvtuqq2pd (%ecx), %zmm6 # AVX512DQ
vcvtuqq2pd -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vcvtuqq2pd (%eax){1to8}, %zmm6 # AVX512DQ
vcvtuqq2pd 8128(%edx), %zmm6 # AVX512DQ Disp8
vcvtuqq2pd 8192(%edx), %zmm6 # AVX512DQ
vcvtuqq2pd -8192(%edx), %zmm6 # AVX512DQ Disp8
vcvtuqq2pd -8256(%edx), %zmm6 # AVX512DQ
vcvtuqq2pd 1016(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtuqq2pd 1024(%edx){1to8}, %zmm6 # AVX512DQ
vcvtuqq2pd -1024(%edx){1to8}, %zmm6 # AVX512DQ Disp8
vcvtuqq2pd -1032(%edx){1to8}, %zmm6 # AVX512DQ
vcvtuqq2ps %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps %zmm5, %ymm6{%k7}{z} # AVX512DQ
vcvtuqq2ps {rn-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps {ru-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps {rd-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps {rz-sae}, %zmm5, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps (%ecx), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps -123456(%esp,%esi,8), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps (%eax){1to8}, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps 8128(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps 8192(%edx), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps -8192(%edx), %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps -8256(%edx), %ymm6{%k7} # AVX512DQ
vcvtuqq2ps 1016(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps 1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vcvtuqq2ps -1024(%edx){1to8}, %ymm6{%k7} # AVX512DQ Disp8
vcvtuqq2ps -1032(%edx){1to8}, %ymm6{%k7} # AVX512DQ
vextractf64x2 $0xab, %zmm5, %xmm6{%k7} # AVX512DQ
vextractf64x2 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512DQ
vextractf64x2 $123, %zmm5, %xmm6{%k7} # AVX512DQ
vextractf32x8 $0xab, %zmm5, %ymm6{%k7} # AVX512DQ
vextractf32x8 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512DQ
vextractf32x8 $123, %zmm5, %ymm6{%k7} # AVX512DQ
vextracti64x2 $0xab, %zmm5, %xmm6{%k7} # AVX512DQ
vextracti64x2 $0xab, %zmm5, %xmm6{%k7}{z} # AVX512DQ
vextracti64x2 $123, %zmm5, %xmm6{%k7} # AVX512DQ
vextracti32x8 $0xab, %zmm5, %ymm6{%k7} # AVX512DQ
vextracti32x8 $0xab, %zmm5, %ymm6{%k7}{z} # AVX512DQ
vextracti32x8 $123, %zmm5, %ymm6{%k7} # AVX512DQ
vfpclasspd $0xab, %zmm6, %k5 # AVX512DQ
vfpclasspd $0xab, %zmm6, %k5{%k7} # AVX512DQ
vfpclasspd $123, %zmm6, %k5 # AVX512DQ
vfpclasspdz $123, (%ecx), %k5 # AVX512DQ
vfpclasspdz $123, -123456(%esp,%esi,8), %k5 # AVX512DQ
vfpclasspd $123, (%eax){1to8}, %k5 # AVX512DQ
vfpclasspdz $123, 8128(%edx), %k5 # AVX512DQ Disp8
vfpclasspdz $123, 8192(%edx), %k5 # AVX512DQ
vfpclasspdz $123, -8192(%edx), %k5 # AVX512DQ Disp8
vfpclasspdz $123, -8256(%edx), %k5 # AVX512DQ
vfpclasspdz $123, 1016(%edx){1to8}, %k5 # AVX512DQ Disp8
vfpclasspdz $123, 1024(%edx){1to8}, %k5 # AVX512DQ
vfpclasspdz $123, -1024(%edx){1to8}, %k5 # AVX512DQ Disp8
vfpclasspdz $123, -1032(%edx){1to8}, %k5 # AVX512DQ
vfpclassps $0xab, %zmm6, %k5 # AVX512DQ
vfpclassps $0xab, %zmm6, %k5{%k7} # AVX512DQ
vfpclassps $123, %zmm6, %k5 # AVX512DQ
vfpclasspsz $123, (%ecx), %k5 # AVX512DQ
vfpclasspsz $123, -123456(%esp,%esi,8), %k5 # AVX512DQ
vfpclassps $123, (%eax){1to16}, %k5 # AVX512DQ
vfpclasspsz $123, 8128(%edx), %k5 # AVX512DQ Disp8
vfpclasspsz $123, 8192(%edx), %k5 # AVX512DQ
vfpclasspsz $123, -8192(%edx), %k5 # AVX512DQ Disp8
vfpclasspsz $123, -8256(%edx), %k5 # AVX512DQ
vfpclasspsz $123, 508(%edx){1to16}, %k5 # AVX512DQ Disp8
vfpclasspsz $123, 512(%edx){1to16}, %k5 # AVX512DQ
vfpclasspsz $123, -512(%edx){1to16}, %k5 # AVX512DQ Disp8
vfpclasspsz $123, -516(%edx){1to16}, %k5 # AVX512DQ
vfpclasssd $0xab, %xmm6, %k5{%k7} # AVX512DQ
vfpclasssd $123, %xmm6, %k5{%k7} # AVX512DQ
vfpclasssd $123, (%ecx), %k5{%k7} # AVX512DQ
vfpclasssd $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512DQ
vfpclasssd $123, 1016(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclasssd $123, 1024(%edx), %k5{%k7} # AVX512DQ
vfpclasssd $123, -1024(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclasssd $123, -1032(%edx), %k5{%k7} # AVX512DQ
vfpclassss $0xab, %xmm6, %k5{%k7} # AVX512DQ
vfpclassss $123, %xmm6, %k5{%k7} # AVX512DQ
vfpclassss $123, (%ecx), %k5{%k7} # AVX512DQ
vfpclassss $123, -123456(%esp,%esi,8), %k5{%k7} # AVX512DQ
vfpclassss $123, 508(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclassss $123, 512(%edx), %k5{%k7} # AVX512DQ
vfpclassss $123, -512(%edx), %k5{%k7} # AVX512DQ Disp8
vfpclassss $123, -516(%edx), %k5{%k7} # AVX512DQ
vinsertf64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinsertf64x2 $123, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, 2032(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf64x2 $123, 2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf64x2 $123, -2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf64x2 $123, -2064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinsertf32x8 $123, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, 4064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf32x8 $123, 4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinsertf32x8 $123, -4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinsertf32x8 $123, -4128(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $0xab, %xmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinserti64x2 $123, %xmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, 2032(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti64x2 $123, 2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti64x2 $123, -2048(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti64x2 $123, -2064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $0xab, %ymm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vinserti32x8 $123, %ymm4, %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, (%ecx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, -123456(%esp,%esi,8), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, 4064(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti32x8 $123, 4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vinserti32x8 $123, -4096(%edx), %zmm5, %zmm6{%k7} # AVX512DQ Disp8
vinserti32x8 $123, -4128(%edx), %zmm5, %zmm6{%k7} # AVX512DQ
vbroadcasti32x2 %xmm7, %zmm6 # AVX512DQ
vbroadcasti32x2 %xmm7, %zmm6{%k7} # AVX512DQ
vbroadcasti32x2 %xmm7, %zmm6{%k7}{z} # AVX512DQ
vbroadcasti32x2 (%ecx), %zmm6 # AVX512DQ
vbroadcasti32x2 -123456(%esp,%esi,8), %zmm6 # AVX512DQ
vbroadcasti32x2 1016(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x2 1024(%edx), %zmm6 # AVX512DQ
vbroadcasti32x2 -1024(%edx), %zmm6 # AVX512DQ Disp8
vbroadcasti32x2 -1032(%edx), %zmm6 # AVX512DQ
vpmullq %zmm4, %zmm5, %zmm6 # AVX512DQ
vpmullq %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vpmullq %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vpmullq (%ecx), %zmm5, %zmm6 # AVX512DQ
vpmullq -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vpmullq (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vpmullq 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vpmullq -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vpmullq 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vpmullq -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vpmullq -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangepd $0xab, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vrangepd $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vrangepd $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, (%ecx), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vrangepd $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangepd $123, -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangepd $123, -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vrangeps $0xab, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vrangeps $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vrangeps $0xab, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, {sae}, %zmm4, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, (%ecx), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vrangeps $123, 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vrangeps $123, -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vrangeps $123, -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vrangesd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512DQ
vrangesd $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, 1016(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangesd $123, 1024(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangesd $123, -1024(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangesd $123, -1032(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512DQ
vrangess $0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, 508(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangess $123, 512(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vrangess $123, -512(%edx), %xmm5, %xmm6{%k7} # AVX512DQ Disp8
vrangess $123, -516(%edx), %xmm5, %xmm6{%k7} # AVX512DQ
vandpd %zmm4, %zmm5, %zmm6 # AVX512DQ
vandpd %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandpd (%ecx), %zmm5, %zmm6 # AVX512DQ
vandpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandpd (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vandpd 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandpd 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandpd -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandpd -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vandpd 1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vandpd -1024(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
vandpd -1032(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ
vandps %zmm4, %zmm5, %zmm6 # AVX512DQ
vandps %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandps %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandps (%ecx), %zmm5, %zmm6 # AVX512DQ
vandps -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandps (%eax){1to16}, %zmm5, %zmm6 # AVX512DQ
vandps 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandps 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandps -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandps -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandps 508(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vandps 512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vandps -512(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ Disp8
vandps -516(%edx){1to16}, %zmm5, %zmm6 # AVX512DQ
vandnpd %zmm4, %zmm5, %zmm6 # AVX512DQ
vandnpd %zmm4, %zmm5, %zmm6{%k7} # AVX512DQ
vandnpd %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512DQ
vandnpd (%ecx), %zmm5, %zmm6 # AVX512DQ
vandnpd -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512DQ
vandnpd (%eax){1to8}, %zmm5, %zmm6 # AVX512DQ
vandnpd 8128(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandnpd 8192(%edx), %zmm5, %zmm6 # AVX512DQ
vandnpd -8192(%edx), %zmm5, %zmm6 # AVX512DQ Disp8
vandnpd -8256(%edx), %zmm5, %zmm6 # AVX512DQ
vandnpd 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512DQ Disp8
# AVX512DQ 512-bit bitwise logic, AT&T syntax.  Each group walks the same
# pattern: reg/reg, merge-masked {%k7}, zero-masked {%k7}{z}, plain memory,
# SIB memory, broadcast ({1to8} for pd / {1to16} for ps), then displacements
# on either side of the Disp8*N compression limit (+/-8192 full-vector,
# +/-1024 or +/-512 broadcast).  Lines tagged "Disp8" must assemble to the
# compressed 1-byte displacement form.
# NOTE(review): the first three lines finish a vandnpd broadcast group whose
# start lies above this chunk.
	vandnpd	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vandnpd	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vandnpd	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vandnps	%zmm4, %zmm5, %zmm6	 # AVX512DQ
	vandnps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vandnps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vandnps	(%ecx), %zmm5, %zmm6	 # AVX512DQ
	vandnps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512DQ
	vandnps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vandnps	8128(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vandnps	8192(%edx), %zmm5, %zmm6	 # AVX512DQ
	vandnps	-8192(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vandnps	-8256(%edx), %zmm5, %zmm6	 # AVX512DQ
	vandnps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vandnps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vandnps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vandnps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vorpd	%zmm4, %zmm5, %zmm6	 # AVX512DQ
	vorpd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vorpd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vorpd	(%ecx), %zmm5, %zmm6	 # AVX512DQ
	vorpd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512DQ
	vorpd	(%eax){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vorpd	8128(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vorpd	8192(%edx), %zmm5, %zmm6	 # AVX512DQ
	vorpd	-8192(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vorpd	-8256(%edx), %zmm5, %zmm6	 # AVX512DQ
	vorpd	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vorpd	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vorpd	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vorpd	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vorps	%zmm4, %zmm5, %zmm6	 # AVX512DQ
	vorps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vorps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vorps	(%ecx), %zmm5, %zmm6	 # AVX512DQ
	vorps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512DQ
	vorps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vorps	8128(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vorps	8192(%edx), %zmm5, %zmm6	 # AVX512DQ
	vorps	-8192(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vorps	-8256(%edx), %zmm5, %zmm6	 # AVX512DQ
	vorps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vorps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vorps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vorps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vxorpd	%zmm4, %zmm5, %zmm6	 # AVX512DQ
	vxorpd	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vxorpd	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vxorpd	(%ecx), %zmm5, %zmm6	 # AVX512DQ
	vxorpd	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512DQ
	vxorpd	(%eax){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vxorpd	8128(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorpd	8192(%edx), %zmm5, %zmm6	 # AVX512DQ
	vxorpd	-8192(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorpd	-8256(%edx), %zmm5, %zmm6	 # AVX512DQ
	vxorpd	1016(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorpd	1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vxorpd	-1024(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorpd	-1032(%edx){1to8}, %zmm5, %zmm6	 # AVX512DQ
	vxorps	%zmm4, %zmm5, %zmm6	 # AVX512DQ
	vxorps	%zmm4, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vxorps	%zmm4, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vxorps	(%ecx), %zmm5, %zmm6	 # AVX512DQ
	vxorps	-123456(%esp,%esi,8), %zmm5, %zmm6	 # AVX512DQ
	vxorps	(%eax){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vxorps	8128(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorps	8192(%edx), %zmm5, %zmm6	 # AVX512DQ
	vxorps	-8192(%edx), %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorps	-8256(%edx), %zmm5, %zmm6	 # AVX512DQ
	vxorps	508(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorps	512(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ
	vxorps	-512(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ Disp8
	vxorps	-516(%edx){1to16}, %zmm5, %zmm6	 # AVX512DQ
# vreduce{pd,ps,sd,ss}: immediate-operand reduce instructions.  Both the
# 0xab and 123 immediate encodings are exercised, plus {sae} suppression,
# masking, broadcast, and Disp8*N displacement boundaries.  The scalar
# forms (sd/ss) use 8-/4-byte memory operands, so their Disp8 limits are
# +/-1024 and +/-512 respectively.
	vreducepd	$0xab, %zmm5, %zmm6	 # AVX512DQ
	vreducepd	$0xab, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vreducepd	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vreducepd	$0xab, {sae}, %zmm5, %zmm6	 # AVX512DQ
	vreducepd	$123, %zmm5, %zmm6	 # AVX512DQ
	vreducepd	$123, {sae}, %zmm5, %zmm6	 # AVX512DQ
	vreducepd	$123, (%ecx), %zmm6	 # AVX512DQ
	vreducepd	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512DQ
	vreducepd	$123, (%eax){1to8}, %zmm6	 # AVX512DQ
	vreducepd	$123, 8128(%edx), %zmm6	 # AVX512DQ Disp8
	vreducepd	$123, 8192(%edx), %zmm6	 # AVX512DQ
	vreducepd	$123, -8192(%edx), %zmm6	 # AVX512DQ Disp8
	vreducepd	$123, -8256(%edx), %zmm6	 # AVX512DQ
	vreducepd	$123, 1016(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vreducepd	$123, 1024(%edx){1to8}, %zmm6	 # AVX512DQ
	vreducepd	$123, -1024(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vreducepd	$123, -1032(%edx){1to8}, %zmm6	 # AVX512DQ
	vreduceps	$0xab, %zmm5, %zmm6	 # AVX512DQ
	vreduceps	$0xab, %zmm5, %zmm6{%k7}	 # AVX512DQ
	vreduceps	$0xab, %zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vreduceps	$0xab, {sae}, %zmm5, %zmm6	 # AVX512DQ
	vreduceps	$123, %zmm5, %zmm6	 # AVX512DQ
	vreduceps	$123, {sae}, %zmm5, %zmm6	 # AVX512DQ
	vreduceps	$123, (%ecx), %zmm6	 # AVX512DQ
	vreduceps	$123, -123456(%esp,%esi,8), %zmm6	 # AVX512DQ
	vreduceps	$123, (%eax){1to16}, %zmm6	 # AVX512DQ
	vreduceps	$123, 8128(%edx), %zmm6	 # AVX512DQ Disp8
	vreduceps	$123, 8192(%edx), %zmm6	 # AVX512DQ
	vreduceps	$123, -8192(%edx), %zmm6	 # AVX512DQ Disp8
	vreduceps	$123, -8256(%edx), %zmm6	 # AVX512DQ
	vreduceps	$123, 508(%edx){1to16}, %zmm6	 # AVX512DQ Disp8
	vreduceps	$123, 512(%edx){1to16}, %zmm6	 # AVX512DQ
	vreduceps	$123, -512(%edx){1to16}, %zmm6	 # AVX512DQ Disp8
	vreduceps	$123, -516(%edx){1to16}, %zmm6	 # AVX512DQ
	vreducesd	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512DQ
	vreducesd	$0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$123, 1016(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ Disp8
	vreducesd	$123, 1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducesd	$123, -1024(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ Disp8
	vreducesd	$123, -1032(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512DQ
	vreducess	$0xab, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$123, {sae}, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$123, 508(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ Disp8
	vreducess	$123, 512(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ
	vreducess	$123, -512(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ Disp8
	vreducess	$123, -516(%edx), %xmm5, %xmm6{%k7}	 # AVX512DQ
# Opmask-register instructions added by AVX512DQ: byte-granularity logic
# (k*b forms), byte shifts, byte moves (register, memory, and GPR both
# directions), plus kaddw/kaddb and the ktest variants.
	kandb	%k7, %k6, %k5	 # AVX512DQ
	kandnb	%k7, %k6, %k5	 # AVX512DQ
	korb	%k7, %k6, %k5	 # AVX512DQ
	kxnorb	%k7, %k6, %k5	 # AVX512DQ
	kxorb	%k7, %k6, %k5	 # AVX512DQ
	knotb	%k6, %k5	 # AVX512DQ
	kortestb	%k6, %k5	 # AVX512DQ
	ktestw	%k6, %k5	 # AVX512DQ
	ktestb	%k6, %k5	 # AVX512DQ
	kshiftrb	$0xab, %k6, %k5	 # AVX512DQ
	kshiftrb	$123, %k6, %k5	 # AVX512DQ
	kshiftlb	$0xab, %k6, %k5	 # AVX512DQ
	kshiftlb	$123, %k6, %k5	 # AVX512DQ
	kmovb	%k6, %k5	 # AVX512DQ
	kmovb	(%ecx), %k5	 # AVX512DQ
	kmovb	-123456(%esp,%esi,8), %k5	 # AVX512DQ
	kmovb	%k5, (%ecx)	 # AVX512DQ
	kmovb	%k5, -123456(%esp,%esi,8)	 # AVX512DQ
	kmovb	%eax, %k5	 # AVX512DQ
	kmovb	%ebp, %k5	 # AVX512DQ
	kmovb	%k5, %eax	 # AVX512DQ
	kmovb	%k5, %ebp	 # AVX512DQ
	kaddw	%k7, %k6, %k5	 # AVX512DQ
	kaddb	%k7, %k6, %k5	 # AVX512DQ
# Memory-destination forms of the AVX512DQ extracts.  The 64x2 variants
# store 16 bytes (Disp8 limit +/-2048); the 32x8 variants store 32 bytes
# (Disp8 limit +/-4096).  Store masking {%k7} is also covered.
	vextractf64x2	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextractf64x2	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextractf64x2	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextractf64x2	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextractf64x2	$123, %zmm6, 2032(%edx)	 # AVX512DQ Disp8
	vextractf64x2	$123, %zmm6, 2048(%edx)	 # AVX512DQ
	vextractf64x2	$123, %zmm6, -2048(%edx)	 # AVX512DQ Disp8
	vextractf64x2	$123, %zmm6, -2064(%edx)	 # AVX512DQ
	vextractf32x8	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextractf32x8	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextractf32x8	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextractf32x8	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextractf32x8	$123, %zmm6, 4064(%edx)	 # AVX512DQ Disp8
	vextractf32x8	$123, %zmm6, 4096(%edx)	 # AVX512DQ
	vextractf32x8	$123, %zmm6, -4096(%edx)	 # AVX512DQ Disp8
	vextractf32x8	$123, %zmm6, -4128(%edx)	 # AVX512DQ
	vextracti64x2	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextracti64x2	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextracti64x2	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextracti64x2	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextracti64x2	$123, %zmm6, 2032(%edx)	 # AVX512DQ Disp8
	vextracti64x2	$123, %zmm6, 2048(%edx)	 # AVX512DQ
	vextracti64x2	$123, %zmm6, -2048(%edx)	 # AVX512DQ Disp8
	vextracti64x2	$123, %zmm6, -2064(%edx)	 # AVX512DQ
	vextracti32x8	$0xab, %zmm6, (%ecx)	 # AVX512DQ
	vextracti32x8	$0xab, %zmm6, (%ecx){%k7}	 # AVX512DQ
	vextracti32x8	$123, %zmm6, (%ecx)	 # AVX512DQ
	vextracti32x8	$123, %zmm6, -123456(%esp,%esi,8)	 # AVX512DQ
	vextracti32x8	$123, %zmm6, 4064(%edx)	 # AVX512DQ Disp8
	vextracti32x8	$123, %zmm6, 4096(%edx)	 # AVX512DQ
	vextracti32x8	$123, %zmm6, -4096(%edx)	 # AVX512DQ Disp8
	vextracti32x8	$123, %zmm6, -4128(%edx)	 # AVX512DQ
# Truncating float->quadword-integer conversions ({sae} instead of rounding
# control, since truncation fixes the rounding mode) and the mask<->vector
# move instructions (vpmovd2m/q2m, vpmovm2d/m2q).  vcvttps2[u]qq reads a
# 256-bit (ymm) source producing a 512-bit result, hence the +/-4096
# full-vector and +/-512 broadcast Disp8 limits.
	vcvttpd2qq	%zmm5, %zmm6	 # AVX512DQ
	vcvttpd2qq	%zmm5, %zmm6{%k7}	 # AVX512DQ
	vcvttpd2qq	%zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttpd2qq	{sae}, %zmm5, %zmm6	 # AVX512DQ
	vcvttpd2qq	(%ecx), %zmm6	 # AVX512DQ
	vcvttpd2qq	-123456(%esp,%esi,8), %zmm6	 # AVX512DQ
	vcvttpd2qq	(%eax){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2qq	8128(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	8192(%edx), %zmm6	 # AVX512DQ
	vcvttpd2qq	-8192(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	-8256(%edx), %zmm6	 # AVX512DQ
	vcvttpd2qq	1016(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	1024(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2qq	-1024(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2qq	-1032(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2uqq	%zmm5, %zmm6	 # AVX512DQ
	vcvttpd2uqq	%zmm5, %zmm6{%k7}	 # AVX512DQ
	vcvttpd2uqq	%zmm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttpd2uqq	{sae}, %zmm5, %zmm6	 # AVX512DQ
	vcvttpd2uqq	(%ecx), %zmm6	 # AVX512DQ
	vcvttpd2uqq	-123456(%esp,%esi,8), %zmm6	 # AVX512DQ
	vcvttpd2uqq	(%eax){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2uqq	8128(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	8192(%edx), %zmm6	 # AVX512DQ
	vcvttpd2uqq	-8192(%edx), %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	-8256(%edx), %zmm6	 # AVX512DQ
	vcvttpd2uqq	1016(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	1024(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttpd2uqq	-1024(%edx){1to8}, %zmm6	 # AVX512DQ Disp8
	vcvttpd2uqq	-1032(%edx){1to8}, %zmm6	 # AVX512DQ
	vcvttps2qq	%ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	%ymm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttps2qq	{sae}, %ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	(%ecx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	(%eax){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	4064(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	4096(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	-4096(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	-4128(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	508(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2qq	-512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2qq	-516(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	%ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	%ymm5, %zmm6{%k7}{z}	 # AVX512DQ
	vcvttps2uqq	{sae}, %ymm5, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	(%ecx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	-123456(%esp,%esi,8), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	(%eax){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	4064(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	4096(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	-4096(%edx), %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	-4128(%edx), %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	508(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vcvttps2uqq	-512(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ Disp8
	vcvttps2uqq	-516(%edx){1to8}, %zmm6{%k7}	 # AVX512DQ
	vpmovd2m	%zmm6, %k5	 # AVX512DQ
	vpmovq2m	%zmm6, %k5	 # AVX512DQ
	vpmovm2d	%k5, %zmm6	 # AVX512DQ
	vpmovm2q	%k5, %zmm6	 # AVX512DQ
# From here on the same AVX512DQ coverage is repeated in Intel syntax, to
# check that both parsers accept every operand form.  This first group:
# tuple broadcasts loading 32 bytes (f32x8), 16 bytes (f64x2/i64x2) or
# 8 bytes (f32x2) from memory, plus the register form of vbroadcastf32x2.
.intel_syntax noprefix
	vbroadcastf32x8	zmm6, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x8	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x8	zmm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x8	zmm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vbroadcastf32x8	zmm6, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf64x2	zmm6{k7}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf64x2	zmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx+2032]	 # AVX512DQ Disp8
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx+2048]	 # AVX512DQ
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx-2048]	 # AVX512DQ Disp8
	vbroadcastf64x2	zmm6, XMMWORD PTR [edx-2064]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x8	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x8	zmm6{k7}{z}, YMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vbroadcasti32x8	zmm6, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti64x2	zmm6{k7}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti64x2	zmm6{k7}{z}, XMMWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx+2032]	 # AVX512DQ Disp8
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx+2048]	 # AVX512DQ
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx-2048]	 # AVX512DQ Disp8
	vbroadcasti64x2	zmm6, XMMWORD PTR [edx-2064]	 # AVX512DQ
	vbroadcastf32x2	zmm6, xmm7	 # AVX512DQ
	vbroadcastf32x2	zmm6{k7}, xmm7	 # AVX512DQ
	vbroadcastf32x2	zmm6{k7}{z}, xmm7	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [ecx]	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [edx+1016]	 # AVX512DQ Disp8
	vbroadcastf32x2	zmm6, QWORD PTR [edx+1024]	 # AVX512DQ
	vbroadcastf32x2	zmm6, QWORD PTR [edx-1024]	 # AVX512DQ Disp8
	vbroadcastf32x2	zmm6, QWORD PTR [edx-1032]	 # AVX512DQ
# Intel-syntax quadword-integer <-> float conversions.  Rounding control is
# spelled on the source operand ({rn-sae} etc.), broadcasts use the
# "qword bcst"/"dword bcst" notation (one line also checks the upper-case
# "DWORD BCST" spelling is accepted), and Disp8*N boundaries are probed as
# in the AT&T half of the file.
	vcvtpd2qq	zmm6, zmm5	 # AVX512DQ
	vcvtpd2qq	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtpd2qq	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtpd2qq	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtpd2qq	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtpd2qq	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtpd2qq	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5	 # AVX512DQ
	vcvtpd2uqq	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtpd2uqq	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtpd2uqq	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtpd2uqq	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtpd2uqq	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtpd2uqq	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5	 # AVX512DQ
	vcvtps2qq	zmm6{k7}{z}, ymm5	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{rn-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{ru-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{rd-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, ymm5{rz-sae}	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, dword bcst [eax]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, dword bcst [edx+508]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, dword bcst [edx+512]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, dword bcst [edx-512]	 # AVX512DQ Disp8
	vcvtps2qq	zmm6{k7}, dword bcst [edx-516]	 # AVX512DQ
	vcvtps2qq	zmm6{k7}, DWORD BCST [edx+508]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, ymm5	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}{z}, ymm5	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{rn-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{ru-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{rd-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, ymm5{rz-sae}	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [ecx]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, dword bcst [eax]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx+4064]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx+4096]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx-4096]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, YMMWORD PTR [edx-4128]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, dword bcst [edx+508]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, dword bcst [edx+512]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, dword bcst [edx-512]	 # AVX512DQ Disp8
	vcvtps2uqq	zmm6{k7}, dword bcst [edx-516]	 # AVX512DQ
	vcvtps2uqq	zmm6{k7}, DWORD BCST [edx+508]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, zmm5	 # AVX512DQ
	vcvtqq2pd	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtqq2pd	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtqq2pd	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtqq2pd	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtqq2pd	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtqq2pd	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{rn-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{ru-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{rd-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, zmm5{rz-sae}	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, qword bcst [eax]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, qword bcst [edx+1024]	 # AVX512DQ
	vcvtqq2ps	ymm6{k7}, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtqq2ps	ymm6{k7}, qword bcst [edx-1032]	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5	 # AVX512DQ
	vcvtuqq2pd	zmm6{k7}, zmm5	 # AVX512DQ
	vcvtuqq2pd	zmm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{rn-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{ru-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{rd-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, zmm5{rz-sae}	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtuqq2pd	zmm6, qword bcst [eax]	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtuqq2pd	zmm6, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, qword bcst [edx+1024]	 # AVX512DQ
	vcvtuqq2pd	zmm6, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtuqq2pd	zmm6, qword bcst [edx-1032]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}{z}, zmm5	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{rn-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{ru-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{rd-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, zmm5{rz-sae}	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [ecx]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, qword bcst [eax]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx+1024]	 # AVX512DQ
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vcvtuqq2ps	ymm6{k7}, qword bcst [edx-1032]	 # AVX512DQ
# Register-destination extract forms, then vfpclass{pd,ps,sd,ss} writing an
# opmask.  The vfpclass packed forms take memory with an explicit {1to8}/
# {1to16} broadcast suffix; the scalar forms take 8-/4-byte memory.
	vextractf64x2	xmm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextractf64x2	xmm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextractf64x2	xmm6{k7}, zmm5, 123	 # AVX512DQ
	vextractf32x8	ymm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextractf32x8	ymm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextractf32x8	ymm6{k7}, zmm5, 123	 # AVX512DQ
	vextracti64x2	xmm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextracti64x2	xmm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextracti64x2	xmm6{k7}, zmm5, 123	 # AVX512DQ
	vextracti32x8	ymm6{k7}, zmm5, 0xab	 # AVX512DQ
	vextracti32x8	ymm6{k7}{z}, zmm5, 0xab	 # AVX512DQ
	vextracti32x8	ymm6{k7}, zmm5, 123	 # AVX512DQ
	vfpclasspd	k5, zmm6, 0xab	 # AVX512DQ
	vfpclasspd	k5{k7}, zmm6, 0xab	 # AVX512DQ
	vfpclasspd	k5, zmm6, 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclasspd	k5, [eax]{1to8}, 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vfpclasspd	k5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vfpclasspd	k5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vfpclasspd	k5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vfpclasspd	k5, QWORD BCST [edx+1016]{1to8}, 123	 # AVX512DQ Disp8
	vfpclasspd	k5, QWORD BCST [edx+1024]{1to8}, 123	 # AVX512DQ
	vfpclasspd	k5, QWORD BCST [edx-1024]{1to8}, 123	 # AVX512DQ Disp8
	vfpclasspd	k5, QWORD BCST [edx-1032]{1to8}, 123	 # AVX512DQ
	vfpclassps	k5, zmm6, 0xab	 # AVX512DQ
	vfpclassps	k5{k7}, zmm6, 0xab	 # AVX512DQ
	vfpclassps	k5, zmm6, 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclassps	k5, [eax]{1to16}, 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vfpclassps	k5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vfpclassps	k5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vfpclassps	k5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vfpclassps	k5, DWORD BCST [edx+508]{1to16}, 123	 # AVX512DQ Disp8
	vfpclassps	k5, DWORD BCST [edx+512]{1to16}, 123	 # AVX512DQ
	vfpclassps	k5, DWORD BCST [edx-512]{1to16}, 123	 # AVX512DQ Disp8
	vfpclassps	k5, DWORD BCST [edx-516]{1to16}, 123	 # AVX512DQ
	vfpclasssd	k5{k7}, xmm6, 0xab	 # AVX512DQ
	vfpclasssd	k5{k7}, xmm6, 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [ecx], 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [edx+1016], 123	 # AVX512DQ Disp8
	vfpclasssd	k5{k7}, QWORD PTR [edx+1024], 123	 # AVX512DQ
	vfpclasssd	k5{k7}, QWORD PTR [edx-1024], 123	 # AVX512DQ Disp8
	vfpclasssd	k5{k7}, QWORD PTR [edx-1032], 123	 # AVX512DQ
	vfpclassss	k5{k7}, xmm6, 0xab	 # AVX512DQ
	vfpclassss	k5{k7}, xmm6, 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [ecx], 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [edx+508], 123	 # AVX512DQ Disp8
	vfpclassss	k5{k7}, DWORD PTR [edx+512], 123	 # AVX512DQ
	vfpclassss	k5{k7}, DWORD PTR [edx-512], 123	 # AVX512DQ Disp8
	vfpclassss	k5{k7}, DWORD PTR [edx-516], 123	 # AVX512DQ
# vinsert{f,i}{64x2,32x8}: 128-bit inserts take xmm/16-byte memory (Disp8
# limit +/-2048); 256-bit inserts take ymm/32-byte memory (+/-4096).
# vbroadcasti32x2 mirrors the earlier vbroadcastf32x2 coverage.
	vinsertf64x2	zmm6{k7}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinsertf64x2	zmm6{k7}{z}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, xmm4, 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123	 # AVX512DQ Disp8
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123	 # AVX512DQ
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123	 # AVX512DQ Disp8
	vinsertf64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinsertf32x8	zmm6{k7}{z}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, ymm4, 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123	 # AVX512DQ Disp8
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123	 # AVX512DQ
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123	 # AVX512DQ Disp8
	vinsertf32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinserti64x2	zmm6{k7}{z}, zmm5, xmm4, 0xab	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, xmm4, 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [ecx], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2032], 123	 # AVX512DQ Disp8
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx+2048], 123	 # AVX512DQ
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2048], 123	 # AVX512DQ Disp8
	vinserti64x2	zmm6{k7}, zmm5, XMMWORD PTR [edx-2064], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinserti32x8	zmm6{k7}{z}, zmm5, ymm4, 0xab	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, ymm4, 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [ecx], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4064], 123	 # AVX512DQ Disp8
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx+4096], 123	 # AVX512DQ
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4096], 123	 # AVX512DQ Disp8
	vinserti32x8	zmm6{k7}, zmm5, YMMWORD PTR [edx-4128], 123	 # AVX512DQ
	vbroadcasti32x2	zmm6, xmm7	 # AVX512DQ
	vbroadcasti32x2	zmm6{k7}, xmm7	 # AVX512DQ
	vbroadcasti32x2	zmm6{k7}{z}, xmm7	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [ecx]	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [edx+1016]	 # AVX512DQ Disp8
	vbroadcasti32x2	zmm6, QWORD PTR [edx+1024]	 # AVX512DQ
	vbroadcasti32x2	zmm6, QWORD PTR [edx-1024]	 # AVX512DQ Disp8
	vbroadcasti32x2	zmm6, QWORD PTR [edx-1032]	 # AVX512DQ
# vpmullq (64-bit element multiply, low halves) and the vrange family.
# The packed vrange forms accept {sae} on the last source; the scalar
# forms exercise 8-/4-byte memory operands with their Disp8 boundaries.
	vpmullq	zmm6, zmm5, zmm4	 # AVX512DQ
	vpmullq	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vpmullq	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vpmullq	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vpmullq	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vpmullq	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vpmullq	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangepd	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangepd	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4{sae}, 0xab	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4, 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, zmm4{sae}, 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, qword bcst [eax], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, qword bcst [edx+1016], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, qword bcst [edx+1024], 123	 # AVX512DQ
	vrangepd	zmm6, zmm5, qword bcst [edx-1024], 123	 # AVX512DQ Disp8
	vrangepd	zmm6, zmm5, qword bcst [edx-1032], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangeps	zmm6{k7}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangeps	zmm6{k7}{z}, zmm5, zmm4, 0xab	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4{sae}, 0xab	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4, 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, zmm4{sae}, 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [ecx], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, dword bcst [eax], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx+8128], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx+8192], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx-8192], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, ZMMWORD PTR [edx-8256], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, dword bcst [edx+508], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, dword bcst [edx+512], 123	 # AVX512DQ
	vrangeps	zmm6, zmm5, dword bcst [edx-512], 123	 # AVX512DQ Disp8
	vrangeps	zmm6, zmm5, dword bcst [edx-516], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangesd	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4, 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [ecx], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123	 # AVX512DQ Disp8
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123	 # AVX512DQ
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123	 # AVX512DQ Disp8
	vrangesd	xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangess	xmm6{k7}{z}, xmm5, xmm4, 0xab	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4{sae}, 0xab	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4, 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, xmm4{sae}, 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [ecx], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx+508], 123	 # AVX512DQ Disp8
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx+512], 123	 # AVX512DQ
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx-512], 123	 # AVX512DQ Disp8
	vrangess	xmm6{k7}, xmm5, DWORD PTR [edx-516], 123	 # AVX512DQ
# Intel-syntax repeat of the bitwise logic coverage (vand/vandn/vor/vxor,
# pd and ps).  Broadcasts use the "qword bcst"/"dword bcst" spelling.
# NOTE(review): the final vxorps group continues past the end of this chunk.
	vandpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vandpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vandpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vandpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vandps	zmm6, zmm5, zmm4	 # AVX512DQ
	vandps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vandps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vandps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vandnpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vandnpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandnpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandnpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandnpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vandnpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vandnpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vandnps	zmm6, zmm5, zmm4	 # AVX512DQ
	vandnps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vandnps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vandnps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vandnps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vandnps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vandnps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vorpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vorpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vorpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vorpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vorpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vorpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vorpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vorps	zmm6, zmm5, zmm4	 # AVX512DQ
	vorps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vorps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vorps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vorps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vorps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, dword bcst [edx+512]	 # AVX512DQ
	vorps	zmm6, zmm5, dword bcst [edx-512]	 # AVX512DQ Disp8
	vorps	zmm6, zmm5, dword bcst [edx-516]	 # AVX512DQ
	vxorpd	zmm6, zmm5, zmm4	 # AVX512DQ
	vxorpd	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vxorpd	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vxorpd	zmm6, zmm5, qword bcst [eax]	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vxorpd	zmm6, zmm5, qword bcst [edx+1016]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, qword bcst [edx+1024]	 # AVX512DQ
	vxorpd	zmm6, zmm5, qword bcst [edx-1024]	 # AVX512DQ Disp8
	vxorpd	zmm6, zmm5, qword bcst [edx-1032]	 # AVX512DQ
	vxorps	zmm6, zmm5, zmm4	 # AVX512DQ
	vxorps	zmm6{k7}, zmm5, zmm4	 # AVX512DQ
	vxorps	zmm6{k7}{z}, zmm5, zmm4	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [ecx]	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456]	 # AVX512DQ
	vxorps	zmm6, zmm5, dword bcst [eax]	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx+8128]	 # AVX512DQ Disp8
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx+8192]	 # AVX512DQ
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx-8192]	 # AVX512DQ Disp8
	vxorps	zmm6, zmm5, ZMMWORD PTR [edx-8256]	 # AVX512DQ
	vxorps	zmm6, zmm5, dword bcst [edx+508]	 # AVX512DQ Disp8
vxorps zmm6, zmm5, dword bcst [edx+512] # AVX512DQ
vxorps zmm6, zmm5, dword bcst [edx-512] # AVX512DQ Disp8
vxorps zmm6, zmm5, dword bcst [edx-516] # AVX512DQ
vreducepd zmm6, zmm5, 0xab # AVX512DQ
vreducepd zmm6{k7}, zmm5, 0xab # AVX512DQ
vreducepd zmm6{k7}{z}, zmm5, 0xab # AVX512DQ
vreducepd zmm6, zmm5{sae}, 0xab # AVX512DQ
vreducepd zmm6, zmm5, 123 # AVX512DQ
vreducepd zmm6, zmm5{sae}, 123 # AVX512DQ
vreducepd zmm6, ZMMWORD PTR [ecx], 123 # AVX512DQ
vreducepd zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512DQ
vreducepd zmm6, qword bcst [eax], 123 # AVX512DQ
vreducepd zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512DQ Disp8
vreducepd zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512DQ
vreducepd zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512DQ Disp8
vreducepd zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512DQ
vreducepd zmm6, qword bcst [edx+1016], 123 # AVX512DQ Disp8
vreducepd zmm6, qword bcst [edx+1024], 123 # AVX512DQ
vreducepd zmm6, qword bcst [edx-1024], 123 # AVX512DQ Disp8
vreducepd zmm6, qword bcst [edx-1032], 123 # AVX512DQ
vreduceps zmm6, zmm5, 0xab # AVX512DQ
vreduceps zmm6{k7}, zmm5, 0xab # AVX512DQ
vreduceps zmm6{k7}{z}, zmm5, 0xab # AVX512DQ
vreduceps zmm6, zmm5{sae}, 0xab # AVX512DQ
vreduceps zmm6, zmm5, 123 # AVX512DQ
vreduceps zmm6, zmm5{sae}, 123 # AVX512DQ
vreduceps zmm6, ZMMWORD PTR [ecx], 123 # AVX512DQ
vreduceps zmm6, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512DQ
vreduceps zmm6, dword bcst [eax], 123 # AVX512DQ
vreduceps zmm6, ZMMWORD PTR [edx+8128], 123 # AVX512DQ Disp8
vreduceps zmm6, ZMMWORD PTR [edx+8192], 123 # AVX512DQ
vreduceps zmm6, ZMMWORD PTR [edx-8192], 123 # AVX512DQ Disp8
vreduceps zmm6, ZMMWORD PTR [edx-8256], 123 # AVX512DQ
vreduceps zmm6, dword bcst [edx+508], 123 # AVX512DQ Disp8
vreduceps zmm6, dword bcst [edx+512], 123 # AVX512DQ
vreduceps zmm6, dword bcst [edx-512], 123 # AVX512DQ Disp8
vreduceps zmm6, dword bcst [edx-516], 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, xmm4, 0xab # AVX512DQ
vreducesd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512DQ
vreducesd xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512DQ
vreducesd xmm6{k7}, xmm5, xmm4, 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, QWORD PTR [ecx], 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456], 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, QWORD PTR [edx+1016], 123 # AVX512DQ Disp8
vreducesd xmm6{k7}, xmm5, QWORD PTR [edx+1024], 123 # AVX512DQ
vreducesd xmm6{k7}, xmm5, QWORD PTR [edx-1024], 123 # AVX512DQ Disp8
vreducesd xmm6{k7}, xmm5, QWORD PTR [edx-1032], 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, xmm4, 0xab # AVX512DQ
vreducess xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512DQ
vreducess xmm6{k7}, xmm5, xmm4{sae}, 0xab # AVX512DQ
vreducess xmm6{k7}, xmm5, xmm4, 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, xmm4{sae}, 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, DWORD PTR [ecx], 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456], 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, DWORD PTR [edx+508], 123 # AVX512DQ Disp8
vreducess xmm6{k7}, xmm5, DWORD PTR [edx+512], 123 # AVX512DQ
vreducess xmm6{k7}, xmm5, DWORD PTR [edx-512], 123 # AVX512DQ Disp8
vreducess xmm6{k7}, xmm5, DWORD PTR [edx-516], 123 # AVX512DQ
kandb k5, k6, k7 # AVX512DQ
kandnb k5, k6, k7 # AVX512DQ
korb k5, k6, k7 # AVX512DQ
kxnorb k5, k6, k7 # AVX512DQ
kxorb k5, k6, k7 # AVX512DQ
knotb k5, k6 # AVX512DQ
kortestb k5, k6 # AVX512DQ
ktestw k5, k6 # AVX512DQ
ktestb k5, k6 # AVX512DQ
kshiftrb k5, k6, 0xab # AVX512DQ
kshiftrb k5, k6, 123 # AVX512DQ
kshiftlb k5, k6, 0xab # AVX512DQ
kshiftlb k5, k6, 123 # AVX512DQ
kmovb k5, k6 # AVX512DQ
kmovb k5, BYTE PTR [ecx] # AVX512DQ
kmovb k5, BYTE PTR [esp+esi*8-123456] # AVX512DQ
kmovb BYTE PTR [ecx], k5 # AVX512DQ
kmovb BYTE PTR [esp+esi*8-123456], k5 # AVX512DQ
kmovb k5, eax # AVX512DQ
kmovb k5, ebp # AVX512DQ
kmovb eax, k5 # AVX512DQ
kmovb ebp, k5 # AVX512DQ
kaddw k5, k6, k7 # AVX512DQ
kaddb k5, k6, k7 # AVX512DQ
vextractf64x2 XMMWORD PTR [ecx], zmm6, 0xab # AVX512DQ
vextractf64x2 XMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512DQ
vextractf64x2 XMMWORD PTR [ecx], zmm6, 123 # AVX512DQ
vextractf64x2 XMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512DQ
vextractf64x2 XMMWORD PTR [edx+2032], zmm6, 123 # AVX512DQ Disp8
vextractf64x2 XMMWORD PTR [edx+2048], zmm6, 123 # AVX512DQ
vextractf64x2 XMMWORD PTR [edx-2048], zmm6, 123 # AVX512DQ Disp8
vextractf64x2 XMMWORD PTR [edx-2064], zmm6, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [ecx], zmm6, 0xab # AVX512DQ
vextractf32x8 YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512DQ
vextractf32x8 YMMWORD PTR [ecx], zmm6, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [edx+4064], zmm6, 123 # AVX512DQ Disp8
vextractf32x8 YMMWORD PTR [edx+4096], zmm6, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [edx-4096], zmm6, 123 # AVX512DQ Disp8
vextractf32x8 YMMWORD PTR [edx-4128], zmm6, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [ecx], zmm6, 0xab # AVX512DQ
vextracti64x2 XMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512DQ
vextracti64x2 XMMWORD PTR [ecx], zmm6, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [edx+2032], zmm6, 123 # AVX512DQ Disp8
vextracti64x2 XMMWORD PTR [edx+2048], zmm6, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [edx-2048], zmm6, 123 # AVX512DQ Disp8
vextracti64x2 XMMWORD PTR [edx-2064], zmm6, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [ecx], zmm6, 0xab # AVX512DQ
vextracti32x8 YMMWORD PTR [ecx]{k7}, zmm6, 0xab # AVX512DQ
vextracti32x8 YMMWORD PTR [ecx], zmm6, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [esp+esi*8-123456], zmm6, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [edx+4064], zmm6, 123 # AVX512DQ Disp8
vextracti32x8 YMMWORD PTR [edx+4096], zmm6, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [edx-4096], zmm6, 123 # AVX512DQ Disp8
vextracti32x8 YMMWORD PTR [edx-4128], zmm6, 123 # AVX512DQ
vcvttpd2qq zmm6, zmm5 # AVX512DQ
vcvttpd2qq zmm6{k7}, zmm5 # AVX512DQ
vcvttpd2qq zmm6{k7}{z}, zmm5 # AVX512DQ
vcvttpd2qq zmm6, zmm5{sae} # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [ecx] # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttpd2qq zmm6, qword bcst [eax] # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [edx+8128] # AVX512DQ Disp8
vcvttpd2qq zmm6, ZMMWORD PTR [edx+8192] # AVX512DQ
vcvttpd2qq zmm6, ZMMWORD PTR [edx-8192] # AVX512DQ Disp8
vcvttpd2qq zmm6, ZMMWORD PTR [edx-8256] # AVX512DQ
vcvttpd2qq zmm6, qword bcst [edx+1016] # AVX512DQ Disp8
vcvttpd2qq zmm6, qword bcst [edx+1024] # AVX512DQ
vcvttpd2qq zmm6, qword bcst [edx-1024] # AVX512DQ Disp8
vcvttpd2qq zmm6, qword bcst [edx-1032] # AVX512DQ
vcvttpd2uqq zmm6, zmm5 # AVX512DQ
vcvttpd2uqq zmm6{k7}, zmm5 # AVX512DQ
vcvttpd2uqq zmm6{k7}{z}, zmm5 # AVX512DQ
vcvttpd2uqq zmm6, zmm5{sae} # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [ecx] # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttpd2uqq zmm6, qword bcst [eax] # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [edx+8128] # AVX512DQ Disp8
vcvttpd2uqq zmm6, ZMMWORD PTR [edx+8192] # AVX512DQ
vcvttpd2uqq zmm6, ZMMWORD PTR [edx-8192] # AVX512DQ Disp8
vcvttpd2uqq zmm6, ZMMWORD PTR [edx-8256] # AVX512DQ
vcvttpd2uqq zmm6, qword bcst [edx+1016] # AVX512DQ Disp8
vcvttpd2uqq zmm6, qword bcst [edx+1024] # AVX512DQ
vcvttpd2uqq zmm6, qword bcst [edx-1024] # AVX512DQ Disp8
vcvttpd2uqq zmm6, qword bcst [edx-1032] # AVX512DQ
vcvttps2qq zmm6{k7}, ymm5 # AVX512DQ
vcvttps2qq zmm6{k7}{z}, ymm5 # AVX512DQ
vcvttps2qq zmm6{k7}, ymm5{sae} # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [ecx] # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttps2qq zmm6{k7}, dword bcst [eax] # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512DQ
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512DQ
vcvttps2qq zmm6{k7}, dword bcst [edx+508] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, dword bcst [edx+512] # AVX512DQ
vcvttps2qq zmm6{k7}, dword bcst [edx-512] # AVX512DQ Disp8
vcvttps2qq zmm6{k7}, dword bcst [edx-516] # AVX512DQ
vcvttps2qq zmm6{k7}, DWORD BCST [edx+508] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, ymm5 # AVX512DQ
vcvttps2uqq zmm6{k7}{z}, ymm5 # AVX512DQ
vcvttps2uqq zmm6{k7}, ymm5{sae} # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [ecx] # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512DQ
vcvttps2uqq zmm6{k7}, dword bcst [eax] # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx+4064] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx+4096] # AVX512DQ
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx-4096] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, YMMWORD PTR [edx-4128] # AVX512DQ
vcvttps2uqq zmm6{k7}, dword bcst [edx+508] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, dword bcst [edx+512] # AVX512DQ
vcvttps2uqq zmm6{k7}, dword bcst [edx-512] # AVX512DQ Disp8
vcvttps2uqq zmm6{k7}, dword bcst [edx-516] # AVX512DQ
vcvttps2uqq zmm6{k7}, DWORD BCST [edx+508] # AVX512DQ Disp8
vpmovd2m k5, zmm6 # AVX512DQ
vpmovq2m k5, zmm6 # AVX512DQ
vpmovm2d zmm6, k5 # AVX512DQ
vpmovm2q zmm6, k5 # AVX512DQ
|
stsp/binutils-ia16
| 1,101
|
gas/testsuite/gas/i386/intel-regs.s
|
# Testcase body: Intel-syntax register-name recognition vs. the active
# .arch/.code mode.  Most `mov' lines below name a register that does not
# exist for the selected architecture (e.g. rax/r8/xmm16 under .arch i286);
# the assembler must then treat the name as an ordinary symbol or diagnose
# it, rather than accept the register.
# NOTE(review): in GAS i386 syntax `;' is a statement SEPARATOR, not a
# comment, so `add [bx+si], al' on those lines is a second instruction that
# is part of the expected output — do not remove or reformat it.
.text
.intel_syntax noprefix
mov eax, tmm1
.arch i286
.code16
mov ax, eax ; add [bx+si], al
mov ax, rax ; add [bx+si], al
mov ax, axl ; add [bx+si], al
mov ax, r8b ; add [bx+si], al
mov ax, r8w ; add [bx+si], al
mov ax, r8d ; add [bx+si], al
mov ax, r8 ; add [bx+si], al
mov ax, fs ; add [bx+si], al
mov ax, st ; add [bx+si], al
mov ax, cr0 ; add [bx+si], al
mov ax, dr0 ; add [bx+si], al
mov ax, tr0 ; add [bx+si], al
mov ax, mm0 ; add [bx+si], al
mov ax, xmm0 ; add [bx+si], al
mov ax, ymm0 ; add [bx+si], al
mov ax, xmm16 ; add [bx+si], al
mov ax, zmm0 ; add [bx+si], al
# Same register names again in 32-bit mode with a generic 32-bit arch.
.arch generic32
.code32
mov eax, rax
mov eax, axl
mov eax, r8b
mov eax, r8w
mov eax, r8d
mov eax, r8
mov eax, st
mov eax, cr0
mov eax, dr0
mov eax, tr0
mov eax, mm0
mov eax, xmm0
mov eax, ymm0
mov eax, xmm16
mov eax, zmm0
# Enabling each sub-architecture should make its register file usable.
.arch .387
ffree st
.arch .mmx
pxor mm0, mm0
.arch .sse
xorps xmm0, xmm0
.arch .avx
vxorps ymm0, ymm0, ymm0
.arch generic64
.code64
mov axl, r8b
mov ax, r8w
mov eax, r8d
mov rax, r8
# Register-like names used as labels/branch targets must stay plain symbols.
ymm8:
jmp ymm8
tmm0:
jmp tmm0
|
stsp/binutils-ia16
| 2,454
|
gas/testsuite/gas/i386/intel-movs.s
|
# Testcase body: Intel-syntax forms of the MOVS string instruction family.
# Exercises every accepted operand spelling (implicit, explicit es:[di]/[si]
# pairs, optional segment overrides, and `byte/word/dword/qword ptr' size
# qualifiers) for movsb/movsw/movsd (and movsq under x86_64).
# `adi'/`asi' abstract the address-size-matching di/si vs. rdi/rsi pair so
# the same lines serve the 16/32/64-bit variants of the test.
.text
.intel_syntax noprefix
.ifdef x86_16
.code16
.endif
.ifdef x86_64
.equ adi, rdi
.equ asi, rsi
.else
.equ adi, di
.equ asi, si
.endif
movs:
# Byte variant.
movsb
movsb es:[edi], [esi]
movsb es:[edi], fs:[esi]
movsb [edi], [esi]
movsb byte ptr es:[edi], [esi]
movsb es:[edi], byte ptr [esi]
movsb byte ptr es:[edi], byte ptr [esi]
movs byte ptr es:[edi], [esi]
movs es:[edi], byte ptr [esi]
movs byte ptr es:[edi], byte ptr [esi]
movsb es:[adi], [asi]
movsb es:[adi], fs:[asi]
movsb [adi], [asi]
movsb byte ptr es:[adi], [asi]
movsb es:[adi], byte ptr [asi]
movsb byte ptr es:[adi], byte ptr [asi]
movs byte ptr es:[adi], [asi]
movs es:[adi], byte ptr [asi]
movs byte ptr es:[adi], byte ptr [asi]
# Word variant.
movsw
movsw es:[edi], [esi]
movsw es:[edi], fs:[esi]
movsw [edi], [esi]
movsw word ptr es:[edi], [esi]
movsw es:[edi], word ptr [esi]
movsw word ptr es:[edi], word ptr [esi]
movs word ptr es:[edi], [esi]
movs es:[edi], word ptr [esi]
movs word ptr es:[edi], word ptr [esi]
movsw es:[adi], [asi]
movsw es:[adi], fs:[asi]
movsw [adi], [asi]
movsw word ptr es:[adi], [asi]
movsw es:[adi], word ptr [asi]
movsw word ptr es:[adi], word ptr [asi]
movs word ptr es:[adi], [asi]
movs es:[adi], word ptr [asi]
movs word ptr es:[adi], word ptr [asi]
# Dword variant (movsd here is the string insn, not the SSE2 move).
movsd
movsd es:[edi], [esi]
movsd es:[edi], fs:[esi]
movsd [edi], [esi]
movsd dword ptr es:[edi], [esi]
movsd es:[edi], dword ptr [esi]
movsd dword ptr es:[edi], dword ptr [esi]
movs dword ptr es:[edi], [esi]
movs es:[edi], dword ptr [esi]
movs dword ptr es:[edi], dword ptr [esi]
movsd es:[adi], [asi]
movsd es:[adi], fs:[asi]
movsd [adi], [asi]
movsd dword ptr es:[adi], [asi]
movsd es:[adi], dword ptr [asi]
movsd dword ptr es:[adi], dword ptr [asi]
movs dword ptr es:[adi], [asi]
movs es:[adi], dword ptr [asi]
movs dword ptr es:[adi], dword ptr [asi]
# Qword variant is only encodable in 64-bit mode.
.ifdef x86_64
movsq
movsq es:[rdi], [rsi]
movsq es:[rdi], fs:[rsi]
movsq [rdi], [rsi]
movsq qword ptr es:[rdi], [rsi]
movsq es:[rdi], qword ptr [rsi]
movsq qword ptr es:[rdi], qword ptr [rsi]
movs qword ptr es:[rdi], [rsi]
movs es:[rdi], qword ptr [rsi]
movs qword ptr es:[rdi], qword ptr [rsi]
movsq es:[edi], [esi]
movsq es:[edi], fs:[esi]
movsq [edi], [esi]
movsq qword ptr es:[edi], [esi]
movsq es:[edi], qword ptr [esi]
movsq qword ptr es:[edi], qword ptr [esi]
movs qword ptr es:[edi], [esi]
movs es:[edi], qword ptr [esi]
movs qword ptr es:[edi], qword ptr [esi]
.endif
|
stsp/binutils-ia16
| 1,667
|
gas/testsuite/gas/i386/x86-64-lock-1.s
|
# 64bit lockable Instructions
# Testcase body: every instruction form that legitimately accepts a LOCK
# prefix, first in AT&T syntax and then repeated in Intel syntax.  The two
# consecutive `lock xchg' lines cover both operand orders (xchg is lockable
# with a memory operand regardless of direction) — the repetition is
# intentional, not a typo.
.text
foo:
lock add %eax, (%rbx)
lock addl $0x64, (%rbx)
lock adc %eax, (%rbx)
lock adcl $0x64, (%rbx)
lock and %eax, (%rbx)
lock andl $0x64, (%rbx)
lock btc %eax, (%rbx)
lock btcl $0x64, (%rbx)
lock btr %eax, (%rbx)
lock btrl $0x64, (%rbx)
lock bts %eax, (%rbx)
lock btsl $0x64, (%rbx)
lock cmpxchg %eax,(%rbx)
lock cmpxchg8b (%rbx)
lock cmpxchg16b (%rbx)
lock decl (%rbx)
lock incl (%rbx)
lock negl (%rbx)
lock notl (%rbx)
lock or %eax, (%rbx)
lock orl $0x64, (%rbx)
lock sbb %eax, (%rbx)
lock sbbl $0x64, (%rbx)
lock sub %eax, (%rbx)
lock subl $0x64, (%rbx)
lock xadd %eax, (%rbx)
lock xchg (%rbx), %eax
lock xchg %eax, (%rbx)
lock xor %eax, (%rbx)
lock xorl $0x64, (%rbx)
# Same set again in Intel syntax.
.intel_syntax noprefix
lock add DWORD PTR [rbx],eax
lock add DWORD PTR [rbx],0x64
lock adc DWORD PTR [rbx],eax
lock adc DWORD PTR [rbx],0x64
lock and DWORD PTR [rbx],eax
lock and DWORD PTR [rbx],0x64
lock btc DWORD PTR [rbx],eax
lock btc DWORD PTR [rbx],0x64
lock btr DWORD PTR [rbx],eax
lock btr DWORD PTR [rbx],0x64
lock bts DWORD PTR [rbx],eax
lock bts DWORD PTR [rbx],0x64
lock cmpxchg DWORD PTR [rbx],eax
lock cmpxchg8b QWORD PTR [rbx]
lock cmpxchg16b OWORD PTR [rbx]
lock dec DWORD PTR [rbx]
lock inc DWORD PTR [rbx]
lock neg DWORD PTR [rbx]
lock not DWORD PTR [rbx]
lock or DWORD PTR [rbx],eax
lock or DWORD PTR [rbx],0x64
lock sbb DWORD PTR [rbx],eax
lock sbb DWORD PTR [rbx],0x64
lock sub DWORD PTR [rbx],eax
lock sub DWORD PTR [rbx],0x64
lock xadd DWORD PTR [rbx],eax
lock xchg DWORD PTR [rbx],eax
lock xchg DWORD PTR [rbx],eax
lock xor DWORD PTR [rbx],eax
lock xor DWORD PTR [rbx],0x64
|
stsp/binutils-ia16
| 1,107
|
gas/testsuite/gas/i386/tlsnopic.s
|
/* Testcase body: IA-32 TLS access sequences for non-PIC (main executable)
   code.  Exercises the @GOTTPOFF, @TPOFF, @NTPOFF and @INDNTPOFF
   relocation operators against both local and hidden-global .tdata
   symbols, with unrelated instructions interleaved to check that the
   linker's TLS transitions tolerate scheduling.  */
.section ".tdata", "awT", @progbits
.globl baz
.hidden baz
.globl var
.hidden var2
bar: .long 27
baz: .long 29
var: .long 31
var2: .long 33
.text
.globl fn
.type fn,@function
fn:
/* Main binary, no PIC. */
1: movl 1b, %edx
addl $_GLOBAL_OFFSET_TABLE_+[.-1b], %edx
/* foo can be anywhere in startup TLS. */
movl %gs:0, %eax
subl foo@GOTTPOFF(%edx), %eax
/* %eax now contains &foo. */
/* bar only in the main program. */
movl %gs:0, %eax
subl $bar@TPOFF, %eax
/* %eax now contains &bar. */
/* baz only in the main program. */
movl %gs:0, %ecx
/* Arbitrary instructions in between. */
nop
subl $baz@TPOFF, %ecx
/* %ecx now contains &baz. */
/* var and var2 only in the main program. */
movl %gs:0, %ecx
/* Arbitrary instructions in between. */
nop
nop
leal var@NTPOFF(%ecx), %eax
/* Arbitrary instructions in between. */
nop
leal var2@NTPOFF(%ecx), %edx
/* foo can be anywhere in startup TLS. */
movl foo@INDNTPOFF, %eax
movl %gs:(%eax), %eax
/* %eax now contains foo. */
movl %gs:0, %eax
addl foo@INDNTPOFF, %eax
/* %eax now contains &foo. */
ret
|
stsp/binutils-ia16
| 3,501
|
gas/testsuite/gas/i386/sse2.s
|
# Testcase body: 32-bit AT&T-syntax coverage of the SSE2 instruction set —
# each mnemonic with both register and memory source forms where applicable.
# Note `movsd' here is the SSE2 scalar-double move (xmm operands), and the
# cmpsd/cmppd lines with $imm are the pseudo-op-free compare encodings.
foo:
movnti %eax, (%eax)
sfence
lfence
mfence
# Packed/scalar double arithmetic and logic.
addpd (%ecx),%xmm0
addpd %xmm2,%xmm1
addsd (%ebx),%xmm2
addsd %xmm4,%xmm3
andnpd 0x0(%ebp),%xmm4
andnpd %xmm6,%xmm5
andpd (%edi),%xmm6
andpd %xmm0,%xmm7
# Compares: immediate-predicate forms, then the named pseudo-ops.
cmppd $0x2,%xmm1,%xmm0
cmppd $0x3,(%edx),%xmm1
cmpsd $0x4,%xmm2,%xmm2
cmpsd $0x5,(%esp,1),%xmm3
cmppd $0x6,%xmm5,%xmm4
cmppd $0x7,(%esi),%xmm5
cmpsd $0x0,%xmm7,%xmm6
cmpsd $0x1,(%eax),%xmm7
cmpeqpd %xmm1,%xmm0
cmpeqpd (%edx),%xmm1
cmpeqsd %xmm2,%xmm2
cmpeqsd (%esp,1),%xmm3
cmpltpd %xmm5,%xmm4
cmpltpd (%esi),%xmm5
cmpltsd %xmm7,%xmm6
cmpltsd (%eax),%xmm7
cmplepd (%ecx),%xmm0
cmplepd %xmm2,%xmm1
cmplesd (%ebx),%xmm2
cmplesd %xmm4,%xmm3
cmpunordpd 0x0(%ebp),%xmm4
cmpunordpd %xmm6,%xmm5
cmpunordsd (%edi),%xmm6
cmpunordsd %xmm0,%xmm7
cmpneqpd %xmm1,%xmm0
cmpneqpd (%edx),%xmm1
cmpneqsd %xmm2,%xmm2
cmpneqsd (%esp,1),%xmm3
cmpnltpd %xmm5,%xmm4
cmpnltpd (%esi),%xmm5
cmpnltsd %xmm7,%xmm6
cmpnltsd (%eax),%xmm7
cmpnlepd (%ecx),%xmm0
cmpnlepd %xmm2,%xmm1
cmpnlesd (%ebx),%xmm2
cmpnlesd %xmm4,%xmm3
cmpordpd 0x0(%ebp),%xmm4
cmpordpd %xmm6,%xmm5
cmpordsd (%edi),%xmm6
cmpordsd %xmm0,%xmm7
comisd %xmm1,%xmm0
comisd (%edx),%xmm1
# Conversions between MMX/integer and double.
cvtpi2pd %mm3,%xmm2
cvtpi2pd (%esp,1),%xmm3
cvtsi2sd %ebp,%xmm4
cvtsi2sd (%esi),%xmm5
cvtpd2pi %xmm7,%mm6
cvtpd2pi (%eax),%mm7
cvtsd2si (%ecx),%eax
cvtsd2si %xmm2,%ecx
cvttpd2pi (%ebx),%mm2
cvttpd2pi %xmm4,%mm3
cvttsd2si 0x0(%ebp),%esp
cvttsd2si %xmm6,%ebp
divpd %xmm1,%xmm0
divpd (%edx),%xmm1
divsd %xmm3,%xmm2
divsd (%esp,1),%xmm3
ldmxcsr 0x0(%ebp)
stmxcsr (%esi)
sfence
maxpd %xmm1,%xmm0
maxpd (%edx),%xmm1
maxsd %xmm3,%xmm2
maxsd (%esp,1),%xmm3
minpd %xmm5,%xmm4
minpd (%esi),%xmm5
minsd %xmm7,%xmm6
minsd (%eax),%xmm7
# Data movement.
movapd %xmm1,%xmm0
movapd %xmm2,(%ecx)
movapd (%edx),%xmm2
movhpd %xmm5,(%esp,1)
movhpd (%esi),%xmm5
movlpd %xmm0,(%edi)
movlpd (%eax),%xmm0
movmskpd %xmm2,%ecx
movupd %xmm3,%xmm2
movupd %xmm4,(%edx)
movupd 0x0(%ebp),%xmm4
movsd %xmm6,%xmm5
movsd %xmm7,(%esi)
movsd (%eax),%xmm7
mulpd %xmm1,%xmm0
mulpd (%edx),%xmm1
mulsd %xmm2,%xmm2
mulsd (%esp,1),%xmm3
orpd %xmm5,%xmm4
orpd (%esi),%xmm5
shufpd $0x2,(%edi),%xmm6
shufpd $0x3,%xmm0,%xmm7
sqrtpd %xmm1,%xmm0
sqrtpd (%edx),%xmm1
sqrtsd %xmm2,%xmm2
sqrtsd (%esp,1),%xmm3
subpd %xmm5,%xmm4
subpd (%esi),%xmm5
subsd %xmm7,%xmm6
subsd (%eax),%xmm7
ucomisd (%ecx),%xmm0
ucomisd %xmm2,%xmm1
unpckhpd (%ebx),%xmm2
unpckhpd %xmm4,%xmm3
unpcklpd 0x0(%ebp),%xmm4
unpcklpd %xmm6,%xmm5
xorpd (%edi),%xmm6
xorpd %xmm0,%xmm7
movntpd %xmm6,(%ebx)
xorpd %xmm0, %xmm1
# SSE2 packed-format conversions and integer SIMD on xmm registers.
cvtdq2pd %xmm0, %xmm1
cvtpd2dq %xmm0, %xmm1
cvtdq2ps %xmm0, %xmm1
cvtpd2ps %xmm0, %xmm1
cvtps2pd %xmm0, %xmm1
cvtps2dq %xmm0, %xmm1
cvtsd2ss %xmm0, %xmm1
cvtss2sd %xmm0, %xmm1
cvttpd2dq %xmm0, %xmm1
cvttps2dq %xmm0, %xmm1
maskmovdqu %xmm0, %xmm1
movdqa %xmm0, %xmm1
movdqa %xmm0, (%esi)
movdqu %xmm0, %xmm1
movdqu %xmm0, (%esi)
movdq2q %xmm0, %mm1
movq2dq %mm0, %xmm1
pmuludq %mm0, %mm1
pmuludq (%eax), %mm1
pmuludq %xmm0, %xmm1
pmuludq (%eax), %xmm1
pshufd $1, %xmm0, %xmm1
pshufhw $1, %xmm0, %xmm1
pshuflw $1, %xmm0, %xmm1
pslldq $1, %xmm0
psrldq $1, %xmm0
punpckhqdq %xmm0, %xmm1
paddq %mm1,%mm0
paddq (%eax),%mm0
paddq %xmm1,%xmm0
paddq (%eax),%xmm0
psubq %mm1,%mm0
psubq (%eax),%mm0
psubq %xmm1,%xmm0
psubq (%eax),%xmm0
|
stsp/binutils-ia16
| 1,233
|
gas/testsuite/gas/i386/lfence-load.s
|
# Testcase body: instructions that read memory (or otherwise matter for
# load serialization), used to check the assembler's -mlfence-before-load /
# lfence-after-load mitigation insertion.  The mix deliberately includes
# loads, x87 memory ops, string ops, RMW instructions, and register-only
# instructions that must NOT receive an lfence.
# NOTE(review): `xlatb (%ebx)' and plain `fsts'/`fstenv' spellings are part
# of the test input as-is — do not "normalize" them.
.text
_start:
vldmxcsr (%ebp)
lgdt (%ebp)
vmptrld (%ebp)
vmclear (%ebp)
invpcid (%ebp), %edx
invlpg (%ebp)
clflush (%ebp)
clflushopt (%ebp)
clwb (%ebp)
cldemote (%ebp)
# MPX bound-register instructions.
bndmk (%ebp), %bnd1
bndcl (%ebp), %bnd1
bndcu (%ebp), %bnd1
bndcn (%ebp), %bnd1
bndstx %bnd1, (%ebp)
bndldx (%ebp), %bnd1
prefetcht0 (%ebp)
prefetcht1 (%ebp)
prefetcht2 (%ebp)
prefetchw (%ebp)
pop %ds
popf
popa
xlatb (%ebx)
# x87 memory operand forms.
fsts (%ebp)
flds (%ebp)
fistl (%ebp)
fists (%ebp)
fildl (%ebp)
filds (%ebp)
fsave (%ebp)
frstor (%ebp)
filds (%ebp)
fisttps (%ebp)
fldenv (%ebp)
fstenv (%ebp)
fadds (%ebp)
fadds (%esp)
fadd %st(3),%st
fadds (%ecx)
filds (%ecx)
fists (%ecx)
xrstor (%ecx)
prefetchnta (%ecx)
cmpxchg8b (%ecx)
incl %ecx
lgdt (%eax)
pfcmpeq 2(%esi),%mm4
popl (%eax)
popl %eax
rclw (%ecx)
testl $1,(%ecx)
incl (%ecx)
notl (%ecx)
divl (%ecx)
mull (%ecx)
idivl (%ecx)
imull (%ecx)
leal (%eax,%eax,2), %eax
leave
# String operations, with and without rep.
outsb
lodsb
rep movsl
rep scasl
rep cmpsl
rep lodsl
# Read-modify-write and load/compare forms in both directions.
addl $1, (%eax)
btl $1, (%eax)
xadd %eax,(%ebx)
xadd %eax,%ebx
xchg %eax,(%ebx)
xchg %eax,%ebx
cmp %eax,0x40(%ebp)
cmp 0x40(%ebp),%eax
add %eax,0x40(%ebp)
add (%eax),%eax
test %eax,0x40(%ebp)
test 0x40(%ebp),%eax
|
stsp/binutils-ia16
| 2,312
|
gas/testsuite/gas/i386/avx512_bf16.s
|
# Check 32bit AVX512_BF16 instructions
# Testcase body: vcvtne2ps2bf16 / vcvtneps2bf16 / vdpbf16ps with masking,
# zeroing, broadcast and Disp8-compressed displacement forms, first in
# AT&T syntax and then the same cases repeated in Intel syntax.
.allow_index_reg
.text
_start:
vcvtne2ps2bf16 %zmm4, %zmm5, %zmm6 #AVX512_BF16
vcvtne2ps2bf16 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512_BF16 MASK_ENABLING
vcvtne2ps2bf16 (%ecx){1to16}, %zmm5, %zmm6 #AVX512_BF16 BROADCAST_EN
vcvtne2ps2bf16 8128(%ecx), %zmm5, %zmm6 #AVX512_BF16 Disp8
vcvtne2ps2bf16 -8192(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 %zmm5, %ymm6 #AVX512_BF16
vcvtneps2bf16 0x10000000(%esp, %esi, 8), %ymm6{%k7} #AVX512_BF16 MASK_ENABLING
vcvtneps2bf16 (%ecx){1to16}, %ymm6 #AVX512_BF16 BROADCAST_EN
vcvtneps2bf16 8128(%ecx), %ymm6 #AVX512_BF16 Disp8
vcvtneps2bf16 -8192(%edx){1to16}, %ymm6{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps %zmm4, %zmm5, %zmm6 #AVX512_BF16
vdpbf16ps 0x10000000(%esp, %esi, 8), %zmm5, %zmm6{%k7} #AVX512_BF16 MASK_ENABLING
vdpbf16ps (%ecx){1to16}, %zmm5, %zmm6 #AVX512_BF16 BROADCAST_EN
vdpbf16ps 8128(%ecx), %zmm5, %zmm6 #AVX512_BF16 Disp8
vdpbf16ps -8192(%edx){1to16}, %zmm5, %zmm6{%k7}{z} #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
# Intel-syntax repetition of the cases above.
.intel_syntax noprefix
vcvtne2ps2bf16 zmm6, zmm5, zmm4 #AVX512_BF16
vcvtne2ps2bf16 zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vcvtne2ps2bf16 zmm6, zmm5, DWORD BCST [ecx] #AVX512_BF16 BROADCAST_EN
vcvtne2ps2bf16 zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512_BF16 Disp8
vcvtne2ps2bf16 zmm6{k7}{z}, zmm5, DWORD BCST [edx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vcvtneps2bf16 ymm6, zmm5 #AVX512_BF16
vcvtneps2bf16 ymm6{k7}, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vcvtneps2bf16 ymm6, DWORD BCST [ecx] #AVX512_BF16 BROADCAST_EN
vcvtneps2bf16 ymm6, ZMMWORD PTR [ecx+8128] #AVX512_BF16 Disp8
vcvtneps2bf16 ymm6{k7}{z}, DWORD BCST [edx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
vdpbf16ps zmm6, zmm5, zmm4 #AVX512_BF16
vdpbf16ps zmm6{k7}, zmm5, ZMMWORD PTR [esp+esi*8+0x10000000] #AVX512_BF16 MASK_ENABLING
vdpbf16ps zmm6, zmm5, DWORD BCST [ecx] #AVX512_BF16 BROADCAST_EN
vdpbf16ps zmm6, zmm5, ZMMWORD PTR [ecx+8128] #AVX512_BF16 Disp8
vdpbf16ps zmm6{k7}{z}, zmm5, DWORD BCST [edx-8192] #AVX512_BF16 Disp8 BROADCAST_EN MASK_ENABLING ZEROCTL
|
stsp/binutils-ia16
| 5,772
|
gas/testsuite/gas/i386/x86-64-disassem.s
|
# Testcase body: raw byte sequences for exercising the x86-64 DISASSEMBLER.
# The data is emitted with .byte on purpose so the assembler never
# legality-checks it; the expected .d file pins how objdump decodes each
# sequence (including truncated/odd VEX and EVEX encodings near the end).
# Do not change any byte value — the bytes themselves are the test input.
.text
.byte 0xFF, 0xEF
.byte 0xFF, 0xD8
.fill 0x5, 0x1, 0x90
.byte 0xC5, 0xEC, 0x4A, 0x9B
.byte 0xC5, 0xEC, 0x4A, 0x6F
.byte 0xC5, 0xEC, 0x4A, 0x3F
.byte 0xC5, 0xED, 0x4A, 0x9B
.byte 0xC5, 0xED, 0x4A, 0x6F
.byte 0xC5, 0xED, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4A, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x4A, 0x3F
.byte 0xC5, 0xEC, 0x41, 0x9B
.byte 0xC5, 0xEC, 0x41, 0x6F
.byte 0xC5, 0xEC, 0x41, 0x3F
.byte 0xC5, 0xED, 0x41, 0x9B
.byte 0xC5, 0xED, 0x41, 0x6F
.byte 0xC5, 0xED, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x41, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x41, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x41, 0x3F
.byte 0xC5, 0xEC, 0x42, 0x9B
.byte 0xC5, 0xEC, 0x42, 0x6F
.byte 0xC5, 0xEC, 0x42, 0x3F
.byte 0xC5, 0xED, 0x42, 0x9B
.byte 0xC5, 0xED, 0x42, 0x6F
.byte 0xC5, 0xED, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x42, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x42, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x42, 0x3F
.byte 0xC5, 0xEC, 0x4B, 0x9B
.byte 0xC5, 0xEC, 0x4B, 0x6F
.byte 0xC5, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xED, 0x4B, 0x9B
.byte 0xC5, 0xED, 0x4B, 0x6F
.byte 0xC5, 0xED, 0x4B, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x4B, 0x3F
.byte 0xC5, 0xF8, 0x44, 0x9B
.byte 0xC5, 0xF8, 0x44, 0x6F
.byte 0xC5, 0xF8, 0x44, 0x3F
.byte 0xC5, 0xF9, 0x44, 0x9B
.byte 0xC5, 0xF9, 0x44, 0x6F
.byte 0xC5, 0xF9, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x44, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x44, 0x3F
.byte 0xC5, 0xEC, 0x45, 0x9B
.byte 0xC5, 0xEC, 0x45, 0x6F
.byte 0xC5, 0xEC, 0x45, 0x3F
.byte 0xC5, 0xED, 0x45, 0x9B
.byte 0xC5, 0xED, 0x45, 0x6F
.byte 0xC5, 0xED, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x45, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x45, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x45, 0x3F
.byte 0xC5, 0xF8, 0x98, 0x9B
.byte 0xC5, 0xF8, 0x98, 0x6F
.byte 0xC5, 0xF8, 0x98, 0x3F
.byte 0xC5, 0xF9, 0x98, 0x9B
.byte 0xC5, 0xF9, 0x98, 0x6F
.byte 0xC5, 0xF9, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x98, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x98, 0x3F
.byte 0xC5, 0xEC, 0x46, 0x9B
.byte 0xC5, 0xEC, 0x46, 0x6F
.byte 0xC5, 0xEC, 0x46, 0x3F
.byte 0xC5, 0xED, 0x46, 0x9B
.byte 0xC5, 0xED, 0x46, 0x6F
.byte 0xC5, 0xED, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x46, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x46, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x46, 0x3F
.byte 0xC5, 0xEC, 0x47, 0x9B
.byte 0xC5, 0xEC, 0x47, 0x6F
.byte 0xC5, 0xEC, 0x47, 0x3F
.byte 0xC5, 0xED, 0x47, 0x9B
.byte 0xC5, 0xED, 0x47, 0x6F
.byte 0xC5, 0xED, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xEC, 0x47, 0x3F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x9B
.byte 0xC4, 0xE1, 0xED, 0x47, 0x6F
.byte 0xC4, 0xE1, 0xED, 0x47, 0x3F
.byte 0xC5, 0xF8, 0x99, 0x9B
.byte 0xC5, 0xF8, 0x99, 0x6F
.byte 0xC5, 0xF8, 0x99, 0x3F
.byte 0xC5, 0xF9, 0x99, 0x9B
.byte 0xC5, 0xF9, 0x99, 0x6F
.byte 0xC5, 0xF9, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF8, 0x99, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x99, 0x3F
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x30, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x31, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x32, 0x04, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0xF9, 0x33, 0x04, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x8F, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x6A, 0x01
.byte 0xC4, 0xE3, 0x79, 0x33, 0x04, 0x01
.byte 0xC5, 0xF8, 0x92, 0x9B
.byte 0xC5, 0xF8, 0x92, 0x6F
.byte 0xC5, 0xF8, 0x92, 0x3F
.byte 0xC5, 0xF9, 0x92, 0x9B
.byte 0xC5, 0xF9, 0x92, 0x6F
.byte 0xC5, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xFB, 0x92, 0x9B
.byte 0xC5, 0xFB, 0x92, 0x6F
.byte 0xC5, 0xFB, 0x92, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x92, 0x3F
.byte 0xC5, 0xF8, 0x93, 0x9B
.byte 0xC5, 0xF8, 0x93, 0x6F
.byte 0xC5, 0xF8, 0x93, 0x3F
.byte 0xC5, 0xF9, 0x93, 0x9B
.byte 0xC5, 0xF9, 0x93, 0x6F
.byte 0xC5, 0xF9, 0x93, 0x3F
.byte 0xC5, 0xFB, 0x93, 0x9B
.byte 0xC5, 0xFB, 0x93, 0x6F
.byte 0xC5, 0xFB, 0x93, 0x3F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x9B
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x6F
.byte 0xC4, 0xE1, 0xF9, 0x93, 0x3F
# Deliberately malformed / truncated prefixes and sequences below — these
# check that the disassembler recovers gracefully at buffer boundaries.
.byte 0xc4, 0x62, 0x1, 0x1c, 0x41, 0x37
.byte 0x62, 0x72, 0xad, 0x08, 0x1c, 0x01
.byte 0x1
.byte 0x62, 0xf3, 0x7d, 0x28, 0x1b, 0xc8, 0x25
.byte 0x62, 0xf3
.byte 0x62, 0xf3, 0x75, 0x08, 0x23, 0xc2, 0x25
.byte 0x62
.byte 0x62, 0xf2, 0x7d, 0x28, 0x5b, 0x41, 0x37
|
stsp/binutils-ia16
| 1,576
|
gas/testsuite/gas/i386/x86-64-optimize-7.s
|
# Check 64bit instructions with optimized encoding
# Testcase body: same-register idioms (dest := src OP src with both sources
# equal) for EVEX-encoded AND-NOT / XOR / SUB families.  Each mnemonic is
# tried four ways — masked, unmasked with a low register, with a high
# destination register (zmm16), and with high source registers (zmm17) —
# to pin down exactly which forms the assembler may shrink to a shorter
# VEX/128-bit encoding and which must keep the original EVEX encoding.
.allow_index_reg
.text
_start:
vandnpd %zmm1, %zmm1, %zmm15{%k7}
vandnpd %zmm1, %zmm1, %zmm15
vandnpd %zmm1, %zmm1, %zmm16
vandnpd %zmm17, %zmm17, %zmm1
vandnps %zmm1, %zmm1, %zmm15{%k7}
vandnps %zmm1, %zmm1, %zmm15
vandnps %zmm1, %zmm1, %zmm16
vandnps %zmm17, %zmm17, %zmm1
vpandnd %zmm1, %zmm1, %zmm15{%k7}
vpandnd %zmm1, %zmm1, %zmm15
vpandnd %zmm1, %zmm1, %zmm16
vpandnd %zmm17, %zmm17, %zmm1
vpandnq %zmm1, %zmm1, %zmm15{%k7}
vpandnq %zmm1, %zmm1, %zmm15
vpandnq %zmm1, %zmm1, %zmm16
vpandnq %zmm17, %zmm17, %zmm1
vxorpd %zmm1, %zmm1, %zmm15{%k7}
vxorpd %zmm1, %zmm1, %zmm15
vxorpd %zmm1, %zmm1, %zmm16
vxorpd %zmm17, %zmm17, %zmm1
vxorps %zmm1, %zmm1, %zmm15{%k7}
vxorps %zmm1, %zmm1, %zmm15
vxorps %zmm1, %zmm1, %zmm16
vxorps %zmm17, %zmm17, %zmm1
vpxord %zmm1, %zmm1, %zmm15{%k7}
vpxord %zmm1, %zmm1, %zmm15
vpxord %zmm1, %zmm1, %zmm16
vpxord %zmm17, %zmm17, %zmm1
vpxorq %zmm1, %zmm1, %zmm15{%k7}
vpxorq %zmm1, %zmm1, %zmm15
vpxorq %zmm1, %zmm1, %zmm16
vpxorq %zmm17, %zmm17, %zmm1
vpsubb %zmm1, %zmm1, %zmm15{%k7}
vpsubb %zmm1, %zmm1, %zmm15
vpsubb %zmm1, %zmm1, %zmm16
vpsubb %zmm17, %zmm17, %zmm1
vpsubw %zmm1, %zmm1, %zmm15{%k7}
vpsubw %zmm1, %zmm1, %zmm15
vpsubw %zmm1, %zmm1, %zmm16
vpsubw %zmm17, %zmm17, %zmm1
vpsubd %zmm1, %zmm1, %zmm15{%k7}
vpsubd %zmm1, %zmm1, %zmm15
vpsubd %zmm1, %zmm1, %zmm16
vpsubd %zmm17, %zmm17, %zmm1
vpsubq %zmm1, %zmm1, %zmm15{%k7}
vpsubq %zmm1, %zmm1, %zmm15
vpsubq %zmm1, %zmm1, %zmm16
vpsubq %zmm17, %zmm17, %zmm1
|
stsp/binutils-ia16
| 73,353
|
gas/testsuite/gas/i386/x86-64-avx512dq.s
|
# Check 64bit AVX512DQ instructions
# Broadcast-load group: vbroadcastf/i{32x8,64x2,32x2}.  Each mnemonic is
# exercised with: plain register/memory forms, {%k7} merge-masking,
# {%k7}{z} zero-masking, a SIB-addressed operand, and displacement pairs
# straddling the disp8*N compression limit (the "Disp8" lines fit the
# compressed 8-bit displacement; their neighbors force disp32).
.allow_index_reg
.text
_start:
	vbroadcastf32x8	(%rcx), %zmm30	 # AVX512DQ
	vbroadcastf32x8	(%rcx), %zmm30{%k7}	 # AVX512DQ
	vbroadcastf32x8	(%rcx), %zmm30{%k7}{z}	 # AVX512DQ
	vbroadcastf32x8	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vbroadcastf32x8	4064(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcastf32x8	4096(%rdx), %zmm30	 # AVX512DQ
	vbroadcastf32x8	-4096(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcastf32x8	-4128(%rdx), %zmm30	 # AVX512DQ
	vbroadcastf64x2	(%rcx), %zmm30	 # AVX512DQ
	vbroadcastf64x2	(%rcx), %zmm30{%k7}	 # AVX512DQ
	vbroadcastf64x2	(%rcx), %zmm30{%k7}{z}	 # AVX512DQ
	vbroadcastf64x2	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vbroadcastf64x2	2032(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcastf64x2	2048(%rdx), %zmm30	 # AVX512DQ
	vbroadcastf64x2	-2048(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcastf64x2	-2064(%rdx), %zmm30	 # AVX512DQ
	vbroadcasti32x8	(%rcx), %zmm30	 # AVX512DQ
	vbroadcasti32x8	(%rcx), %zmm30{%k7}	 # AVX512DQ
	vbroadcasti32x8	(%rcx), %zmm30{%k7}{z}	 # AVX512DQ
	vbroadcasti32x8	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vbroadcasti32x8	4064(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcasti32x8	4096(%rdx), %zmm30	 # AVX512DQ
	vbroadcasti32x8	-4096(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcasti32x8	-4128(%rdx), %zmm30	 # AVX512DQ
	vbroadcasti64x2	(%rcx), %zmm30	 # AVX512DQ
	vbroadcasti64x2	(%rcx), %zmm30{%k7}	 # AVX512DQ
	vbroadcasti64x2	(%rcx), %zmm30{%k7}{z}	 # AVX512DQ
	vbroadcasti64x2	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vbroadcasti64x2	2032(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcasti64x2	2048(%rdx), %zmm30	 # AVX512DQ
	vbroadcasti64x2	-2048(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcasti64x2	-2064(%rdx), %zmm30	 # AVX512DQ
	vbroadcastf32x2	%xmm31, %zmm30	 # AVX512DQ
	vbroadcastf32x2	%xmm31, %zmm30{%k7}	 # AVX512DQ
	vbroadcastf32x2	%xmm31, %zmm30{%k7}{z}	 # AVX512DQ
	vbroadcastf32x2	(%rcx), %zmm30	 # AVX512DQ
	vbroadcastf32x2	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vbroadcastf32x2	1016(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcastf32x2	1024(%rdx), %zmm30	 # AVX512DQ
	vbroadcastf32x2	-1024(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcastf32x2	-1032(%rdx), %zmm30	 # AVX512DQ
# Conversion group: 64-bit integer <-> FP conversions introduced by AVX512DQ.
# Each mnemonic is tested with register forms (plain, {%k7}, {%k7}{z}), all
# four static rounding modes {rn,ru,rd,rz}-sae, memory forms, {1toN} embedded
# broadcast, and disp8*N-compression boundary displacements.
	vcvtpd2qq	%zmm29, %zmm30	 # AVX512DQ
	vcvtpd2qq	%zmm29, %zmm30{%k7}	 # AVX512DQ
	vcvtpd2qq	%zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvtpd2qq	{rn-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2qq	{ru-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2qq	{rd-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2qq	{rz-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2qq	(%rcx), %zmm30	 # AVX512DQ
	vcvtpd2qq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvtpd2qq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvtpd2qq	8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtpd2qq	8192(%rdx), %zmm30	 # AVX512DQ
	vcvtpd2qq	-8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtpd2qq	-8256(%rdx), %zmm30	 # AVX512DQ
	vcvtpd2qq	1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtpd2qq	1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtpd2qq	-1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtpd2qq	-1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtpd2uqq	%zmm29, %zmm30	 # AVX512DQ
	vcvtpd2uqq	%zmm29, %zmm30{%k7}	 # AVX512DQ
	vcvtpd2uqq	%zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvtpd2uqq	{rn-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2uqq	{ru-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2uqq	{rd-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2uqq	{rz-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtpd2uqq	(%rcx), %zmm30	 # AVX512DQ
	vcvtpd2uqq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvtpd2uqq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvtpd2uqq	8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtpd2uqq	8192(%rdx), %zmm30	 # AVX512DQ
	vcvtpd2uqq	-8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtpd2uqq	-8256(%rdx), %zmm30	 # AVX512DQ
	vcvtpd2uqq	1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtpd2uqq	1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtpd2uqq	-1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtpd2uqq	-1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtps2qq	%ymm29, %zmm30	 # AVX512DQ
	vcvtps2qq	%ymm29, %zmm30{%k7}	 # AVX512DQ
	vcvtps2qq	%ymm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvtps2qq	{rn-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2qq	{ru-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2qq	{rd-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2qq	{rz-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2qq	(%rcx), %zmm30	 # AVX512DQ
	vcvtps2qq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvtps2qq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvtps2qq	4064(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtps2qq	4096(%rdx), %zmm30	 # AVX512DQ
	vcvtps2qq	-4096(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtps2qq	-4128(%rdx), %zmm30	 # AVX512DQ
	vcvtps2qq	508(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtps2qq	512(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtps2qq	-512(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtps2qq	-516(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtps2uqq	%ymm29, %zmm30	 # AVX512DQ
	vcvtps2uqq	%ymm29, %zmm30{%k7}	 # AVX512DQ
	vcvtps2uqq	%ymm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvtps2uqq	{rn-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2uqq	{ru-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2uqq	{rd-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2uqq	{rz-sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvtps2uqq	(%rcx), %zmm30	 # AVX512DQ
	vcvtps2uqq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvtps2uqq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvtps2uqq	4064(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtps2uqq	4096(%rdx), %zmm30	 # AVX512DQ
	vcvtps2uqq	-4096(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtps2uqq	-4128(%rdx), %zmm30	 # AVX512DQ
	vcvtps2uqq	508(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtps2uqq	512(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtps2uqq	-512(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtps2uqq	-516(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtqq2pd	%zmm29, %zmm30	 # AVX512DQ
	vcvtqq2pd	%zmm29, %zmm30{%k7}	 # AVX512DQ
	vcvtqq2pd	%zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvtqq2pd	{rn-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtqq2pd	{ru-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtqq2pd	{rd-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtqq2pd	{rz-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtqq2pd	(%rcx), %zmm30	 # AVX512DQ
	vcvtqq2pd	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvtqq2pd	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvtqq2pd	8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtqq2pd	8192(%rdx), %zmm30	 # AVX512DQ
	vcvtqq2pd	-8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtqq2pd	-8256(%rdx), %zmm30	 # AVX512DQ
	vcvtqq2pd	1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtqq2pd	1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtqq2pd	-1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtqq2pd	-1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtqq2ps	%zmm29, %ymm30	 # AVX512DQ
	vcvtqq2ps	%zmm29, %ymm30{%k7}	 # AVX512DQ
	vcvtqq2ps	%zmm29, %ymm30{%k7}{z}	 # AVX512DQ
	vcvtqq2ps	{rn-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtqq2ps	{ru-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtqq2ps	{rd-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtqq2ps	{rz-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtqq2ps	(%rcx), %ymm30	 # AVX512DQ
	vcvtqq2ps	0x123(%rax,%r14,8), %ymm30	 # AVX512DQ
	vcvtqq2ps	(%rcx){1to8}, %ymm30	 # AVX512DQ
	vcvtqq2ps	8128(%rdx), %ymm30	 # AVX512DQ Disp8
	vcvtqq2ps	8192(%rdx), %ymm30	 # AVX512DQ
	vcvtqq2ps	-8192(%rdx), %ymm30	 # AVX512DQ Disp8
	vcvtqq2ps	-8256(%rdx), %ymm30	 # AVX512DQ
	vcvtqq2ps	1016(%rdx){1to8}, %ymm30	 # AVX512DQ Disp8
	vcvtqq2ps	1024(%rdx){1to8}, %ymm30	 # AVX512DQ
	vcvtqq2ps	-1024(%rdx){1to8}, %ymm30	 # AVX512DQ Disp8
	vcvtqq2ps	-1032(%rdx){1to8}, %ymm30	 # AVX512DQ
	vcvtuqq2pd	%zmm29, %zmm30	 # AVX512DQ
	vcvtuqq2pd	%zmm29, %zmm30{%k7}	 # AVX512DQ
	vcvtuqq2pd	%zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvtuqq2pd	{rn-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtuqq2pd	{ru-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtuqq2pd	{rd-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtuqq2pd	{rz-sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvtuqq2pd	(%rcx), %zmm30	 # AVX512DQ
	vcvtuqq2pd	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvtuqq2pd	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvtuqq2pd	8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtuqq2pd	8192(%rdx), %zmm30	 # AVX512DQ
	vcvtuqq2pd	-8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvtuqq2pd	-8256(%rdx), %zmm30	 # AVX512DQ
	vcvtuqq2pd	1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtuqq2pd	1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtuqq2pd	-1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvtuqq2pd	-1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvtuqq2ps	%zmm29, %ymm30	 # AVX512DQ
	vcvtuqq2ps	%zmm29, %ymm30{%k7}	 # AVX512DQ
	vcvtuqq2ps	%zmm29, %ymm30{%k7}{z}	 # AVX512DQ
	vcvtuqq2ps	{rn-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtuqq2ps	{ru-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtuqq2ps	{rd-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtuqq2ps	{rz-sae}, %zmm29, %ymm30	 # AVX512DQ
	vcvtuqq2ps	(%rcx), %ymm30	 # AVX512DQ
	vcvtuqq2ps	0x123(%rax,%r14,8), %ymm30	 # AVX512DQ
	vcvtuqq2ps	(%rcx){1to8}, %ymm30	 # AVX512DQ
	vcvtuqq2ps	8128(%rdx), %ymm30	 # AVX512DQ Disp8
	vcvtuqq2ps	8192(%rdx), %ymm30	 # AVX512DQ
	vcvtuqq2ps	-8192(%rdx), %ymm30	 # AVX512DQ Disp8
	vcvtuqq2ps	-8256(%rdx), %ymm30	 # AVX512DQ
	vcvtuqq2ps	1016(%rdx){1to8}, %ymm30	 # AVX512DQ Disp8
	vcvtuqq2ps	1024(%rdx){1to8}, %ymm30	 # AVX512DQ
	vcvtuqq2ps	-1024(%rdx){1to8}, %ymm30	 # AVX512DQ Disp8
	vcvtuqq2ps	-1032(%rdx){1to8}, %ymm30	 # AVX512DQ
# Extract-to-register and fpclass group.  vextract{f,i}{64x2,32x8} pull a
# 128/256-bit lane out of a zmm; vfpclass{pd,ps,sd,ss} classify FP values
# into a mask register.  The explicit "z" suffix (vfpclasspdz/psz) pins the
# 512-bit operand size for memory operands, where it is otherwise ambiguous.
	vextractf64x2	$0xab, %zmm29, %xmm30	 # AVX512DQ
	vextractf64x2	$0xab, %zmm29, %xmm30{%k7}	 # AVX512DQ
	vextractf64x2	$0xab, %zmm29, %xmm30{%k7}{z}	 # AVX512DQ
	vextractf64x2	$123, %zmm29, %xmm30	 # AVX512DQ
	vextractf32x8	$0xab, %zmm29, %ymm30	 # AVX512DQ
	vextractf32x8	$0xab, %zmm29, %ymm30{%k7}	 # AVX512DQ
	vextractf32x8	$0xab, %zmm29, %ymm30{%k7}{z}	 # AVX512DQ
	vextractf32x8	$123, %zmm29, %ymm30	 # AVX512DQ
	vextracti64x2	$0xab, %zmm29, %xmm30	 # AVX512DQ
	vextracti64x2	$0xab, %zmm29, %xmm30{%k7}	 # AVX512DQ
	vextracti64x2	$0xab, %zmm29, %xmm30{%k7}{z}	 # AVX512DQ
	vextracti64x2	$123, %zmm29, %xmm30	 # AVX512DQ
	vextracti32x8	$0xab, %zmm29, %ymm30	 # AVX512DQ
	vextracti32x8	$0xab, %zmm29, %ymm30{%k7}	 # AVX512DQ
	vextracti32x8	$0xab, %zmm29, %ymm30{%k7}{z}	 # AVX512DQ
	vextracti32x8	$123, %zmm29, %ymm30	 # AVX512DQ
	vfpclasspd	$0xab, %zmm30, %k5	 # AVX512DQ
	vfpclasspd	$0xab, %zmm30, %k5{%k7}	 # AVX512DQ
	vfpclasspd	$123, %zmm30, %k5	 # AVX512DQ
	vfpclasspdz	$123, (%rcx), %k5	 # AVX512DQ
	vfpclasspdz	$123, 0x123(%rax,%r14,8), %k5	 # AVX512DQ
	vfpclasspd	$123, (%rcx){1to8}, %k5	 # AVX512DQ
	vfpclasspdz	$123, 8128(%rdx), %k5	 # AVX512DQ Disp8
	vfpclasspdz	$123, 8192(%rdx), %k5	 # AVX512DQ
	vfpclasspdz	$123, -8192(%rdx), %k5	 # AVX512DQ Disp8
	vfpclasspdz	$123, -8256(%rdx), %k5	 # AVX512DQ
	vfpclasspdz	$123, 1016(%rdx){1to8}, %k5	 # AVX512DQ Disp8
	vfpclasspdz	$123, 1024(%rdx){1to8}, %k5	 # AVX512DQ
	vfpclasspdz	$123, -1024(%rdx){1to8}, %k5	 # AVX512DQ Disp8
	vfpclasspdz	$123, -1032(%rdx){1to8}, %k5	 # AVX512DQ
	vfpclassps	$0xab, %zmm30, %k5	 # AVX512DQ
	vfpclassps	$0xab, %zmm30, %k5{%k7}	 # AVX512DQ
	vfpclassps	$123, %zmm30, %k5	 # AVX512DQ
	vfpclasspsz	$123, (%rcx), %k5	 # AVX512DQ
	vfpclasspsz	$123, 0x123(%rax,%r14,8), %k5	 # AVX512DQ
	vfpclassps	$123, (%rcx){1to16}, %k5	 # AVX512DQ
	vfpclasspsz	$123, 8128(%rdx), %k5	 # AVX512DQ Disp8
	vfpclasspsz	$123, 8192(%rdx), %k5	 # AVX512DQ
	vfpclasspsz	$123, -8192(%rdx), %k5	 # AVX512DQ Disp8
	vfpclasspsz	$123, -8256(%rdx), %k5	 # AVX512DQ
	vfpclasspsz	$123, 508(%rdx){1to16}, %k5	 # AVX512DQ Disp8
	vfpclasspsz	$123, 512(%rdx){1to16}, %k5	 # AVX512DQ
	vfpclasspsz	$123, -512(%rdx){1to16}, %k5	 # AVX512DQ Disp8
	vfpclasspsz	$123, -516(%rdx){1to16}, %k5	 # AVX512DQ
	vfpclasssd	$0xab, %xmm30, %k5	 # AVX512DQ
	vfpclasssd	$0xab, %xmm30, %k5{%k7}	 # AVX512DQ
	vfpclasssd	$123, %xmm30, %k5	 # AVX512DQ
	vfpclasssd	$123, (%rcx), %k5	 # AVX512DQ
	vfpclasssd	$123, 0x123(%rax,%r14,8), %k5	 # AVX512DQ
	vfpclasssd	$123, 1016(%rdx), %k5	 # AVX512DQ Disp8
	vfpclasssd	$123, 1024(%rdx), %k5	 # AVX512DQ
	vfpclasssd	$123, -1024(%rdx), %k5	 # AVX512DQ Disp8
	vfpclasssd	$123, -1032(%rdx), %k5	 # AVX512DQ
	vfpclassss	$0xab, %xmm30, %k5	 # AVX512DQ
	vfpclassss	$0xab, %xmm30, %k5{%k7}	 # AVX512DQ
	vfpclassss	$123, %xmm30, %k5	 # AVX512DQ
	vfpclassss	$123, (%rcx), %k5	 # AVX512DQ
	vfpclassss	$123, 0x123(%rax,%r14,8), %k5	 # AVX512DQ
	vfpclassss	$123, 508(%rdx), %k5	 # AVX512DQ Disp8
	vfpclassss	$123, 512(%rdx), %k5	 # AVX512DQ
	vfpclassss	$123, -512(%rdx), %k5	 # AVX512DQ Disp8
	vfpclassss	$123, -516(%rdx), %k5	 # AVX512DQ
# Insert group: vinsert{f,i}{64x2,32x8} place a 128/256-bit lane into a zmm,
# tested with immediates 0xab/123, masking, SIB addressing, and disp8*N
# compression boundaries (2048-byte step for x2, 4096 for x8 forms).
	vinsertf64x2	$0xab, %xmm28, %zmm29, %zmm30	 # AVX512DQ
	vinsertf64x2	$0xab, %xmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vinsertf64x2	$0xab, %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vinsertf64x2	$123, %xmm28, %zmm29, %zmm30	 # AVX512DQ
	vinsertf64x2	$123, (%rcx), %zmm29, %zmm30	 # AVX512DQ
	vinsertf64x2	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vinsertf64x2	$123, 2032(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinsertf64x2	$123, 2048(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinsertf64x2	$123, -2048(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinsertf64x2	$123, -2064(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinsertf32x8	$0xab, %ymm28, %zmm29, %zmm30	 # AVX512DQ
	vinsertf32x8	$0xab, %ymm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vinsertf32x8	$0xab, %ymm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vinsertf32x8	$123, %ymm28, %zmm29, %zmm30	 # AVX512DQ
	vinsertf32x8	$123, (%rcx), %zmm29, %zmm30	 # AVX512DQ
	vinsertf32x8	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vinsertf32x8	$123, 4064(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinsertf32x8	$123, 4096(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinsertf32x8	$123, -4096(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinsertf32x8	$123, -4128(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinserti64x2	$0xab, %xmm28, %zmm29, %zmm30	 # AVX512DQ
	vinserti64x2	$0xab, %xmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vinserti64x2	$0xab, %xmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vinserti64x2	$123, %xmm28, %zmm29, %zmm30	 # AVX512DQ
	vinserti64x2	$123, (%rcx), %zmm29, %zmm30	 # AVX512DQ
	vinserti64x2	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vinserti64x2	$123, 2032(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinserti64x2	$123, 2048(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinserti64x2	$123, -2048(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinserti64x2	$123, -2064(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinserti32x8	$0xab, %ymm28, %zmm29, %zmm30	 # AVX512DQ
	vinserti32x8	$0xab, %ymm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vinserti32x8	$0xab, %ymm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vinserti32x8	$123, %ymm28, %zmm29, %zmm30	 # AVX512DQ
	vinserti32x8	$123, (%rcx), %zmm29, %zmm30	 # AVX512DQ
	vinserti32x8	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vinserti32x8	$123, 4064(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinserti32x8	$123, 4096(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vinserti32x8	$123, -4096(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vinserti32x8	$123, -4128(%rdx), %zmm29, %zmm30	 # AVX512DQ
# Misc group: vbroadcasti32x2, EVEX-encoded vpextrd/q and vpinsrd/q (GPR and
# memory forms, including r13d/r8 to exercise REX-style register extension in
# EVEX), and vpmullq (AVX512DQ-only 64-bit multiply) with embedded broadcast.
	vbroadcasti32x2	%xmm31, %zmm30	 # AVX512DQ
	vbroadcasti32x2	%xmm31, %zmm30{%k7}	 # AVX512DQ
	vbroadcasti32x2	%xmm31, %zmm30{%k7}{z}	 # AVX512DQ
	vbroadcasti32x2	(%rcx), %zmm30	 # AVX512DQ
	vbroadcasti32x2	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vbroadcasti32x2	1016(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcasti32x2	1024(%rdx), %zmm30	 # AVX512DQ
	vbroadcasti32x2	-1024(%rdx), %zmm30	 # AVX512DQ Disp8
	vbroadcasti32x2	-1032(%rdx), %zmm30	 # AVX512DQ
	vpextrd	$0xab, %xmm29, %eax	 # AVX512DQ
	vpextrd	$123, %xmm29, %eax	 # AVX512DQ
	vpextrd	$123, %xmm29, %ebp	 # AVX512DQ
	vpextrd	$123, %xmm29, %r13d	 # AVX512DQ
	vpextrd	$123, %xmm29, (%rcx)	 # AVX512DQ
	vpextrd	$123, %xmm29, 0x123(%rax,%r14,8)	 # AVX512DQ
	vpextrd	$123, %xmm29, 508(%rdx)	 # AVX512DQ Disp8
	vpextrd	$123, %xmm29, 512(%rdx)	 # AVX512DQ
	vpextrd	$123, %xmm29, -512(%rdx)	 # AVX512DQ Disp8
	vpextrd	$123, %xmm29, -516(%rdx)	 # AVX512DQ
	vpextrq	$0xab, %xmm29, %rax	 # AVX512DQ
	vpextrq	$123, %xmm29, %rax	 # AVX512DQ
	vpextrq	$123, %xmm29, %r8	 # AVX512DQ
	vpextrq	$123, %xmm29, (%rcx)	 # AVX512DQ
	vpextrq	$123, %xmm29, 0x123(%rax,%r14,8)	 # AVX512DQ
	vpextrq	$123, %xmm29, 1016(%rdx)	 # AVX512DQ Disp8
	vpextrq	$123, %xmm29, 1024(%rdx)	 # AVX512DQ
	vpextrq	$123, %xmm29, -1024(%rdx)	 # AVX512DQ Disp8
	vpextrq	$123, %xmm29, -1032(%rdx)	 # AVX512DQ
	vpinsrd	$0xab, %eax, %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, %eax, %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, %ebp, %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, %r13d, %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, (%rcx), %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, 508(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vpinsrd	$123, 512(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vpinsrd	$123, -512(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vpinsrd	$123, -516(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$0xab, %rax, %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$123, %rax, %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$123, %r8, %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$123, (%rcx), %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$123, 1016(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vpinsrq	$123, 1024(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vpinsrq	$123, -1024(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vpinsrq	$123, -1032(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vpmullq	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vpmullq	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vpmullq	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vpmullq	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vpmullq	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vpmullq	(%rcx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vpmullq	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vpmullq	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vpmullq	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vpmullq	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vpmullq	1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vpmullq	1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vpmullq	-1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vpmullq	-1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
# Range group: vrange{pd,ps,sd,ss} with immediate control byte, {sae}
# suppress-all-exceptions forms, masking, embedded broadcast ({1to8}/{1to16})
# and disp8*N boundary displacements.
	vrangepd	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vrangepd	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vrangepd	$0xab, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, (%rcx), %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, (%rcx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangepd	$123, 8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, -8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangepd	$123, -8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangepd	$123, 1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vrangepd	$123, -1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangepd	$123, -1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$0xab, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vrangeps	$0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vrangeps	$0xab, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, {sae}, %zmm28, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, (%rcx), %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, (%rcx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, 8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangeps	$123, 8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, -8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangeps	$123, -8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, 508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangeps	$123, 512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vrangeps	$123, -512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vrangeps	$123, -516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vrangesd	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512DQ
	vrangesd	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512DQ
	vrangesd	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, (%rcx), %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, 1016(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vrangesd	$123, 1024(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vrangesd	$123, -1024(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vrangesd	$123, -1032(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vrangess	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512DQ
	vrangess	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512DQ
	vrangess	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, (%rcx), %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, 508(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vrangess	$123, 512(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vrangess	$123, -512(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vrangess	$123, -516(%rdx), %xmm29, %xmm30	 # AVX512DQ
# FP-logic group: AVX512DQ adds EVEX-encoded vand/vandn/vor/vxor {pd,ps} on
# zmm operands.  pd forms use {1to8} (64-bit element broadcast), ps forms
# {1to16} (32-bit), each with masking and disp8*N boundary displacements.
	vandpd	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vandpd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vandpd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vandpd	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vandpd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vandpd	(%rcx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vandpd	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandpd	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandpd	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandpd	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandpd	1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandpd	1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vandpd	-1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandpd	-1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vandps	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vandps	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vandps	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vandps	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vandps	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vandps	(%rcx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vandps	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandps	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandps	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandps	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandps	508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandps	512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vandps	-512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandps	-516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vandnpd	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vandnpd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vandnpd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vandnpd	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vandnpd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vandnpd	(%rcx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vandnpd	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnpd	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandnpd	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnpd	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandnpd	1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnpd	1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vandnpd	-1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnpd	-1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vandnps	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vandnps	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vandnps	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vandnps	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vandnps	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vandnps	(%rcx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vandnps	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnps	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandnps	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnps	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vandnps	508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnps	512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vandnps	-512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vandnps	-516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vorpd	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vorpd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vorpd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vorpd	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vorpd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vorpd	(%rcx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vorpd	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vorpd	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vorpd	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vorpd	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vorpd	1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vorpd	1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vorpd	-1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vorpd	-1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vorps	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vorps	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vorps	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vorps	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vorps	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vorps	(%rcx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vorps	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vorps	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vorps	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vorps	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vorps	508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vorps	512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vorps	-512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vorps	-516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vxorpd	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vxorpd	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vxorpd	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vxorpd	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vxorpd	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vxorpd	(%rcx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vxorpd	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorpd	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vxorpd	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorpd	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vxorpd	1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorpd	1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vxorpd	-1024(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorpd	-1032(%rdx){1to8}, %zmm29, %zmm30	 # AVX512DQ
	vxorps	%zmm28, %zmm29, %zmm30	 # AVX512DQ
	vxorps	%zmm28, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vxorps	%zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vxorps	(%rcx), %zmm29, %zmm30	 # AVX512DQ
	vxorps	0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512DQ
	vxorps	(%rcx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vxorps	8128(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorps	8192(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vxorps	-8192(%rdx), %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorps	-8256(%rdx), %zmm29, %zmm30	 # AVX512DQ
	vxorps	508(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorps	512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
	vxorps	-512(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ Disp8
	vxorps	-516(%rdx){1to16}, %zmm29, %zmm30	 # AVX512DQ
# Reduce group: vreduce{pd,ps,sd,ss} with immediate control byte, {sae}
# forms, masking, embedded broadcast and disp8*N boundary displacements.
	vreducepd	$0xab, %zmm29, %zmm30	 # AVX512DQ
	vreducepd	$0xab, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vreducepd	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vreducepd	$0xab, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreducepd	$123, %zmm29, %zmm30	 # AVX512DQ
	vreducepd	$123, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreducepd	$123, (%rcx), %zmm30	 # AVX512DQ
	vreducepd	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vreducepd	$123, (%rcx){1to8}, %zmm30	 # AVX512DQ
	vreducepd	$123, 8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vreducepd	$123, 8192(%rdx), %zmm30	 # AVX512DQ
	vreducepd	$123, -8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vreducepd	$123, -8256(%rdx), %zmm30	 # AVX512DQ
	vreducepd	$123, 1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vreducepd	$123, 1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vreducepd	$123, -1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vreducepd	$123, -1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vreduceps	$0xab, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$0xab, %zmm29, %zmm30{%k7}	 # AVX512DQ
	vreduceps	$0xab, %zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vreduceps	$0xab, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$123, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$123, {sae}, %zmm29, %zmm30	 # AVX512DQ
	vreduceps	$123, (%rcx), %zmm30	 # AVX512DQ
	vreduceps	$123, 0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vreduceps	$123, (%rcx){1to16}, %zmm30	 # AVX512DQ
	vreduceps	$123, 8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vreduceps	$123, 8192(%rdx), %zmm30	 # AVX512DQ
	vreduceps	$123, -8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vreduceps	$123, -8256(%rdx), %zmm30	 # AVX512DQ
	vreduceps	$123, 508(%rdx){1to16}, %zmm30	 # AVX512DQ Disp8
	vreduceps	$123, 512(%rdx){1to16}, %zmm30	 # AVX512DQ
	vreduceps	$123, -512(%rdx){1to16}, %zmm30	 # AVX512DQ Disp8
	vreduceps	$123, -516(%rdx){1to16}, %zmm30	 # AVX512DQ
	vreducesd	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512DQ
	vreducesd	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512DQ
	vreducesd	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, (%rcx), %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, 1016(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vreducesd	$123, 1024(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vreducesd	$123, -1024(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vreducesd	$123, -1032(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vreducess	$0xab, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$0xab, %xmm28, %xmm29, %xmm30{%k7}	 # AVX512DQ
	vreducess	$0xab, %xmm28, %xmm29, %xmm30{%k7}{z}	 # AVX512DQ
	vreducess	$0xab, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, {sae}, %xmm28, %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, (%rcx), %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, 0x123(%rax,%r14,8), %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, 508(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vreducess	$123, 512(%rdx), %xmm29, %xmm30	 # AVX512DQ
	vreducess	$123, -512(%rdx), %xmm29, %xmm30	 # AVX512DQ Disp8
	vreducess	$123, -516(%rdx), %xmm29, %xmm30	 # AVX512DQ
# Mask-register group: byte-granular k-register ops (kandb/korb/kxorb/...),
# shifts, moves between k-registers, memory and 32-bit GPRs, plus the
# AVX512DQ-introduced ktestw/kaddw word forms.
	kandb	%k7, %k6, %k5	 # AVX512DQ
	kandnb	%k7, %k6, %k5	 # AVX512DQ
	korb	%k7, %k6, %k5	 # AVX512DQ
	kxnorb	%k7, %k6, %k5	 # AVX512DQ
	kxorb	%k7, %k6, %k5	 # AVX512DQ
	knotb	%k6, %k5	 # AVX512DQ
	kortestb	%k6, %k5	 # AVX512DQ
	ktestw	%k6, %k5	 # AVX512DQ
	ktestb	%k6, %k5	 # AVX512DQ
	kshiftrb	$0xab, %k6, %k5	 # AVX512DQ
	kshiftrb	$123, %k6, %k5	 # AVX512DQ
	kshiftlb	$0xab, %k6, %k5	 # AVX512DQ
	kshiftlb	$123, %k6, %k5	 # AVX512DQ
	kmovb	%k6, %k5	 # AVX512DQ
	kmovb	(%rcx), %k5	 # AVX512DQ
	kmovb	0x123(%rax,%r14,8), %k5	 # AVX512DQ
	kmovb	%k5, (%rcx)	 # AVX512DQ
	kmovb	%k5, 0x123(%rax,%r14,8)	 # AVX512DQ
	kmovb	%eax, %k5	 # AVX512DQ
	kmovb	%ebp, %k5	 # AVX512DQ
	kmovb	%r13d, %k5	 # AVX512DQ
	kmovb	%k5, %eax	 # AVX512DQ
	kmovb	%k5, %ebp	 # AVX512DQ
	kmovb	%k5, %r13d	 # AVX512DQ
	kaddw	%k7, %k6, %k5	 # AVX512DQ
	kaddb	%k7, %k6, %k5	 # AVX512DQ
# Extract-to-memory group: same vextract{f,i}{64x2,32x8} mnemonics as above,
# but with memory destinations (merge-masking only — no {z} on stores) and
# disp8*N boundary displacements.
	vextractf64x2	$0xab, %zmm30, (%rcx)	 # AVX512DQ
	vextractf64x2	$0xab, %zmm30, (%rcx){%k7}	 # AVX512DQ
	vextractf64x2	$123, %zmm30, (%rcx)	 # AVX512DQ
	vextractf64x2	$123, %zmm30, 0x123(%rax,%r14,8)	 # AVX512DQ
	vextractf64x2	$123, %zmm30, 2032(%rdx)	 # AVX512DQ Disp8
	vextractf64x2	$123, %zmm30, 2048(%rdx)	 # AVX512DQ
	vextractf64x2	$123, %zmm30, -2048(%rdx)	 # AVX512DQ Disp8
	vextractf64x2	$123, %zmm30, -2064(%rdx)	 # AVX512DQ
	vextractf32x8	$0xab, %zmm30, (%rcx)	 # AVX512DQ
	vextractf32x8	$0xab, %zmm30, (%rcx){%k7}	 # AVX512DQ
	vextractf32x8	$123, %zmm30, (%rcx)	 # AVX512DQ
	vextractf32x8	$123, %zmm30, 0x123(%rax,%r14,8)	 # AVX512DQ
	vextractf32x8	$123, %zmm30, 4064(%rdx)	 # AVX512DQ Disp8
	vextractf32x8	$123, %zmm30, 4096(%rdx)	 # AVX512DQ
	vextractf32x8	$123, %zmm30, -4096(%rdx)	 # AVX512DQ Disp8
	vextractf32x8	$123, %zmm30, -4128(%rdx)	 # AVX512DQ
	vextracti64x2	$0xab, %zmm30, (%rcx)	 # AVX512DQ
	vextracti64x2	$0xab, %zmm30, (%rcx){%k7}	 # AVX512DQ
	vextracti64x2	$123, %zmm30, (%rcx)	 # AVX512DQ
	vextracti64x2	$123, %zmm30, 0x123(%rax,%r14,8)	 # AVX512DQ
	vextracti64x2	$123, %zmm30, 2032(%rdx)	 # AVX512DQ Disp8
	vextracti64x2	$123, %zmm30, 2048(%rdx)	 # AVX512DQ
	vextracti64x2	$123, %zmm30, -2048(%rdx)	 # AVX512DQ Disp8
	vextracti64x2	$123, %zmm30, -2064(%rdx)	 # AVX512DQ
	vextracti32x8	$0xab, %zmm30, (%rcx)	 # AVX512DQ
	vextracti32x8	$0xab, %zmm30, (%rcx){%k7}	 # AVX512DQ
	vextracti32x8	$123, %zmm30, (%rcx)	 # AVX512DQ
	vextracti32x8	$123, %zmm30, 0x123(%rax,%r14,8)	 # AVX512DQ
	vextracti32x8	$123, %zmm30, 4064(%rdx)	 # AVX512DQ Disp8
	vextracti32x8	$123, %zmm30, 4096(%rdx)	 # AVX512DQ
	vextracti32x8	$123, %zmm30, -4096(%rdx)	 # AVX512DQ Disp8
	vextracti32x8	$123, %zmm30, -4128(%rdx)	 # AVX512DQ
# Truncating-conversion group (vcvtt* use {sae} only — no rounding-mode
# selection, truncation is implied) plus the mask<->vector moves
# vpmov{d,q}2m / vpmovm2{d,q}.
	vcvttpd2qq	%zmm29, %zmm30	 # AVX512DQ
	vcvttpd2qq	%zmm29, %zmm30{%k7}	 # AVX512DQ
	vcvttpd2qq	%zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvttpd2qq	{sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvttpd2qq	(%rcx), %zmm30	 # AVX512DQ
	vcvttpd2qq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvttpd2qq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvttpd2qq	8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttpd2qq	8192(%rdx), %zmm30	 # AVX512DQ
	vcvttpd2qq	-8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttpd2qq	-8256(%rdx), %zmm30	 # AVX512DQ
	vcvttpd2qq	1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttpd2qq	1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttpd2qq	-1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttpd2qq	-1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttpd2uqq	%zmm29, %zmm30	 # AVX512DQ
	vcvttpd2uqq	%zmm29, %zmm30{%k7}	 # AVX512DQ
	vcvttpd2uqq	%zmm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvttpd2uqq	{sae}, %zmm29, %zmm30	 # AVX512DQ
	vcvttpd2uqq	(%rcx), %zmm30	 # AVX512DQ
	vcvttpd2uqq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvttpd2uqq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvttpd2uqq	8128(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttpd2uqq	8192(%rdx), %zmm30	 # AVX512DQ
	vcvttpd2uqq	-8192(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttpd2uqq	-8256(%rdx), %zmm30	 # AVX512DQ
	vcvttpd2uqq	1016(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttpd2uqq	1024(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttpd2uqq	-1024(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttpd2uqq	-1032(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttps2qq	%ymm29, %zmm30	 # AVX512DQ
	vcvttps2qq	%ymm29, %zmm30{%k7}	 # AVX512DQ
	vcvttps2qq	%ymm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvttps2qq	{sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvttps2qq	(%rcx), %zmm30	 # AVX512DQ
	vcvttps2qq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvttps2qq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvttps2qq	4064(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttps2qq	4096(%rdx), %zmm30	 # AVX512DQ
	vcvttps2qq	-4096(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttps2qq	-4128(%rdx), %zmm30	 # AVX512DQ
	vcvttps2qq	508(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttps2qq	512(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttps2qq	-512(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttps2qq	-516(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttps2uqq	%ymm29, %zmm30	 # AVX512DQ
	vcvttps2uqq	%ymm29, %zmm30{%k7}	 # AVX512DQ
	vcvttps2uqq	%ymm29, %zmm30{%k7}{z}	 # AVX512DQ
	vcvttps2uqq	{sae}, %ymm29, %zmm30	 # AVX512DQ
	vcvttps2uqq	(%rcx), %zmm30	 # AVX512DQ
	vcvttps2uqq	0x123(%rax,%r14,8), %zmm30	 # AVX512DQ
	vcvttps2uqq	(%rcx){1to8}, %zmm30	 # AVX512DQ
	vcvttps2uqq	4064(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttps2uqq	4096(%rdx), %zmm30	 # AVX512DQ
	vcvttps2uqq	-4096(%rdx), %zmm30	 # AVX512DQ Disp8
	vcvttps2uqq	-4128(%rdx), %zmm30	 # AVX512DQ
	vcvttps2uqq	508(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttps2uqq	512(%rdx){1to8}, %zmm30	 # AVX512DQ
	vcvttps2uqq	-512(%rdx){1to8}, %zmm30	 # AVX512DQ Disp8
	vcvttps2uqq	-516(%rdx){1to8}, %zmm30	 # AVX512DQ
	vpmovd2m	%zmm30, %k5	 # AVX512DQ
	vpmovq2m	%zmm30, %k5	 # AVX512DQ
	vpmovm2d	%k5, %zmm30	 # AVX512DQ
	vpmovm2q	%k5, %zmm30	 # AVX512DQ
.intel_syntax noprefix
vbroadcastf32x8 zmm30, YMMWORD PTR [rcx] # AVX512DQ
vbroadcastf32x8 zmm30{k7}, YMMWORD PTR [rcx] # AVX512DQ
vbroadcastf32x8 zmm30{k7}{z}, YMMWORD PTR [rcx] # AVX512DQ
vbroadcastf32x8 zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vbroadcastf32x8 zmm30, YMMWORD PTR [rdx+4064] # AVX512DQ Disp8
vbroadcastf32x8 zmm30, YMMWORD PTR [rdx+4096] # AVX512DQ
vbroadcastf32x8 zmm30, YMMWORD PTR [rdx-4096] # AVX512DQ Disp8
vbroadcastf32x8 zmm30, YMMWORD PTR [rdx-4128] # AVX512DQ
vbroadcastf64x2 zmm30, XMMWORD PTR [rcx] # AVX512DQ
vbroadcastf64x2 zmm30{k7}, XMMWORD PTR [rcx] # AVX512DQ
vbroadcastf64x2 zmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512DQ
vbroadcastf64x2 zmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vbroadcastf64x2 zmm30, XMMWORD PTR [rdx+2032] # AVX512DQ Disp8
vbroadcastf64x2 zmm30, XMMWORD PTR [rdx+2048] # AVX512DQ
vbroadcastf64x2 zmm30, XMMWORD PTR [rdx-2048] # AVX512DQ Disp8
vbroadcastf64x2 zmm30, XMMWORD PTR [rdx-2064] # AVX512DQ
vbroadcasti32x8 zmm30, YMMWORD PTR [rcx] # AVX512DQ
vbroadcasti32x8 zmm30{k7}, YMMWORD PTR [rcx] # AVX512DQ
vbroadcasti32x8 zmm30{k7}{z}, YMMWORD PTR [rcx] # AVX512DQ
vbroadcasti32x8 zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vbroadcasti32x8 zmm30, YMMWORD PTR [rdx+4064] # AVX512DQ Disp8
vbroadcasti32x8 zmm30, YMMWORD PTR [rdx+4096] # AVX512DQ
vbroadcasti32x8 zmm30, YMMWORD PTR [rdx-4096] # AVX512DQ Disp8
vbroadcasti32x8 zmm30, YMMWORD PTR [rdx-4128] # AVX512DQ
vbroadcasti64x2 zmm30, XMMWORD PTR [rcx] # AVX512DQ
vbroadcasti64x2 zmm30{k7}, XMMWORD PTR [rcx] # AVX512DQ
vbroadcasti64x2 zmm30{k7}{z}, XMMWORD PTR [rcx] # AVX512DQ
vbroadcasti64x2 zmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vbroadcasti64x2 zmm30, XMMWORD PTR [rdx+2032] # AVX512DQ Disp8
vbroadcasti64x2 zmm30, XMMWORD PTR [rdx+2048] # AVX512DQ
vbroadcasti64x2 zmm30, XMMWORD PTR [rdx-2048] # AVX512DQ Disp8
vbroadcasti64x2 zmm30, XMMWORD PTR [rdx-2064] # AVX512DQ
vbroadcastf32x2 zmm30, xmm31 # AVX512DQ
vbroadcastf32x2 zmm30{k7}, xmm31 # AVX512DQ
vbroadcastf32x2 zmm30{k7}{z}, xmm31 # AVX512DQ
vbroadcastf32x2 zmm30, QWORD PTR [rcx] # AVX512DQ
vbroadcastf32x2 zmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vbroadcastf32x2 zmm30, QWORD PTR [rdx+1016] # AVX512DQ Disp8
vbroadcastf32x2 zmm30, QWORD PTR [rdx+1024] # AVX512DQ
vbroadcastf32x2 zmm30, QWORD PTR [rdx-1024] # AVX512DQ Disp8
vbroadcastf32x2 zmm30, QWORD PTR [rdx-1032] # AVX512DQ
vcvtpd2qq zmm30, zmm29 # AVX512DQ
vcvtpd2qq zmm30{k7}, zmm29 # AVX512DQ
vcvtpd2qq zmm30{k7}{z}, zmm29 # AVX512DQ
vcvtpd2qq zmm30, zmm29{rn-sae} # AVX512DQ
vcvtpd2qq zmm30, zmm29{ru-sae} # AVX512DQ
vcvtpd2qq zmm30, zmm29{rd-sae} # AVX512DQ
vcvtpd2qq zmm30, zmm29{rz-sae} # AVX512DQ
vcvtpd2qq zmm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvtpd2qq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtpd2qq zmm30, qword bcst [rcx] # AVX512DQ
vcvtpd2qq zmm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvtpd2qq zmm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvtpd2qq zmm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvtpd2qq zmm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvtpd2qq zmm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvtpd2qq zmm30, qword bcst [rdx+1024] # AVX512DQ
vcvtpd2qq zmm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvtpd2qq zmm30, qword bcst [rdx-1032] # AVX512DQ
vcvtpd2uqq zmm30, zmm29 # AVX512DQ
vcvtpd2uqq zmm30{k7}, zmm29 # AVX512DQ
vcvtpd2uqq zmm30{k7}{z}, zmm29 # AVX512DQ
vcvtpd2uqq zmm30, zmm29{rn-sae} # AVX512DQ
vcvtpd2uqq zmm30, zmm29{ru-sae} # AVX512DQ
vcvtpd2uqq zmm30, zmm29{rd-sae} # AVX512DQ
vcvtpd2uqq zmm30, zmm29{rz-sae} # AVX512DQ
vcvtpd2uqq zmm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvtpd2uqq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtpd2uqq zmm30, qword bcst [rcx] # AVX512DQ
vcvtpd2uqq zmm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvtpd2uqq zmm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvtpd2uqq zmm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvtpd2uqq zmm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvtpd2uqq zmm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvtpd2uqq zmm30, qword bcst [rdx+1024] # AVX512DQ
vcvtpd2uqq zmm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvtpd2uqq zmm30, qword bcst [rdx-1032] # AVX512DQ
vcvtps2qq zmm30, ymm29 # AVX512DQ
vcvtps2qq zmm30{k7}, ymm29 # AVX512DQ
vcvtps2qq zmm30{k7}{z}, ymm29 # AVX512DQ
vcvtps2qq zmm30, ymm29{rn-sae} # AVX512DQ
vcvtps2qq zmm30, ymm29{ru-sae} # AVX512DQ
vcvtps2qq zmm30, ymm29{rd-sae} # AVX512DQ
vcvtps2qq zmm30, ymm29{rz-sae} # AVX512DQ
vcvtps2qq zmm30, YMMWORD PTR [rcx] # AVX512DQ
vcvtps2qq zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtps2qq zmm30, dword bcst [rcx] # AVX512DQ
vcvtps2qq zmm30, YMMWORD PTR [rdx+4064] # AVX512DQ Disp8
vcvtps2qq zmm30, YMMWORD PTR [rdx+4096] # AVX512DQ
vcvtps2qq zmm30, YMMWORD PTR [rdx-4096] # AVX512DQ Disp8
vcvtps2qq zmm30, YMMWORD PTR [rdx-4128] # AVX512DQ
vcvtps2qq zmm30, dword bcst [rdx+508] # AVX512DQ Disp8
vcvtps2qq zmm30, dword bcst [rdx+512] # AVX512DQ
vcvtps2qq zmm30, dword bcst [rdx-512] # AVX512DQ Disp8
vcvtps2qq zmm30, dword bcst [rdx-516] # AVX512DQ
vcvtps2qq zmm30, DWORD BCST [rdx+508] # AVX512DQ Disp8
vcvtps2uqq zmm30, ymm29 # AVX512DQ
vcvtps2uqq zmm30{k7}, ymm29 # AVX512DQ
vcvtps2uqq zmm30{k7}{z}, ymm29 # AVX512DQ
vcvtps2uqq zmm30, ymm29{rn-sae} # AVX512DQ
vcvtps2uqq zmm30, ymm29{ru-sae} # AVX512DQ
vcvtps2uqq zmm30, ymm29{rd-sae} # AVX512DQ
vcvtps2uqq zmm30, ymm29{rz-sae} # AVX512DQ
vcvtps2uqq zmm30, YMMWORD PTR [rcx] # AVX512DQ
vcvtps2uqq zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtps2uqq zmm30, dword bcst [rcx] # AVX512DQ
vcvtps2uqq zmm30, YMMWORD PTR [rdx+4064] # AVX512DQ Disp8
vcvtps2uqq zmm30, YMMWORD PTR [rdx+4096] # AVX512DQ
vcvtps2uqq zmm30, YMMWORD PTR [rdx-4096] # AVX512DQ Disp8
vcvtps2uqq zmm30, YMMWORD PTR [rdx-4128] # AVX512DQ
vcvtps2uqq zmm30, dword bcst [rdx+508] # AVX512DQ Disp8
vcvtps2uqq zmm30, dword bcst [rdx+512] # AVX512DQ
vcvtps2uqq zmm30, dword bcst [rdx-512] # AVX512DQ Disp8
vcvtps2uqq zmm30, dword bcst [rdx-516] # AVX512DQ
vcvtps2uqq zmm30, DWORD BCST [rdx+508] # AVX512DQ Disp8
vcvtqq2pd zmm30, zmm29 # AVX512DQ
vcvtqq2pd zmm30{k7}, zmm29 # AVX512DQ
vcvtqq2pd zmm30{k7}{z}, zmm29 # AVX512DQ
vcvtqq2pd zmm30, zmm29{rn-sae} # AVX512DQ
vcvtqq2pd zmm30, zmm29{ru-sae} # AVX512DQ
vcvtqq2pd zmm30, zmm29{rd-sae} # AVX512DQ
vcvtqq2pd zmm30, zmm29{rz-sae} # AVX512DQ
vcvtqq2pd zmm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvtqq2pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtqq2pd zmm30, qword bcst [rcx] # AVX512DQ
vcvtqq2pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvtqq2pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvtqq2pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvtqq2pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvtqq2pd zmm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvtqq2pd zmm30, qword bcst [rdx+1024] # AVX512DQ
vcvtqq2pd zmm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvtqq2pd zmm30, qword bcst [rdx-1032] # AVX512DQ
vcvtqq2ps ymm30, zmm29 # AVX512DQ
vcvtqq2ps ymm30{k7}, zmm29 # AVX512DQ
vcvtqq2ps ymm30{k7}{z}, zmm29 # AVX512DQ
vcvtqq2ps ymm30, zmm29{rn-sae} # AVX512DQ
vcvtqq2ps ymm30, zmm29{ru-sae} # AVX512DQ
vcvtqq2ps ymm30, zmm29{rd-sae} # AVX512DQ
vcvtqq2ps ymm30, zmm29{rz-sae} # AVX512DQ
vcvtqq2ps ymm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvtqq2ps ymm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtqq2ps ymm30, qword bcst [rcx] # AVX512DQ
vcvtqq2ps ymm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvtqq2ps ymm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvtqq2ps ymm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvtqq2ps ymm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvtqq2ps ymm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvtqq2ps ymm30, qword bcst [rdx+1024] # AVX512DQ
vcvtqq2ps ymm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvtqq2ps ymm30, qword bcst [rdx-1032] # AVX512DQ
vcvtuqq2pd zmm30, zmm29 # AVX512DQ
vcvtuqq2pd zmm30{k7}, zmm29 # AVX512DQ
vcvtuqq2pd zmm30{k7}{z}, zmm29 # AVX512DQ
vcvtuqq2pd zmm30, zmm29{rn-sae} # AVX512DQ
vcvtuqq2pd zmm30, zmm29{ru-sae} # AVX512DQ
vcvtuqq2pd zmm30, zmm29{rd-sae} # AVX512DQ
vcvtuqq2pd zmm30, zmm29{rz-sae} # AVX512DQ
vcvtuqq2pd zmm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvtuqq2pd zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtuqq2pd zmm30, qword bcst [rcx] # AVX512DQ
vcvtuqq2pd zmm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvtuqq2pd zmm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvtuqq2pd zmm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvtuqq2pd zmm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvtuqq2pd zmm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvtuqq2pd zmm30, qword bcst [rdx+1024] # AVX512DQ
vcvtuqq2pd zmm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvtuqq2pd zmm30, qword bcst [rdx-1032] # AVX512DQ
vcvtuqq2ps ymm30, zmm29 # AVX512DQ
vcvtuqq2ps ymm30{k7}, zmm29 # AVX512DQ
vcvtuqq2ps ymm30{k7}{z}, zmm29 # AVX512DQ
vcvtuqq2ps ymm30, zmm29{rn-sae} # AVX512DQ
vcvtuqq2ps ymm30, zmm29{ru-sae} # AVX512DQ
vcvtuqq2ps ymm30, zmm29{rd-sae} # AVX512DQ
vcvtuqq2ps ymm30, zmm29{rz-sae} # AVX512DQ
vcvtuqq2ps ymm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvtuqq2ps ymm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvtuqq2ps ymm30, qword bcst [rcx] # AVX512DQ
vcvtuqq2ps ymm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvtuqq2ps ymm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvtuqq2ps ymm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvtuqq2ps ymm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvtuqq2ps ymm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvtuqq2ps ymm30, qword bcst [rdx+1024] # AVX512DQ
vcvtuqq2ps ymm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvtuqq2ps ymm30, qword bcst [rdx-1032] # AVX512DQ
vextractf64x2 xmm30, zmm29, 0xab # AVX512DQ
vextractf64x2 xmm30{k7}, zmm29, 0xab # AVX512DQ
vextractf64x2 xmm30{k7}{z}, zmm29, 0xab # AVX512DQ
vextractf64x2 xmm30, zmm29, 123 # AVX512DQ
vextractf32x8 ymm30, zmm29, 0xab # AVX512DQ
vextractf32x8 ymm30{k7}, zmm29, 0xab # AVX512DQ
vextractf32x8 ymm30{k7}{z}, zmm29, 0xab # AVX512DQ
vextractf32x8 ymm30, zmm29, 123 # AVX512DQ
vextracti64x2 xmm30, zmm29, 0xab # AVX512DQ
vextracti64x2 xmm30{k7}, zmm29, 0xab # AVX512DQ
vextracti64x2 xmm30{k7}{z}, zmm29, 0xab # AVX512DQ
vextracti64x2 xmm30, zmm29, 123 # AVX512DQ
vextracti32x8 ymm30, zmm29, 0xab # AVX512DQ
vextracti32x8 ymm30{k7}, zmm29, 0xab # AVX512DQ
vextracti32x8 ymm30{k7}{z}, zmm29, 0xab # AVX512DQ
vextracti32x8 ymm30, zmm29, 123 # AVX512DQ
vfpclasspd k5, zmm30, 0xab # AVX512DQ
vfpclasspd k5{k7}, zmm30, 0xab # AVX512DQ
vfpclasspd k5, zmm30, 123 # AVX512DQ
vfpclasspd k5, ZMMWORD PTR [rcx], 123 # AVX512DQ
vfpclasspd k5, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vfpclasspd k5, [rcx]{1to8}, 123 # AVX512DQ
vfpclasspd k5, ZMMWORD PTR [rdx+8128], 123 # AVX512DQ Disp8
vfpclasspd k5, ZMMWORD PTR [rdx+8192], 123 # AVX512DQ
vfpclasspd k5, ZMMWORD PTR [rdx-8192], 123 # AVX512DQ Disp8
vfpclasspd k5, ZMMWORD PTR [rdx-8256], 123 # AVX512DQ
vfpclasspd k5, QWORD BCST [rdx+1016]{1to8}, 123 # AVX512DQ Disp8
vfpclasspd k5, QWORD BCST [rdx+1024]{1to8}, 123 # AVX512DQ
vfpclasspd k5, QWORD BCST [rdx-1024]{1to8}, 123 # AVX512DQ Disp8
vfpclasspd k5, QWORD BCST [rdx-1032]{1to8}, 123 # AVX512DQ
vfpclassps k5, zmm30, 0xab # AVX512DQ
vfpclassps k5{k7}, zmm30, 0xab # AVX512DQ
vfpclassps k5, zmm30, 123 # AVX512DQ
vfpclassps k5, ZMMWORD PTR [rcx], 123 # AVX512DQ
vfpclassps k5, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vfpclassps k5, [rcx]{1to16}, 123 # AVX512DQ
vfpclassps k5, ZMMWORD PTR [rdx+8128], 123 # AVX512DQ Disp8
vfpclassps k5, ZMMWORD PTR [rdx+8192], 123 # AVX512DQ
vfpclassps k5, ZMMWORD PTR [rdx-8192], 123 # AVX512DQ Disp8
vfpclassps k5, ZMMWORD PTR [rdx-8256], 123 # AVX512DQ
vfpclassps k5, DWORD BCST [rdx+508]{1to16}, 123 # AVX512DQ Disp8
vfpclassps k5, DWORD BCST [rdx+512]{1to16}, 123 # AVX512DQ
vfpclassps k5, DWORD BCST [rdx-512]{1to16}, 123 # AVX512DQ Disp8
vfpclassps k5, DWORD BCST [rdx-516]{1to16}, 123 # AVX512DQ
vfpclasssd k5, xmm30, 0xab # AVX512DQ
vfpclasssd k5{k7}, xmm30, 0xab # AVX512DQ
vfpclasssd k5, xmm30, 123 # AVX512DQ
vfpclasssd k5, QWORD PTR [rcx], 123 # AVX512DQ
vfpclasssd k5, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vfpclasssd k5, QWORD PTR [rdx+1016], 123 # AVX512DQ Disp8
vfpclasssd k5, QWORD PTR [rdx+1024], 123 # AVX512DQ
vfpclasssd k5, QWORD PTR [rdx-1024], 123 # AVX512DQ Disp8
vfpclasssd k5, QWORD PTR [rdx-1032], 123 # AVX512DQ
vfpclassss k5, xmm30, 0xab # AVX512DQ
vfpclassss k5{k7}, xmm30, 0xab # AVX512DQ
vfpclassss k5, xmm30, 123 # AVX512DQ
vfpclassss k5, DWORD PTR [rcx], 123 # AVX512DQ
vfpclassss k5, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vfpclassss k5, DWORD PTR [rdx+508], 123 # AVX512DQ Disp8
vfpclassss k5, DWORD PTR [rdx+512], 123 # AVX512DQ
vfpclassss k5, DWORD PTR [rdx-512], 123 # AVX512DQ Disp8
vfpclassss k5, DWORD PTR [rdx-516], 123 # AVX512DQ
vinsertf64x2 zmm30, zmm29, xmm28, 0xab # AVX512DQ
vinsertf64x2 zmm30{k7}, zmm29, xmm28, 0xab # AVX512DQ
vinsertf64x2 zmm30{k7}{z}, zmm29, xmm28, 0xab # AVX512DQ
vinsertf64x2 zmm30, zmm29, xmm28, 123 # AVX512DQ
vinsertf64x2 zmm30, zmm29, XMMWORD PTR [rcx], 123 # AVX512DQ
vinsertf64x2 zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vinsertf64x2 zmm30, zmm29, XMMWORD PTR [rdx+2032], 123 # AVX512DQ Disp8
vinsertf64x2 zmm30, zmm29, XMMWORD PTR [rdx+2048], 123 # AVX512DQ
vinsertf64x2 zmm30, zmm29, XMMWORD PTR [rdx-2048], 123 # AVX512DQ Disp8
vinsertf64x2 zmm30, zmm29, XMMWORD PTR [rdx-2064], 123 # AVX512DQ
vinsertf32x8 zmm30, zmm29, ymm28, 0xab # AVX512DQ
vinsertf32x8 zmm30{k7}, zmm29, ymm28, 0xab # AVX512DQ
vinsertf32x8 zmm30{k7}{z}, zmm29, ymm28, 0xab # AVX512DQ
vinsertf32x8 zmm30, zmm29, ymm28, 123 # AVX512DQ
vinsertf32x8 zmm30, zmm29, YMMWORD PTR [rcx], 123 # AVX512DQ
vinsertf32x8 zmm30, zmm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vinsertf32x8 zmm30, zmm29, YMMWORD PTR [rdx+4064], 123 # AVX512DQ Disp8
vinsertf32x8 zmm30, zmm29, YMMWORD PTR [rdx+4096], 123 # AVX512DQ
vinsertf32x8 zmm30, zmm29, YMMWORD PTR [rdx-4096], 123 # AVX512DQ Disp8
vinsertf32x8 zmm30, zmm29, YMMWORD PTR [rdx-4128], 123 # AVX512DQ
vinserti64x2 zmm30, zmm29, xmm28, 0xab # AVX512DQ
vinserti64x2 zmm30{k7}, zmm29, xmm28, 0xab # AVX512DQ
vinserti64x2 zmm30{k7}{z}, zmm29, xmm28, 0xab # AVX512DQ
vinserti64x2 zmm30, zmm29, xmm28, 123 # AVX512DQ
vinserti64x2 zmm30, zmm29, XMMWORD PTR [rcx], 123 # AVX512DQ
vinserti64x2 zmm30, zmm29, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vinserti64x2 zmm30, zmm29, XMMWORD PTR [rdx+2032], 123 # AVX512DQ Disp8
vinserti64x2 zmm30, zmm29, XMMWORD PTR [rdx+2048], 123 # AVX512DQ
vinserti64x2 zmm30, zmm29, XMMWORD PTR [rdx-2048], 123 # AVX512DQ Disp8
vinserti64x2 zmm30, zmm29, XMMWORD PTR [rdx-2064], 123 # AVX512DQ
vinserti32x8 zmm30, zmm29, ymm28, 0xab # AVX512DQ
vinserti32x8 zmm30{k7}, zmm29, ymm28, 0xab # AVX512DQ
vinserti32x8 zmm30{k7}{z}, zmm29, ymm28, 0xab # AVX512DQ
vinserti32x8 zmm30, zmm29, ymm28, 123 # AVX512DQ
vinserti32x8 zmm30, zmm29, YMMWORD PTR [rcx], 123 # AVX512DQ
vinserti32x8 zmm30, zmm29, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vinserti32x8 zmm30, zmm29, YMMWORD PTR [rdx+4064], 123 # AVX512DQ Disp8
vinserti32x8 zmm30, zmm29, YMMWORD PTR [rdx+4096], 123 # AVX512DQ
vinserti32x8 zmm30, zmm29, YMMWORD PTR [rdx-4096], 123 # AVX512DQ Disp8
vinserti32x8 zmm30, zmm29, YMMWORD PTR [rdx-4128], 123 # AVX512DQ
vbroadcasti32x2 zmm30, xmm31 # AVX512DQ
vbroadcasti32x2 zmm30{k7}, xmm31 # AVX512DQ
vbroadcasti32x2 zmm30{k7}{z}, xmm31 # AVX512DQ
vbroadcasti32x2 zmm30, QWORD PTR [rcx] # AVX512DQ
vbroadcasti32x2 zmm30, QWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vbroadcasti32x2 zmm30, QWORD PTR [rdx+1016] # AVX512DQ Disp8
vbroadcasti32x2 zmm30, QWORD PTR [rdx+1024] # AVX512DQ
vbroadcasti32x2 zmm30, QWORD PTR [rdx-1024] # AVX512DQ Disp8
vbroadcasti32x2 zmm30, QWORD PTR [rdx-1032] # AVX512DQ
vpextrd eax, xmm29, 0xab # AVX512DQ
vpextrd eax, xmm29, 123 # AVX512DQ
vpextrd ebp, xmm29, 123 # AVX512DQ
vpextrd r13d, xmm29, 123 # AVX512DQ
vpextrd DWORD PTR [rcx], xmm29, 123 # AVX512DQ
vpextrd DWORD PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512DQ
vpextrd DWORD PTR [rdx+508], xmm29, 123 # AVX512DQ Disp8
vpextrd DWORD PTR [rdx+512], xmm29, 123 # AVX512DQ
vpextrd DWORD PTR [rdx-512], xmm29, 123 # AVX512DQ Disp8
vpextrd DWORD PTR [rdx-516], xmm29, 123 # AVX512DQ
vpextrq rax, xmm29, 0xab # AVX512DQ
vpextrq rax, xmm29, 123 # AVX512DQ
vpextrq r8, xmm29, 123 # AVX512DQ
vpextrq QWORD PTR [rcx], xmm29, 123 # AVX512DQ
vpextrq QWORD PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512DQ
vpextrq QWORD PTR [rdx+1016], xmm29, 123 # AVX512DQ Disp8
vpextrq QWORD PTR [rdx+1024], xmm29, 123 # AVX512DQ
vpextrq QWORD PTR [rdx-1024], xmm29, 123 # AVX512DQ Disp8
vpextrq QWORD PTR [rdx-1032], xmm29, 123 # AVX512DQ
vpinsrd xmm30, xmm29, eax, 0xab # AVX512DQ
vpinsrd xmm30, xmm29, eax, 123 # AVX512DQ
vpinsrd xmm30, xmm29, ebp, 123 # AVX512DQ
vpinsrd xmm30, xmm29, r13d, 123 # AVX512DQ
vpinsrd xmm30, xmm29, DWORD PTR [rcx], 123 # AVX512DQ
vpinsrd xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vpinsrd xmm30, xmm29, DWORD PTR [rdx+508], 123 # AVX512DQ Disp8
vpinsrd xmm30, xmm29, DWORD PTR [rdx+512], 123 # AVX512DQ
vpinsrd xmm30, xmm29, DWORD PTR [rdx-512], 123 # AVX512DQ Disp8
vpinsrd xmm30, xmm29, DWORD PTR [rdx-516], 123 # AVX512DQ
vpinsrq xmm30, xmm29, rax, 0xab # AVX512DQ
vpinsrq xmm30, xmm29, rax, 123 # AVX512DQ
vpinsrq xmm30, xmm29, r8, 123 # AVX512DQ
vpinsrq xmm30, xmm29, QWORD PTR [rcx], 123 # AVX512DQ
vpinsrq xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vpinsrq xmm30, xmm29, QWORD PTR [rdx+1016], 123 # AVX512DQ Disp8
vpinsrq xmm30, xmm29, QWORD PTR [rdx+1024], 123 # AVX512DQ
vpinsrq xmm30, xmm29, QWORD PTR [rdx-1024], 123 # AVX512DQ Disp8
vpinsrq xmm30, xmm29, QWORD PTR [rdx-1032], 123 # AVX512DQ
vpmullq zmm30, zmm29, zmm28 # AVX512DQ
vpmullq zmm30{k7}, zmm29, zmm28 # AVX512DQ
vpmullq zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vpmullq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vpmullq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vpmullq zmm30, zmm29, qword bcst [rcx] # AVX512DQ
vpmullq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vpmullq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vpmullq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vpmullq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vpmullq zmm30, zmm29, qword bcst [rdx+1016] # AVX512DQ Disp8
vpmullq zmm30, zmm29, qword bcst [rdx+1024] # AVX512DQ
vpmullq zmm30, zmm29, qword bcst [rdx-1024] # AVX512DQ Disp8
vpmullq zmm30, zmm29, qword bcst [rdx-1032] # AVX512DQ
vrangepd zmm30, zmm29, zmm28, 0xab # AVX512DQ
vrangepd zmm30{k7}, zmm29, zmm28, 0xab # AVX512DQ
vrangepd zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512DQ
vrangepd zmm30, zmm29, zmm28{sae}, 0xab # AVX512DQ
vrangepd zmm30, zmm29, zmm28, 123 # AVX512DQ
vrangepd zmm30, zmm29, zmm28{sae}, 123 # AVX512DQ
vrangepd zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512DQ
vrangepd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vrangepd zmm30, zmm29, qword bcst [rcx], 123 # AVX512DQ
vrangepd zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512DQ Disp8
vrangepd zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512DQ
vrangepd zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512DQ Disp8
vrangepd zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512DQ
vrangepd zmm30, zmm29, qword bcst [rdx+1016], 123 # AVX512DQ Disp8
vrangepd zmm30, zmm29, qword bcst [rdx+1024], 123 # AVX512DQ
vrangepd zmm30, zmm29, qword bcst [rdx-1024], 123 # AVX512DQ Disp8
vrangepd zmm30, zmm29, qword bcst [rdx-1032], 123 # AVX512DQ
vrangeps zmm30, zmm29, zmm28, 0xab # AVX512DQ
vrangeps zmm30{k7}, zmm29, zmm28, 0xab # AVX512DQ
vrangeps zmm30{k7}{z}, zmm29, zmm28, 0xab # AVX512DQ
vrangeps zmm30, zmm29, zmm28{sae}, 0xab # AVX512DQ
vrangeps zmm30, zmm29, zmm28, 123 # AVX512DQ
vrangeps zmm30, zmm29, zmm28{sae}, 123 # AVX512DQ
vrangeps zmm30, zmm29, ZMMWORD PTR [rcx], 123 # AVX512DQ
vrangeps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vrangeps zmm30, zmm29, dword bcst [rcx], 123 # AVX512DQ
vrangeps zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123 # AVX512DQ Disp8
vrangeps zmm30, zmm29, ZMMWORD PTR [rdx+8192], 123 # AVX512DQ
vrangeps zmm30, zmm29, ZMMWORD PTR [rdx-8192], 123 # AVX512DQ Disp8
vrangeps zmm30, zmm29, ZMMWORD PTR [rdx-8256], 123 # AVX512DQ
vrangeps zmm30, zmm29, dword bcst [rdx+508], 123 # AVX512DQ Disp8
vrangeps zmm30, zmm29, dword bcst [rdx+512], 123 # AVX512DQ
vrangeps zmm30, zmm29, dword bcst [rdx-512], 123 # AVX512DQ Disp8
vrangeps zmm30, zmm29, dword bcst [rdx-516], 123 # AVX512DQ
vrangesd xmm30, xmm29, xmm28, 0xab # AVX512DQ
vrangesd xmm30{k7}, xmm29, xmm28, 0xab # AVX512DQ
vrangesd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512DQ
vrangesd xmm30, xmm29, xmm28{sae}, 0xab # AVX512DQ
vrangesd xmm30, xmm29, xmm28, 123 # AVX512DQ
vrangesd xmm30, xmm29, xmm28{sae}, 123 # AVX512DQ
vrangesd xmm30, xmm29, QWORD PTR [rcx], 123 # AVX512DQ
vrangesd xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vrangesd xmm30, xmm29, QWORD PTR [rdx+1016], 123 # AVX512DQ Disp8
vrangesd xmm30, xmm29, QWORD PTR [rdx+1024], 123 # AVX512DQ
vrangesd xmm30, xmm29, QWORD PTR [rdx-1024], 123 # AVX512DQ Disp8
vrangesd xmm30, xmm29, QWORD PTR [rdx-1032], 123 # AVX512DQ
vrangess xmm30, xmm29, xmm28, 0xab # AVX512DQ
vrangess xmm30{k7}, xmm29, xmm28, 0xab # AVX512DQ
vrangess xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512DQ
vrangess xmm30, xmm29, xmm28{sae}, 0xab # AVX512DQ
vrangess xmm30, xmm29, xmm28, 123 # AVX512DQ
vrangess xmm30, xmm29, xmm28{sae}, 123 # AVX512DQ
vrangess xmm30, xmm29, DWORD PTR [rcx], 123 # AVX512DQ
vrangess xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vrangess xmm30, xmm29, DWORD PTR [rdx+508], 123 # AVX512DQ Disp8
vrangess xmm30, xmm29, DWORD PTR [rdx+512], 123 # AVX512DQ
vrangess xmm30, xmm29, DWORD PTR [rdx-512], 123 # AVX512DQ Disp8
vrangess xmm30, xmm29, DWORD PTR [rdx-516], 123 # AVX512DQ
vandpd zmm30, zmm29, zmm28 # AVX512DQ
vandpd zmm30{k7}, zmm29, zmm28 # AVX512DQ
vandpd zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vandpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vandpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vandpd zmm30, zmm29, qword bcst [rcx] # AVX512DQ
vandpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vandpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vandpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vandpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vandpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512DQ Disp8
vandpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512DQ
vandpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512DQ Disp8
vandpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512DQ
vandps zmm30, zmm29, zmm28 # AVX512DQ
vandps zmm30{k7}, zmm29, zmm28 # AVX512DQ
vandps zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vandps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vandps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vandps zmm30, zmm29, dword bcst [rcx] # AVX512DQ
vandps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vandps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vandps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vandps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vandps zmm30, zmm29, dword bcst [rdx+508] # AVX512DQ Disp8
vandps zmm30, zmm29, dword bcst [rdx+512] # AVX512DQ
vandps zmm30, zmm29, dword bcst [rdx-512] # AVX512DQ Disp8
vandps zmm30, zmm29, dword bcst [rdx-516] # AVX512DQ
vandnpd zmm30, zmm29, zmm28 # AVX512DQ
vandnpd zmm30{k7}, zmm29, zmm28 # AVX512DQ
vandnpd zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vandnpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vandnpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vandnpd zmm30, zmm29, qword bcst [rcx] # AVX512DQ
vandnpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vandnpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vandnpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vandnpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vandnpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512DQ Disp8
vandnpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512DQ
vandnpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512DQ Disp8
vandnpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512DQ
vandnps zmm30, zmm29, zmm28 # AVX512DQ
vandnps zmm30{k7}, zmm29, zmm28 # AVX512DQ
vandnps zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vandnps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vandnps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vandnps zmm30, zmm29, dword bcst [rcx] # AVX512DQ
vandnps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vandnps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vandnps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vandnps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vandnps zmm30, zmm29, dword bcst [rdx+508] # AVX512DQ Disp8
vandnps zmm30, zmm29, dword bcst [rdx+512] # AVX512DQ
vandnps zmm30, zmm29, dword bcst [rdx-512] # AVX512DQ Disp8
vandnps zmm30, zmm29, dword bcst [rdx-516] # AVX512DQ
vorpd zmm30, zmm29, zmm28 # AVX512DQ
vorpd zmm30{k7}, zmm29, zmm28 # AVX512DQ
vorpd zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vorpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vorpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vorpd zmm30, zmm29, qword bcst [rcx] # AVX512DQ
vorpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vorpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vorpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vorpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vorpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512DQ Disp8
vorpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512DQ
vorpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512DQ Disp8
vorpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512DQ
vorps zmm30, zmm29, zmm28 # AVX512DQ
vorps zmm30{k7}, zmm29, zmm28 # AVX512DQ
vorps zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vorps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vorps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vorps zmm30, zmm29, dword bcst [rcx] # AVX512DQ
vorps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vorps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vorps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vorps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vorps zmm30, zmm29, dword bcst [rdx+508] # AVX512DQ Disp8
vorps zmm30, zmm29, dword bcst [rdx+512] # AVX512DQ
vorps zmm30, zmm29, dword bcst [rdx-512] # AVX512DQ Disp8
vorps zmm30, zmm29, dword bcst [rdx-516] # AVX512DQ
vxorpd zmm30, zmm29, zmm28 # AVX512DQ
vxorpd zmm30{k7}, zmm29, zmm28 # AVX512DQ
vxorpd zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vxorpd zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vxorpd zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vxorpd zmm30, zmm29, qword bcst [rcx] # AVX512DQ
vxorpd zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vxorpd zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vxorpd zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vxorpd zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vxorpd zmm30, zmm29, qword bcst [rdx+1016] # AVX512DQ Disp8
vxorpd zmm30, zmm29, qword bcst [rdx+1024] # AVX512DQ
vxorpd zmm30, zmm29, qword bcst [rdx-1024] # AVX512DQ Disp8
vxorpd zmm30, zmm29, qword bcst [rdx-1032] # AVX512DQ
vxorps zmm30, zmm29, zmm28 # AVX512DQ
vxorps zmm30{k7}, zmm29, zmm28 # AVX512DQ
vxorps zmm30{k7}{z}, zmm29, zmm28 # AVX512DQ
vxorps zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512DQ
vxorps zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vxorps zmm30, zmm29, dword bcst [rcx] # AVX512DQ
vxorps zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vxorps zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512DQ
vxorps zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vxorps zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512DQ
vxorps zmm30, zmm29, dword bcst [rdx+508] # AVX512DQ Disp8
vxorps zmm30, zmm29, dword bcst [rdx+512] # AVX512DQ
vxorps zmm30, zmm29, dword bcst [rdx-512] # AVX512DQ Disp8
vxorps zmm30, zmm29, dword bcst [rdx-516] # AVX512DQ
vreducepd zmm30, zmm29, 0xab # AVX512DQ
vreducepd zmm30{k7}, zmm29, 0xab # AVX512DQ
vreducepd zmm30{k7}{z}, zmm29, 0xab # AVX512DQ
vreducepd zmm30, zmm29{sae}, 0xab # AVX512DQ
vreducepd zmm30, zmm29, 123 # AVX512DQ
vreducepd zmm30, zmm29{sae}, 123 # AVX512DQ
vreducepd zmm30, ZMMWORD PTR [rcx], 123 # AVX512DQ
vreducepd zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vreducepd zmm30, qword bcst [rcx], 123 # AVX512DQ
vreducepd zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512DQ Disp8
vreducepd zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512DQ
vreducepd zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512DQ Disp8
vreducepd zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512DQ
vreducepd zmm30, qword bcst [rdx+1016], 123 # AVX512DQ Disp8
vreducepd zmm30, qword bcst [rdx+1024], 123 # AVX512DQ
vreducepd zmm30, qword bcst [rdx-1024], 123 # AVX512DQ Disp8
vreducepd zmm30, qword bcst [rdx-1032], 123 # AVX512DQ
vreduceps zmm30, zmm29, 0xab # AVX512DQ
vreduceps zmm30{k7}, zmm29, 0xab # AVX512DQ
vreduceps zmm30{k7}{z}, zmm29, 0xab # AVX512DQ
vreduceps zmm30, zmm29{sae}, 0xab # AVX512DQ
vreduceps zmm30, zmm29, 123 # AVX512DQ
vreduceps zmm30, zmm29{sae}, 123 # AVX512DQ
vreduceps zmm30, ZMMWORD PTR [rcx], 123 # AVX512DQ
vreduceps zmm30, ZMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vreduceps zmm30, dword bcst [rcx], 123 # AVX512DQ
vreduceps zmm30, ZMMWORD PTR [rdx+8128], 123 # AVX512DQ Disp8
vreduceps zmm30, ZMMWORD PTR [rdx+8192], 123 # AVX512DQ
vreduceps zmm30, ZMMWORD PTR [rdx-8192], 123 # AVX512DQ Disp8
vreduceps zmm30, ZMMWORD PTR [rdx-8256], 123 # AVX512DQ
vreduceps zmm30, dword bcst [rdx+508], 123 # AVX512DQ Disp8
vreduceps zmm30, dword bcst [rdx+512], 123 # AVX512DQ
vreduceps zmm30, dword bcst [rdx-512], 123 # AVX512DQ Disp8
vreduceps zmm30, dword bcst [rdx-516], 123 # AVX512DQ
vreducesd xmm30, xmm29, xmm28, 0xab # AVX512DQ
vreducesd xmm30{k7}, xmm29, xmm28, 0xab # AVX512DQ
vreducesd xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512DQ
vreducesd xmm30, xmm29, xmm28{sae}, 0xab # AVX512DQ
vreducesd xmm30, xmm29, xmm28, 123 # AVX512DQ
vreducesd xmm30, xmm29, xmm28{sae}, 123 # AVX512DQ
vreducesd xmm30, xmm29, QWORD PTR [rcx], 123 # AVX512DQ
vreducesd xmm30, xmm29, QWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vreducesd xmm30, xmm29, QWORD PTR [rdx+1016], 123 # AVX512DQ Disp8
vreducesd xmm30, xmm29, QWORD PTR [rdx+1024], 123 # AVX512DQ
vreducesd xmm30, xmm29, QWORD PTR [rdx-1024], 123 # AVX512DQ Disp8
vreducesd xmm30, xmm29, QWORD PTR [rdx-1032], 123 # AVX512DQ
vreducess xmm30, xmm29, xmm28, 0xab # AVX512DQ
vreducess xmm30{k7}, xmm29, xmm28, 0xab # AVX512DQ
vreducess xmm30{k7}{z}, xmm29, xmm28, 0xab # AVX512DQ
vreducess xmm30, xmm29, xmm28{sae}, 0xab # AVX512DQ
vreducess xmm30, xmm29, xmm28, 123 # AVX512DQ
vreducess xmm30, xmm29, xmm28{sae}, 123 # AVX512DQ
vreducess xmm30, xmm29, DWORD PTR [rcx], 123 # AVX512DQ
vreducess xmm30, xmm29, DWORD PTR [rax+r14*8+0x1234], 123 # AVX512DQ
vreducess xmm30, xmm29, DWORD PTR [rdx+508], 123 # AVX512DQ Disp8
vreducess xmm30, xmm29, DWORD PTR [rdx+512], 123 # AVX512DQ
vreducess xmm30, xmm29, DWORD PTR [rdx-512], 123 # AVX512DQ Disp8
vreducess xmm30, xmm29, DWORD PTR [rdx-516], 123 # AVX512DQ
kandb k5, k6, k7 # AVX512DQ
kandnb k5, k6, k7 # AVX512DQ
korb k5, k6, k7 # AVX512DQ
kxnorb k5, k6, k7 # AVX512DQ
kxorb k5, k6, k7 # AVX512DQ
knotb k5, k6 # AVX512DQ
kortestb k5, k6 # AVX512DQ
ktestw k5, k6 # AVX512DQ
ktestb k5, k6 # AVX512DQ
kshiftrb k5, k6, 0xab # AVX512DQ
kshiftrb k5, k6, 123 # AVX512DQ
kshiftlb k5, k6, 0xab # AVX512DQ
kshiftlb k5, k6, 123 # AVX512DQ
kmovb k5, k6 # AVX512DQ
kmovb k5, BYTE PTR [rcx] # AVX512DQ
kmovb k5, BYTE PTR [rax+r14*8+0x1234] # AVX512DQ
kmovb BYTE PTR [rcx], k5 # AVX512DQ
kmovb BYTE PTR [rax+r14*8+0x1234], k5 # AVX512DQ
kmovb k5, eax # AVX512DQ
kmovb k5, ebp # AVX512DQ
kmovb k5, r13d # AVX512DQ
kmovb eax, k5 # AVX512DQ
kmovb ebp, k5 # AVX512DQ
kmovb r13d, k5 # AVX512DQ
kaddw k5, k6, k7 # AVX512DQ
kaddb k5, k6, k7 # AVX512DQ
vextractf64x2 XMMWORD PTR [rcx], zmm30, 0xab # AVX512DQ
vextractf64x2 XMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512DQ
vextractf64x2 XMMWORD PTR [rcx], zmm30, 123 # AVX512DQ
vextractf64x2 XMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512DQ
vextractf64x2 XMMWORD PTR [rdx+2032], zmm30, 123 # AVX512DQ Disp8
vextractf64x2 XMMWORD PTR [rdx+2048], zmm30, 123 # AVX512DQ
vextractf64x2 XMMWORD PTR [rdx-2048], zmm30, 123 # AVX512DQ Disp8
vextractf64x2 XMMWORD PTR [rdx-2064], zmm30, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [rcx], zmm30, 0xab # AVX512DQ
vextractf32x8 YMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512DQ
vextractf32x8 YMMWORD PTR [rcx], zmm30, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [rdx+4064], zmm30, 123 # AVX512DQ Disp8
vextractf32x8 YMMWORD PTR [rdx+4096], zmm30, 123 # AVX512DQ
vextractf32x8 YMMWORD PTR [rdx-4096], zmm30, 123 # AVX512DQ Disp8
vextractf32x8 YMMWORD PTR [rdx-4128], zmm30, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [rcx], zmm30, 0xab # AVX512DQ
vextracti64x2 XMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512DQ
vextracti64x2 XMMWORD PTR [rcx], zmm30, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [rdx+2032], zmm30, 123 # AVX512DQ Disp8
vextracti64x2 XMMWORD PTR [rdx+2048], zmm30, 123 # AVX512DQ
vextracti64x2 XMMWORD PTR [rdx-2048], zmm30, 123 # AVX512DQ Disp8
vextracti64x2 XMMWORD PTR [rdx-2064], zmm30, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [rcx], zmm30, 0xab # AVX512DQ
vextracti32x8 YMMWORD PTR [rcx]{k7}, zmm30, 0xab # AVX512DQ
vextracti32x8 YMMWORD PTR [rcx], zmm30, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [rax+r14*8+0x1234], zmm30, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [rdx+4064], zmm30, 123 # AVX512DQ Disp8
vextracti32x8 YMMWORD PTR [rdx+4096], zmm30, 123 # AVX512DQ
vextracti32x8 YMMWORD PTR [rdx-4096], zmm30, 123 # AVX512DQ Disp8
vextracti32x8 YMMWORD PTR [rdx-4128], zmm30, 123 # AVX512DQ
vcvttpd2qq zmm30, zmm29 # AVX512DQ
vcvttpd2qq zmm30{k7}, zmm29 # AVX512DQ
vcvttpd2qq zmm30{k7}{z}, zmm29 # AVX512DQ
vcvttpd2qq zmm30, zmm29{sae} # AVX512DQ
vcvttpd2qq zmm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvttpd2qq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvttpd2qq zmm30, qword bcst [rcx] # AVX512DQ
vcvttpd2qq zmm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvttpd2qq zmm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvttpd2qq zmm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvttpd2qq zmm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvttpd2qq zmm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvttpd2qq zmm30, qword bcst [rdx+1024] # AVX512DQ
vcvttpd2qq zmm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvttpd2qq zmm30, qword bcst [rdx-1032] # AVX512DQ
vcvttpd2uqq zmm30, zmm29 # AVX512DQ
vcvttpd2uqq zmm30{k7}, zmm29 # AVX512DQ
vcvttpd2uqq zmm30{k7}{z}, zmm29 # AVX512DQ
vcvttpd2uqq zmm30, zmm29{sae} # AVX512DQ
vcvttpd2uqq zmm30, ZMMWORD PTR [rcx] # AVX512DQ
vcvttpd2uqq zmm30, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvttpd2uqq zmm30, qword bcst [rcx] # AVX512DQ
vcvttpd2uqq zmm30, ZMMWORD PTR [rdx+8128] # AVX512DQ Disp8
vcvttpd2uqq zmm30, ZMMWORD PTR [rdx+8192] # AVX512DQ
vcvttpd2uqq zmm30, ZMMWORD PTR [rdx-8192] # AVX512DQ Disp8
vcvttpd2uqq zmm30, ZMMWORD PTR [rdx-8256] # AVX512DQ
vcvttpd2uqq zmm30, qword bcst [rdx+1016] # AVX512DQ Disp8
vcvttpd2uqq zmm30, qword bcst [rdx+1024] # AVX512DQ
vcvttpd2uqq zmm30, qword bcst [rdx-1024] # AVX512DQ Disp8
vcvttpd2uqq zmm30, qword bcst [rdx-1032] # AVX512DQ
vcvttps2qq zmm30, ymm29 # AVX512DQ
vcvttps2qq zmm30{k7}, ymm29 # AVX512DQ
vcvttps2qq zmm30{k7}{z}, ymm29 # AVX512DQ
vcvttps2qq zmm30, ymm29{sae} # AVX512DQ
vcvttps2qq zmm30, YMMWORD PTR [rcx] # AVX512DQ
vcvttps2qq zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvttps2qq zmm30, dword bcst [rcx] # AVX512DQ
vcvttps2qq zmm30, YMMWORD PTR [rdx+4064] # AVX512DQ Disp8
vcvttps2qq zmm30, YMMWORD PTR [rdx+4096] # AVX512DQ
vcvttps2qq zmm30, YMMWORD PTR [rdx-4096] # AVX512DQ Disp8
vcvttps2qq zmm30, YMMWORD PTR [rdx-4128] # AVX512DQ
vcvttps2qq zmm30, dword bcst [rdx+508] # AVX512DQ Disp8
vcvttps2qq zmm30, dword bcst [rdx+512] # AVX512DQ
vcvttps2qq zmm30, dword bcst [rdx-512] # AVX512DQ Disp8
vcvttps2qq zmm30, dword bcst [rdx-516] # AVX512DQ
vcvttps2qq zmm30, DWORD BCST [rdx+508] # AVX512DQ Disp8
vcvttps2uqq zmm30, ymm29 # AVX512DQ
vcvttps2uqq zmm30{k7}, ymm29 # AVX512DQ
vcvttps2uqq zmm30{k7}{z}, ymm29 # AVX512DQ
vcvttps2uqq zmm30, ymm29{sae} # AVX512DQ
vcvttps2uqq zmm30, YMMWORD PTR [rcx] # AVX512DQ
vcvttps2uqq zmm30, YMMWORD PTR [rax+r14*8+0x1234] # AVX512DQ
vcvttps2uqq zmm30, dword bcst [rcx] # AVX512DQ
vcvttps2uqq zmm30, YMMWORD PTR [rdx+4064] # AVX512DQ Disp8
vcvttps2uqq zmm30, YMMWORD PTR [rdx+4096] # AVX512DQ
vcvttps2uqq zmm30, YMMWORD PTR [rdx-4096] # AVX512DQ Disp8
vcvttps2uqq zmm30, YMMWORD PTR [rdx-4128] # AVX512DQ
vcvttps2uqq zmm30, dword bcst [rdx+508] # AVX512DQ Disp8
vcvttps2uqq zmm30, dword bcst [rdx+512] # AVX512DQ
vcvttps2uqq zmm30, dword bcst [rdx-512] # AVX512DQ Disp8
vcvttps2uqq zmm30, dword bcst [rdx-516] # AVX512DQ
vcvttps2uqq zmm30, DWORD BCST [rdx+508] # AVX512DQ Disp8
vpmovd2m k5, zmm30 # AVX512DQ
vpmovq2m k5, zmm30 # AVX512DQ
vpmovm2d zmm30, k5 # AVX512DQ
vpmovm2q zmm30, k5 # AVX512DQ
|
stsp/binutils-ia16
| 1,561
|
gas/testsuite/gas/i386/noextreg.s
|
.intel_syntax noprefix
.text
ix86:
vpand xmm0, xmm0, xmm0
.byte 0xc4, 0xc1, 0x79, 0xdb, 0xc0
.byte 0xc4, 0xc1, 0x39, 0xdb, 0xc0
vpandd xmm0, xmm0, xmm0
.byte 0x62, 0xd1, 0x7d, 0x08, 0xdb, 0xc0
.byte 0x62, 0xf1, 0x3d, 0x08, 0xdb, 0xc0
.byte 0x62, 0xf1, 0x7d, 0x00, 0xdb, 0xc0
vpblendvb xmm0, xmm0, xmm0, xmm0
.byte 0xc4, 0xc3, 0x79, 0x4c, 0xc0, 0x00
.byte 0xc4, 0xe3, 0x39, 0x4c, 0xc0, 0x00
.byte 0xc4, 0xe3, 0x79, 0x4c, 0xc0, 0x80
vpgatherdd xmm1{k7}, [eax+xmm0]
.byte 0x62, 0xd2, 0x7d, 0x0f, 0x90, 0x0c, 0x00
.byte 0x62, 0xf2, 0x7d, 0x07, 0x90, 0x0c, 0x00
andn eax, eax, [eax]
.byte 0xc4, 0xe2, 0x38, 0xf2, 0x00
.byte 0xc4, 0xc2, 0x78, 0xf2, 0x00
.byte 0xc4, 0xe2, 0xf8, 0xf2, 0x00
tzmsk eax, [eax]
.byte 0x8f, 0xc9, 0x78, 0x01, 0x20
.byte 0x8f, 0xe9, 0x38, 0x01, 0x20
.byte 0x8f, 0xe9, 0xf8, 0x01, 0x20
llwpcb eax
.byte 0x8f, 0xc9, 0x78, 0x12, 0xc0
.byte 0x8f, 0xe9, 0xf8, 0x12, 0xc0
vprotb xmm0, xmm0, 1
.byte 0x8f, 0xc8, 0x78, 0xc0, 0xc0, 0x01
vprotb xmm0, [eax], 1
.byte 0x8f, 0xc8, 0x78, 0xc0, 0x00, 0x01
vprotb xmm0, xmm0, xmm0
.byte 0x8f, 0xc9, 0xb8, 0x90, 0xc0
.byte 0x8f, 0xe9, 0x38, 0x90, 0xc0
vprotb xmm0, [eax], xmm0
.byte 0x8f, 0xc9, 0x78, 0x90, 0x00
vprotb xmm0, xmm0, [eax]
.byte 0x8f, 0xc9, 0xf8, 0x90, 0x00
vfmaddps xmm0, xmm0, [eax], xmm0
.byte 0xc4, 0xe3, 0x39, 0x68, 0x00, 0x00
.byte 0xc4, 0xe3, 0x79, 0x68, 0x00, 0x80
.byte 0xc4, 0xe3, 0x79, 0x68, 0x00, 0x0f
vpermil2ps xmm0, xmm0, [eax], xmm0, 0
.byte 0xc4, 0xe3, 0x39, 0x48, 0x00, 0x00
.byte 0xc4, 0xe3, 0x79, 0x48, 0x00, 0x80
ret
|
stsp/binutils-ia16
| 1,202
|
gas/testsuite/gas/i386/avx512_fp16-inval-bcast.s
|
# Check error for invalid {1toXX} and {2toXX} broadcasts.  Every statement below must be rejected (broadcast multiplier inconsistent with the element size/count of the instruction form).  NOTE(review): error-match tests typically key off source line numbers -- do not add, remove, or reorder lines.
.allow_index_reg
.text
_start:
vcvtpd2ph (%ecx){1to16}, %xmm3
vcvtuqq2ph -1024(%edx){1to32}, %xmm3
vcvtdq2ph (%ecx){1to8}, %ymm3
vcvtudq2ph -512(%edx){1to32}, %ymm3
vcmpph $123, (%ecx){1to16}, %zmm2, %k5
vcmpph $123, (%ecx){1to64}, %zmm2, %k5
vfmadd132ph (%ecx){1to8}, %zmm2, %zmm3
vfcmaddcph (%ecx){1to8}, %zmm2, %zmm3
vfcmulcph (%ecx){1to32}, %zmm2, %zmm3
vcvtdq2ph (%ecx){1to8}, %ymm3
vfmaddcph (%ecx){1to8}, %zmm2, %zmm3
vfmulcph -512(%edx){1to32}, %zmm2, %zmm3
vfmulcph -512(%edx){1to4}, %zmm2, %zmm3
.intel_syntax noprefix
vcvtpd2ph xmm3, QWORD PTR [ecx]{1to16}
vcvtuqq2ph xmm3, QWORD PTR [edx-1024]{1to32}
vcvtdq2ph ymm3, DWORD PTR [ecx]{1to8}
vcvtudq2ph ymm3, DWORD PTR [edx-512]{1to32}
vcmpph k5, zmm2, WORD PTR [edx-256]{1to16}, 123
vcmpph k5, zmm2, WORD PTR [edx-256]{1to64}, 123
vfmsubadd231ph zmm3, zmm2, WORD PTR [edx-256]{1to8}
vfcmaddcph zmm3, zmm2, DWORD PTR [ecx]{1to8}
vfcmulcph zmm3, zmm2, DWORD PTR [ecx]{1to32}
vcvtdq2ph ymm3, DWORD PTR [ecx]{1to8}
vfcmaddcph zmm3, zmm2, DWORD PTR [ecx]{1to8}
vfmulcph zmm3, zmm2, DWORD PTR [edx-512]{1to32}
vfmulcph zmm3, zmm2, DWORD PTR [edx-512]{1to4}
|
stsp/binutils-ia16
| 1,228
|
gas/testsuite/gas/i386/x86-64-avx512_fp16-inval-bcast.s
|
# Check error for invalid {1toXX} and {2toXX} broadcasts.  64-bit variant of the i386 test, using high (EVEX-only) registers; every statement must be diagnosed.  NOTE(review): error-match tests typically key off source line numbers -- do not add, remove, or reorder lines.
.allow_index_reg
.text
_start:
vcvtpd2ph (%ecx){1to16}, %xmm30
vcvtuqq2ph -1024(%edx){1to32}, %xmm30
vcvtdq2ph (%ecx){1to8}, %ymm30
vcvtudq2ph -512(%edx){1to32}, %ymm30
vcmpph $123, (%ecx){1to16}, %zmm29, %k5
vcmpph $123, (%ecx){1to64}, %zmm29, %k5
vfmadd132ph (%ecx){1to8}, %zmm29, %zmm3
vfcmaddcph (%ecx){1to8}, %zmm29, %zmm3
vfcmulcph (%ecx){1to32}, %zmm29, %zmm3
vcvtdq2ph (%ecx){1to8}, %ymm30
vfmaddcph (%ecx){1to8}, %zmm29, %zmm3
vfmulcph -512(%edx){1to32}, %zmm29, %zmm3
vfmulcph -512(%edx){1to4}, %zmm29, %zmm3
.intel_syntax noprefix
vcvtpd2ph xmm30, QWORD PTR [ecx]{1to16}
vcvtuqq2ph xmm30, QWORD PTR [edx-1024]{1to32}
vcvtdq2ph ymm30, DWORD PTR [ecx]{1to8}
vcvtudq2ph ymm30, DWORD PTR [edx-512]{1to32}
vcmpph k5, zmm29, WORD PTR [edx-256]{1to16}, 123
vcmpph k5, zmm29, WORD PTR [edx-256]{1to64}, 123
vfmsubadd231ph zmm30, zmm2, WORD PTR [edx-256]{1to8} # NOTE(review): zmm30/zmm2 deviates from the zmm3/zmm29 pattern used above -- confirm intentional
vfcmaddcph zmm3, zmm29, DWORD PTR [ecx]{1to8}
vfcmulcph zmm3, zmm29, DWORD PTR [ecx]{1to32}
vcvtdq2ph ymm30, DWORD PTR [ecx]{1to8}
vfcmaddcph zmm30, zmm2, DWORD PTR [ecx]{1to8} # NOTE(review): register choice differs from line above and from the i386 twin test -- confirm intentional
vfmulcph zmm30, zmm2, DWORD PTR [edx-512]{1to32}
vfmulcph zmm30, zmm2, DWORD PTR [edx-512]{1to4}
|
stsp/binutils-ia16
| 2,210
|
gas/testsuite/gas/i386/sg.s
|
.text
.intel_syntax noprefix
# gas testsuite input (sg.s): AVX-512 gather/scatter memory-operand sizing in
# Intel syntax.  Each gather/scatter is spelled with the elemental size
# (qword/dword ptr) and with the full-vector size (xmmword ptr), in three
# forms per size: VEX gather (3 operands), EVEX masked gather ({k7}), and
# EVEX scatter.  NOTE(review): matched against this test's expected output --
# keep instruction text byte-for-byte stable.
sg:
vgatherdpd xmm2, qword ptr [eax+xmm1], xmm0
vgatherdpd xmm2{k7}, qword ptr [eax+xmm1]
vscatterdpd qword ptr [eax+xmm1]{k7}, xmm0
vgatherdpd xmm2, xmmword ptr [eax+xmm1], xmm0
vgatherdpd xmm2{k7}, xmmword ptr [eax+xmm1]
vscatterdpd xmmword ptr [eax+xmm1]{k7}, xmm0
vgatherdps xmm2, dword ptr [eax+xmm1], xmm0
vgatherdps xmm2{k7}, dword ptr [eax+xmm1]
vscatterdps dword ptr [eax+xmm1]{k7}, xmm0
vgatherdps xmm2, xmmword ptr [eax+xmm1], xmm0
vgatherdps xmm2{k7}, xmmword ptr [eax+xmm1]
vscatterdps xmmword ptr [eax+xmm1]{k7}, xmm0
vgatherqpd xmm2, qword ptr [eax+xmm1], xmm0
vgatherqpd xmm2{k7}, qword ptr [eax+xmm1]
vscatterqpd qword ptr [eax+xmm1]{k7}, xmm0
vgatherqpd xmm2, xmmword ptr [eax+xmm1], xmm0
vgatherqpd xmm2{k7}, xmmword ptr [eax+xmm1]
vscatterqpd xmmword ptr [eax+xmm1]{k7}, xmm0
vgatherqps xmm2, dword ptr [eax+xmm1], xmm0
vgatherqps xmm2{k7}, dword ptr [eax+xmm1]
vscatterqps dword ptr [eax+xmm1]{k7}, xmm0
vgatherqps xmm2, xmmword ptr [eax+xmm1], xmm0
vgatherqps xmm2{k7}, xmmword ptr [eax+xmm1]
vscatterqps xmmword ptr [eax+xmm1]{k7}, xmm0
vpgatherdd xmm2, dword ptr [eax+xmm1], xmm0
vpgatherdd xmm2{k7}, dword ptr [eax+xmm1]
vpscatterdd dword ptr [eax+xmm1]{k7}, xmm0
vpgatherdd xmm2, xmmword ptr [eax+xmm1], xmm0
vpgatherdd xmm2{k7}, xmmword ptr [eax+xmm1]
vpscatterdd xmmword ptr [eax+xmm1]{k7}, xmm0
vpgatherdq xmm2, qword ptr [eax+xmm1], xmm0
vpgatherdq xmm2{k7}, qword ptr [eax+xmm1]
vpscatterdq qword ptr [eax+xmm1]{k7}, xmm0
vpgatherdq xmm2, xmmword ptr [eax+xmm1], xmm0
vpgatherdq xmm2{k7}, xmmword ptr [eax+xmm1]
vpscatterdq xmmword ptr [eax+xmm1]{k7}, xmm0
vpgatherqd xmm2, dword ptr [eax+xmm1], xmm0
vpgatherqd xmm2{k7}, dword ptr [eax+xmm1]
vpscatterqd dword ptr [eax+xmm1]{k7}, xmm0
vpgatherqd xmm2, xmmword ptr [eax+xmm1], xmm0
vpgatherqd xmm2{k7}, xmmword ptr [eax+xmm1]
vpscatterqd xmmword ptr [eax+xmm1]{k7}, xmm0
vpgatherqq xmm2, qword ptr [eax+xmm1], xmm0
vpgatherqq xmm2{k7}, qword ptr [eax+xmm1]
vpscatterqq qword ptr [eax+xmm1]{k7}, xmm0
vpgatherqq xmm2, xmmword ptr [eax+xmm1], xmm0
vpgatherqq xmm2{k7}, xmmword ptr [eax+xmm1]
vpscatterqq xmmword ptr [eax+xmm1]{k7}, xmm0
|
stsp/binutils-ia16
| 9,776
|
gas/testsuite/gas/i386/pseudos.s
|
# Check instructions with pseudo prefixes for encoding
# gas testsuite input (pseudos.s): exercises the {vex}/{vex2}/{vex3}/{evex},
# {load}/{store}, and {disp8}/{disp16}/{disp32} pseudo prefixes, which select
# among equivalent encodings without changing semantics.  Paired with an
# expected-disassembly dump, so instruction text must stay byte-for-byte
# stable.
.text
_start:
# VEX length selection and directionality ({load}/{store} picks which of the
# two reg-reg opcode forms is emitted).
{vex3} vmovaps %xmm7,%xmm2
{vex3} {load} vmovaps %xmm7,%xmm2
{vex3} {store} vmovaps %xmm7,%xmm2
vmovaps %xmm7,%xmm2
{vex} vmovaps %xmm7,%xmm2
{vex} {load} vmovaps %xmm7,%xmm2
{vex} {store} vmovaps %xmm7,%xmm2
{vex3} vmovaps (%eax),%xmm2
vmovaps (%eax),%xmm2
{vex2} vmovaps (%eax),%xmm2
{evex} vmovaps (%eax),%xmm2
{disp32} vmovaps (%eax),%xmm2
{evex} {disp8} vmovaps (%eax),%xmm2
{evex} {disp32} vmovaps (%eax),%xmm2
{vex} {disp8} vmovaps 128(%eax),%xmm2
{vex} {disp32} vmovaps 128(%eax),%xmm2
{evex} {disp8} vmovaps 128(%eax),%xmm2
{evex} {disp16} vmovaps 128(%bx),%xmm2
{evex} {disp32} vmovaps 128(%eax),%xmm2
# {load}/{store} on the basic ALU reg-reg forms (opcode direction bit).
mov %ecx, %eax
{load} mov %ecx, %eax
{store} mov %ecx, %eax
adc %ecx, %eax
{load} adc %ecx, %eax
{store} adc %ecx, %eax
add %ecx, %eax
{load} add %ecx, %eax
{store} add %ecx, %eax
and %ecx, %eax
{load} and %ecx, %eax
{store} and %ecx, %eax
cmp %ecx, %eax
{load} cmp %ecx, %eax
{store} cmp %ecx, %eax
or %ecx, %eax
{load} or %ecx, %eax
{store} or %ecx, %eax
sbb %ecx, %eax
{load} sbb %ecx, %eax
{store} sbb %ecx, %eax
sub %ecx, %eax
{load} sub %ecx, %eax
{store} sub %ecx, %eax
xor %ecx, %eax
{load} xor %ecx, %eax
{store} xor %ecx, %eax
# {load}/{store} with absolute, memory, segment, control and debug operands.
{load} mov 0x12345678, %eax
{load} mov %eax, 0x12345678
{store} mov 0x12345678, %eax
{store} mov %eax, 0x12345678
{load} mov %eax, (%edi)
{load} mov (%edi), %eax
{store} mov %eax, (%edi)
{store} mov (%edi), %eax
{load} mov %es, %edi
{load} mov %eax, %gs
{store} mov %es, %edi
{store} mov %eax, %gs
{load} mov %cr0, %edi
{load} mov %eax, %cr7
{store} mov %cr0, %edi
{store} mov %eax, %cr7
{load} mov %dr0, %edi
{load} mov %eax, %dr7
{store} mov %dr0, %edi
{store} mov %eax, %dr7
{load} kmovb %k0, %edi
{load} kmovb %eax, %k7
{store} kmovb %k0, %edi
{store} kmovb %eax, %k7
{load} kmovd %k0, %edi
{load} kmovd %eax, %k7
{store} kmovd %k0, %edi
{store} kmovd %eax, %k7
{load} kmovw %k0, %edi
{load} kmovw %eax, %k7
{store} kmovw %k0, %edi
{store} kmovw %eax, %k7
{load} kmovb %k0, %k7
{store} kmovb %k0, %k7
{load} kmovd %k0, %k7
{store} kmovd %k0, %k7
{load} kmovq %k0, %k7
{store} kmovq %k0, %k7
{load} kmovw %k0, %k7
{store} kmovw %k0, %k7
{load} adc %eax, (%edi)
{load} adc (%edi), %eax
{store} adc %eax, (%edi)
{store} adc (%edi), %eax
{load} add %eax, (%edi)
{load} add (%edi), %eax
{store} add %eax, (%edi)
{store} add (%edi), %eax
{load} and %eax, (%edi)
{load} and (%edi), %eax
{store} and %eax, (%edi)
{store} and (%edi), %eax
{load} cmp %eax, (%edi)
{load} cmp (%edi), %eax
{store} cmp %eax, (%edi)
{store} cmp (%edi), %eax
{load} or %eax, (%edi)
{load} or (%edi), %eax
{store} or %eax, (%edi)
{store} or (%edi), %eax
{load} sbb %eax, (%edi)
{load} sbb (%edi), %eax
{store} sbb %eax, (%edi)
{store} sbb (%edi), %eax
{load} sub %eax, (%edi)
{load} sub (%edi), %eax
{store} sub %eax, (%edi)
{store} sub (%edi), %eax
{load} xor %eax, (%edi)
{load} xor (%edi), %eax
{store} xor %eax, (%edi)
{store} xor (%edi), %eax
# x87 two-operand register forms (D-bit selection via {load}/{store}).
fadd %st, %st
{load} fadd %st, %st
{store} fadd %st, %st
fdiv %st, %st
{load} fdiv %st, %st
{store} fdiv %st, %st
fdivr %st, %st
{load} fdivr %st, %st
{store} fdivr %st, %st
fmul %st, %st
{load} fmul %st, %st
{store} fmul %st, %st
fsub %st, %st
{load} fsub %st, %st
{store} fsub %st, %st
fsubr %st, %st
{load} fsubr %st, %st
{store} fsubr %st, %st
# MMX/SSE/AVX/AVX512 register-register move forms.
movq %mm0, %mm7
{load} movq %mm0, %mm7
{store} movq %mm0, %mm7
movaps %xmm0, %xmm7
{load} movaps %xmm0, %xmm7
{store} movaps %xmm0, %xmm7
movups %xmm0, %xmm7
{load} movups %xmm0, %xmm7
{store} movups %xmm0, %xmm7
movss %xmm0, %xmm7
{load} movss %xmm0, %xmm7
{store} movss %xmm0, %xmm7
movapd %xmm0, %xmm7
{load} movapd %xmm0, %xmm7
{store} movapd %xmm0, %xmm7
movupd %xmm0, %xmm7
{load} movupd %xmm0, %xmm7
{store} movupd %xmm0, %xmm7
movsd %xmm0, %xmm7
{load} movsd %xmm0, %xmm7
{store} movsd %xmm0, %xmm7
movdqa %xmm0, %xmm7
{load} movdqa %xmm0, %xmm7
{store} movdqa %xmm0, %xmm7
movdqu %xmm0, %xmm7
{load} movdqu %xmm0, %xmm7
{store} movdqu %xmm0, %xmm7
movq %xmm0, %xmm7
{load} movq %xmm0, %xmm7
{store} movq %xmm0, %xmm7
vmovaps %xmm0, %xmm7
{load} vmovaps %xmm0, %xmm7
{store} vmovaps %xmm0, %xmm7
vmovaps %zmm0, %zmm7
{load} vmovaps %zmm0, %zmm7
{store} vmovaps %zmm0, %zmm7
vmovaps %xmm0, %xmm7{%k7}
{load} vmovaps %xmm0, %xmm7{%k7}
{store} vmovaps %xmm0, %xmm7{%k7}
vmovups %zmm0, %zmm7
{load} vmovups %zmm0, %zmm7
{store} vmovups %zmm0, %zmm7
vmovups %xmm0, %xmm7
{load} vmovups %xmm0, %xmm7
{store} vmovups %xmm0, %xmm7
vmovups %xmm0, %xmm7{%k7}
{load} vmovups %xmm0, %xmm7{%k7}
{store} vmovups %xmm0, %xmm7{%k7}
vmovss %xmm0, %xmm1, %xmm7
{load} vmovss %xmm0, %xmm1, %xmm7
{store} vmovss %xmm0, %xmm1, %xmm7
vmovss %xmm0, %xmm1, %xmm7{%k7}
{load} vmovss %xmm0, %xmm1, %xmm7{%k7}
{store} vmovss %xmm0, %xmm1, %xmm7{%k7}
vmovapd %xmm0, %xmm7
{load} vmovapd %xmm0, %xmm7
{store} vmovapd %xmm0, %xmm7
vmovapd %zmm0, %zmm7
{load} vmovapd %zmm0, %zmm7
{store} vmovapd %zmm0, %zmm7
vmovapd %xmm0, %xmm7{%k7}
{load} vmovapd %xmm0, %xmm7{%k7}
{store} vmovapd %xmm0, %xmm7{%k7}
vmovupd %xmm0, %xmm7
{load} vmovupd %xmm0, %xmm7
{store} vmovupd %xmm0, %xmm7
vmovupd %zmm0, %zmm7
{load} vmovupd %zmm0, %zmm7
{store} vmovupd %zmm0, %zmm7
vmovupd %xmm0, %xmm7{%k7}
{load} vmovupd %xmm0, %xmm7{%k7}
{store} vmovupd %xmm0, %xmm7{%k7}
vmovsd %xmm0, %xmm1, %xmm7
{load} vmovsd %xmm0, %xmm1, %xmm7
{store} vmovsd %xmm0, %xmm1, %xmm7
vmovsd %xmm0, %xmm1, %xmm7{%k7}
{load} vmovsd %xmm0, %xmm1, %xmm7{%k7}
{store} vmovsd %xmm0, %xmm1, %xmm7{%k7}
vmovdqa %xmm0, %xmm7
{load} vmovdqa %xmm0, %xmm7
{store} vmovdqa %xmm0, %xmm7
vmovdqa32 %zmm0, %zmm7
{load} vmovdqa32 %zmm0, %zmm7
{store} vmovdqa32 %zmm0, %zmm7
vmovdqa64 %zmm0, %zmm7
{load} vmovdqa64 %zmm0, %zmm7
{store} vmovdqa64 %zmm0, %zmm7
vmovdqa64 %xmm0, %xmm7
{load} vmovdqa64 %xmm0, %xmm7
{store} vmovdqa64 %xmm0, %xmm7
vmovdqu %xmm0, %xmm7
{load} vmovdqu %xmm0, %xmm7
{store} vmovdqu %xmm0, %xmm7
vmovdqu8 %zmm0, %zmm7
{load} vmovdqu8 %zmm0, %zmm7
{store} vmovdqu8 %zmm0, %zmm7
vmovdqu8 %xmm0, %xmm7
{load} vmovdqu8 %xmm0, %xmm7
{store} vmovdqu8 %zmm0, %zmm7 # NOTE(review): zmm breaks the xmm pattern of this triplet -- confirm against the expected dump
vmovdqu16 %zmm0, %zmm7
{load} vmovdqu16 %zmm0, %zmm7
{store} vmovdqu16 %zmm0, %zmm7
vmovdqu16 %xmm0, %xmm7
{load} vmovdqu16 %xmm0, %xmm7
{store} vmovdqu16 %xmm0, %xmm7
vmovdqu32 %zmm0, %zmm7
{load} vmovdqu32 %zmm0, %zmm7
{store} vmovdqu32 %zmm0, %zmm7
vmovdqu32 %xmm0, %xmm7
{load} vmovdqu32 %xmm0, %xmm7
{store} vmovdqu32 %xmm0, %xmm7
vmovdqu64 %zmm0, %zmm7
{load} vmovdqu64 %zmm0, %zmm7
{store} vmovdqu64 %zmm0, %zmm7
vmovdqu64 %xmm0, %xmm7
{load} vmovdqu64 %xmm0, %xmm7
{store} vmovdqu64 %xmm0, %xmm7
vmovq %xmm0, %xmm7
{load} vmovq %xmm0, %xmm7
{store} vmovq %xmm0, %xmm7
{evex} vmovq %xmm0, %xmm7
{load} {evex} vmovq %xmm0, %xmm7
{store} {evex} vmovq %xmm0, %xmm7
# pextrw/vpextrw (reg destination has two encodings) and bndmov.
pextrw $0, %xmm0, %edi
{load} pextrw $0, %xmm0, %edi
{store} pextrw $0, %xmm0, %edi
vpextrw $0, %xmm0, %edi
{load} vpextrw $0, %xmm0, %edi
{store} vpextrw $0, %xmm0, %edi
{evex} vpextrw $0, %xmm0, %edi
{load} {evex} vpextrw $0, %xmm0, %edi
{store} {evex} vpextrw $0, %xmm0, %edi
bndmov %bnd3, %bnd0
{load} bndmov %bnd3, %bnd0
{store} bndmov %bnd3, %bnd0
# Displacement-size pseudo prefixes on legacy encodings.
movaps (%eax),%xmm2
{load} movaps (%eax),%xmm2
{store} movaps (%eax),%xmm2
{disp8} movaps (%eax),%xmm2
{disp32} movaps (%eax),%xmm2
movaps -1(%eax),%xmm2
{disp8} movaps -1(%eax),%xmm2
{disp32} movaps -1(%eax),%xmm2
movaps 128(%eax),%xmm2
{disp8} movaps 128(%eax),%xmm2
{disp32} movaps 128(%eax),%xmm2
movb (%ebp),%al
{disp8} movb (%ebp),%al
{disp32} movb (%ebp),%al
movb (%si),%al
{disp8} movb (%si),%al
{disp16} movb (%si),%al
movb (%di),%al
{disp8} movb (%di),%al
{disp16} movb (%di),%al
movb (%bx),%al
{disp8} movb (%bx),%al
{disp16} movb (%bx),%al
movb (%bp),%al
{disp8} movb (%bp),%al
{disp16} movb (%bp),%al
# Same coverage repeated in Intel syntax.
.intel_syntax noprefix
{vex3} vmovaps xmm2,xmm7
{vex3} {load} vmovaps xmm2,xmm7
{vex3} {store} vmovaps xmm2,xmm7
vmovaps xmm2,xmm7
{vex2} vmovaps xmm2,xmm7
{vex2} {load} vmovaps xmm2,xmm7
{vex2} {store} vmovaps xmm2,xmm7
{vex3} vmovaps xmm2,XMMWORD PTR [eax]
vmovaps xmm2,XMMWORD PTR [eax]
{vex2} vmovaps xmm2,XMMWORD PTR [eax]
{evex} vmovaps xmm2,XMMWORD PTR [eax]
{disp32} vmovaps xmm2,XMMWORD PTR [eax]
{evex} {disp8} vmovaps xmm2,XMMWORD PTR [eax]
{evex} {disp32} vmovaps xmm2,XMMWORD PTR [eax]
{vex} {disp8} vmovaps xmm2,XMMWORD PTR [eax+128]
{vex} {disp32} vmovaps xmm2,XMMWORD PTR [eax+128]
{evex} {disp8} vmovaps xmm2,XMMWORD PTR [eax+128]
{evex} {disp16} vmovaps xmm2,XMMWORD PTR [bx+128]
{evex} {disp32} vmovaps xmm2,XMMWORD PTR [eax+128]
mov eax,ecx
{load} mov eax,ecx
{store} mov eax,ecx
movaps xmm2,XMMWORD PTR [eax]
{load} movaps xmm2,XMMWORD PTR [eax]
{store} movaps xmm2,XMMWORD PTR [eax]
{disp8} movaps xmm2,XMMWORD PTR [eax]
{disp32} movaps xmm2,XMMWORD PTR [eax]
movaps xmm2,XMMWORD PTR [eax-1]
{disp8} movaps xmm2,XMMWORD PTR [eax-1]
{disp32} movaps xmm2,XMMWORD PTR [eax-1]
movaps xmm2,XMMWORD PTR [eax+128]
{disp8} movaps xmm2,XMMWORD PTR [eax+128]
{disp32} movaps xmm2,XMMWORD PTR [eax+128]
mov al, BYTE PTR [ebp]
{disp8} mov al, BYTE PTR [ebp]
{disp32} mov al, BYTE PTR [ebp]
mov al, BYTE PTR [si]
{disp8} mov al, BYTE PTR [si]
{disp16} mov al, BYTE PTR [si]
mov al, BYTE PTR [di]
{disp8} mov al, BYTE PTR [di]
{disp16} mov al, BYTE PTR [di]
mov al, BYTE PTR [bx]
{disp8} mov al, BYTE PTR [bx]
{disp16} mov al, BYTE PTR [bx]
mov al, BYTE PTR [bp]
{disp8} mov al, BYTE PTR [bp]
{disp16} mov al, BYTE PTR [bp]
# Branch displacement sizing, then 16-bit mode; trailing -1 bytes pad the dump.
{disp32} jmp .
.code16
{disp16} jmp .
.byte -1, -1
|
stsp/binutils-ia16
| 2,831
|
gas/testsuite/gas/i386/noreg16.s
|
# pfx INSN...: emit INSN prefixed with "data32" when the test is assembled
# with DATA32 defined (gas -defsym DATA32=...); otherwise emit INSN as-is.
# Lets one instruction list below drive both the plain and the
# operand-size-overridden test variants.
.macro pfx insn:vararg
.ifdef DATA32
data32 \insn
.else
\insn
.endif
.endm
# gas testsuite input (noreg16.s): 16-bit-mode instructions whose memory
# operand carries no register to deduce an operand size from; checks gas's
# defaulting/diagnostics, optionally under a data32 prefix via the pfx macro.
# NOTE(review): lines without pfx (cvtsi2sd/ss, movsx/movzx to %eax, ptwrite,
# vcvt*si2*) take their size from a register operand -- presumably why the
# prefix wrapper is omitted there; confirm against the expected outputs.
.text
.code16
noreg:
pfx adc $1, (%bx)
pfx adc $0x89, (%bx)
pfx adc $0x1234, (%bx)
pfx add $1, (%bx)
pfx add $0x89, (%bx)
pfx add $0x1234, (%bx)
pfx and $1, (%bx)
pfx and $0x89, (%bx)
pfx and $0x1234, (%bx)
pfx bt $1, (%bx)
pfx btc $1, (%bx)
pfx btr $1, (%bx)
pfx bts $1, (%bx)
pfx call *(%bx)
pfx cmp $1, (%bx)
pfx cmp $0x89, (%bx)
pfx cmp $0x1234, (%bx)
pfx cmps
pfx cmps %es:(%di), (%si)
pfx crc32 (%bx), %eax
cvtsi2sd (%bx), %xmm0
cvtsi2ss (%bx), %xmm0
pfx dec (%bx)
pfx div (%bx)
pfx fadd (%bx)
pfx fcom (%bx)
pfx fcomp (%bx)
pfx fdiv (%bx)
pfx fdivr (%bx)
pfx fiadd (%bx)
pfx ficom (%bx)
pfx ficomp (%bx)
pfx fidiv (%bx)
pfx fidivr (%bx)
pfx fild (%bx)
pfx fimul (%bx)
pfx fist (%bx)
pfx fistp (%bx)
pfx fisttp (%bx)
pfx fisub (%bx)
pfx fisubr (%bx)
pfx fld (%bx)
pfx fmul (%bx)
pfx fst (%bx)
pfx fstp (%bx)
pfx fsub (%bx)
pfx fsubr (%bx)
pfx idiv (%bx)
pfx imul (%bx)
pfx in $0
pfx in %dx
pfx inc (%bx)
pfx ins
pfx ins %dx, %es:(%di)
pfx jmp *(%bx)
pfx lgdt (%bx)
pfx lidt (%bx)
pfx lldt (%bx)
pfx lmsw (%bx)
pfx lods
pfx lods (%si)
pfx ltr (%bx)
pfx mov $0x12, (%bx)
pfx mov $0x1234, (%bx)
pfx mov %es, (%bx)
pfx mov (%bx), %es
pfx movs
pfx movs (%si), %es:(%di)
pfx movsx (%bx), %ax
movsx (%bx), %eax
pfx movzx (%bx), %ax
movzx (%bx), %eax
pfx mul (%bx)
pfx neg (%bx)
pfx nop (%bx)
pfx not (%bx)
pfx or $1, (%bx)
pfx or $0x89, (%bx)
pfx or $0x1234, (%bx)
pfx out $0
pfx out %dx
pfx outs
pfx outs (%si), %dx
pfx pop (%bx)
pfx pop %es
ptwrite (%bx)
pfx push (%bx)
pfx push %es
pfx rcl $1, (%bx)
pfx rcl $2, (%bx)
pfx rcl %cl, (%bx)
pfx rcl (%bx)
pfx rcr $1, (%bx)
pfx rcr $2, (%bx)
pfx rcr %cl, (%bx)
pfx rcr (%bx)
pfx rol $1, (%bx)
pfx rol $2, (%bx)
pfx rol %cl, (%bx)
pfx rol (%bx)
pfx ror $1, (%bx)
pfx ror $2, (%bx)
pfx ror %cl, (%bx)
pfx ror (%bx)
pfx sbb $1, (%bx)
pfx sbb $0x89, (%bx)
pfx sbb $0x1234, (%bx)
pfx scas
pfx scas %es:(%di)
pfx sal $1, (%bx)
pfx sal $2, (%bx)
pfx sal %cl, (%bx)
pfx sal (%bx)
pfx sar $1, (%bx)
pfx sar $2, (%bx)
pfx sar %cl, (%bx)
pfx sar (%bx)
pfx shl $1, (%bx)
pfx shl $2, (%bx)
pfx shl %cl, (%bx)
pfx shl (%bx)
pfx shr $1, (%bx)
pfx shr $2, (%bx)
pfx shr %cl, (%bx)
pfx shr (%bx)
pfx stos
pfx stos %es:(%di)
pfx sub $1, (%bx)
pfx sub $0x89, (%bx)
pfx sub $0x1234, (%bx)
pfx test $0x89, (%bx)
pfx test $0x1234, (%bx)
vcvtsi2sd (%bx), %xmm0, %xmm0
{evex} vcvtsi2sd (%bx), %xmm0, %xmm0
vcvtsi2ss (%bx), %xmm0, %xmm0
{evex} vcvtsi2ss (%bx), %xmm0, %xmm0
vcvtusi2sd (%bx), %xmm0, %xmm0
vcvtusi2ss (%bx), %xmm0, %xmm0
pfx xor $1, (%bx)
pfx xor $0x89, (%bx)
pfx xor $0x1234, (%bx)
|
stsp/binutils-ia16
| 50,595
|
gas/testsuite/gas/i386/modrm.s
|
.psize 0
.text
mov %ds,%ss:(%eax)
mov %ds,%ss:(%ecx)
mov %ds,%ss:(%edx)
mov %ds,%ss:(%ebx)
mov %ds,%ss:0
mov %ds,%ss:(%esi)
mov %ds,%ss:(%edi)
mov %ds,%ss:0x12(%eax)
mov %ds,%ss:0x12(%ecx)
mov %ds,%ss:0x12(%edx)
mov %ds,%ss:0x12(%ebx)
mov %ds,%ss:0x12(%ebp)
mov %ds,%ss:0x12(%esi)
mov %ds,%ss:0x12(%edi)
mov %ds,%ss:0x12345678(%eax)
mov %ds,%ss:0x12345678(%ecx)
mov %ds,%ss:0x12345678(%edx)
mov %ds,%ss:0x12345678(%ebx)
mov %ds,%ss:0x12345678(%ebp)
mov %ds,%ss:0x12345678(%esi)
mov %ds,%ss:0x12345678(%edi)
mov %ds,%eax
mov %ds,%ecx
mov %ds,%edx
mov %ds,%ebx
mov %ds,%esp
mov %ds,%ebp
mov %ds,%esi
mov %ds,%edi
mov %ds,%ss:(%eax,%eax,1)
mov %ds,%ss:(%ecx,%eax,1)
mov %ds,%ss:(%edx,%eax,1)
mov %ds,%ss:(%ebx,%eax,1)
mov %ds,%ss:(%esp,%eax,1)
mov %ds,%ss:(,%eax,1)
mov %ds,%ss:(%esi,%eax,1)
mov %ds,%ss:(%edi,%eax,1)
mov %ds,%ss:(%eax,%ecx,1)
mov %ds,%ss:(%ecx,%ecx,1)
mov %ds,%ss:(%edx,%ecx,1)
mov %ds,%ss:(%ebx,%ecx,1)
mov %ds,%ss:(%esp,%ecx,1)
mov %ds,%ss:(,%ecx,1)
mov %ds,%ss:(%esi,%ecx,1)
mov %ds,%ss:(%edi,%ecx,1)
mov %ds,%ss:(%eax,%edx,1)
mov %ds,%ss:(%ecx,%edx,1)
mov %ds,%ss:(%edx,%edx,1)
mov %ds,%ss:(%ebx,%edx,1)
mov %ds,%ss:(%esp,%edx,1)
mov %ds,%ss:(,%edx,1)
mov %ds,%ss:(%esi,%edx,1)
mov %ds,%ss:(%edi,%edx,1)
mov %ds,%ss:(%eax,%ebx,1)
mov %ds,%ss:(%ecx,%ebx,1)
mov %ds,%ss:(%edx,%ebx,1)
mov %ds,%ss:(%ebx,%ebx,1)
mov %ds,%ss:(%esp,%ebx,1)
mov %ds,%ss:(,%ebx,1)
mov %ds,%ss:(%esi,%ebx,1)
mov %ds,%ss:(%edi,%ebx,1)
mov %ds,%ss:(%eax,1)
mov %ds,%ss:(%ecx,1)
mov %ds,%ss:(%edx,1)
mov %ds,%ss:(%ebx,1)
mov %ds,%ss:(%esp,1)
mov %ds,%ss:(,1)
mov %ds,%ss:(%esi,1)
mov %ds,%ss:(%edi,1)
mov %ds,%ss:(%eax,%ebp,1)
mov %ds,%ss:(%ecx,%ebp,1)
mov %ds,%ss:(%edx,%ebp,1)
mov %ds,%ss:(%ebx,%ebp,1)
mov %ds,%ss:(%esp,%ebp,1)
mov %ds,%ss:(,%ebp,1)
mov %ds,%ss:(%esi,%ebp,1)
mov %ds,%ss:(%edi,%ebp,1)
mov %ds,%ss:(%eax,%esi,1)
mov %ds,%ss:(%ecx,%esi,1)
mov %ds,%ss:(%edx,%esi,1)
mov %ds,%ss:(%ebx,%esi,1)
mov %ds,%ss:(%esp,%esi,1)
mov %ds,%ss:(,%esi,1)
mov %ds,%ss:(%esi,%esi,1)
mov %ds,%ss:(%edi,%esi,1)
mov %ds,%ss:(%eax,%edi,1)
mov %ds,%ss:(%ecx,%edi,1)
mov %ds,%ss:(%edx,%edi,1)
mov %ds,%ss:(%ebx,%edi,1)
mov %ds,%ss:(%esp,%edi,1)
mov %ds,%ss:(,%edi,1)
mov %ds,%ss:(%esi,%edi,1)
mov %ds,%ss:(%edi,%edi,1)
mov %ds,%ss:(%eax,%eax,2)
mov %ds,%ss:(%ecx,%eax,2)
mov %ds,%ss:(%edx,%eax,2)
mov %ds,%ss:(%ebx,%eax,2)
mov %ds,%ss:(%esp,%eax,2)
mov %ds,%ss:(,%eax,2)
mov %ds,%ss:(%esi,%eax,2)
mov %ds,%ss:(%edi,%eax,2)
mov %ds,%ss:(%eax,%ecx,2)
mov %ds,%ss:(%ecx,%ecx,2)
mov %ds,%ss:(%edx,%ecx,2)
mov %ds,%ss:(%ebx,%ecx,2)
mov %ds,%ss:(%esp,%ecx,2)
mov %ds,%ss:(,%ecx,2)
mov %ds,%ss:(%esi,%ecx,2)
mov %ds,%ss:(%edi,%ecx,2)
mov %ds,%ss:(%eax,%edx,2)
mov %ds,%ss:(%ecx,%edx,2)
mov %ds,%ss:(%edx,%edx,2)
mov %ds,%ss:(%ebx,%edx,2)
mov %ds,%ss:(%esp,%edx,2)
mov %ds,%ss:(,%edx,2)
mov %ds,%ss:(%esi,%edx,2)
mov %ds,%ss:(%edi,%edx,2)
mov %ds,%ss:(%eax,%ebx,2)
mov %ds,%ss:(%ecx,%ebx,2)
mov %ds,%ss:(%edx,%ebx,2)
mov %ds,%ss:(%ebx,%ebx,2)
mov %ds,%ss:(%esp,%ebx,2)
mov %ds,%ss:(,%ebx,2)
mov %ds,%ss:(%esi,%ebx,2)
mov %ds,%ss:(%edi,%ebx,2)
mov %ds,%ss:(%eax,2)
mov %ds,%ss:(%ecx,2)
mov %ds,%ss:(%edx,2)
mov %ds,%ss:(%ebx,2)
mov %ds,%ss:(%esp,2)
mov %ds,%ss:(,2)
mov %ds,%ss:(%esi,2)
mov %ds,%ss:(%edi,2)
mov %ds,%ss:(%eax,%ebp,2)
mov %ds,%ss:(%ecx,%ebp,2)
mov %ds,%ss:(%edx,%ebp,2)
mov %ds,%ss:(%ebx,%ebp,2)
mov %ds,%ss:(%esp,%ebp,2)
mov %ds,%ss:(,%ebp,2)
mov %ds,%ss:(%esi,%ebp,2)
mov %ds,%ss:(%edi,%ebp,2)
mov %ds,%ss:(%eax,%esi,2)
mov %ds,%ss:(%ecx,%esi,2)
mov %ds,%ss:(%edx,%esi,2)
mov %ds,%ss:(%ebx,%esi,2)
mov %ds,%ss:(%esp,%esi,2)
mov %ds,%ss:(,%esi,2)
mov %ds,%ss:(%esi,%esi,2)
mov %ds,%ss:(%edi,%esi,2)
mov %ds,%ss:(%eax,%edi,2)
mov %ds,%ss:(%ecx,%edi,2)
mov %ds,%ss:(%edx,%edi,2)
mov %ds,%ss:(%ebx,%edi,2)
mov %ds,%ss:(%esp,%edi,2)
mov %ds,%ss:(,%edi,2)
mov %ds,%ss:(%esi,%edi,2)
mov %ds,%ss:(%edi,%edi,2)
mov %ds,%ss:(%eax,%eax,4)
mov %ds,%ss:(%ecx,%eax,4)
mov %ds,%ss:(%edx,%eax,4)
mov %ds,%ss:(%ebx,%eax,4)
mov %ds,%ss:(%esp,%eax,4)
mov %ds,%ss:(,%eax,4)
mov %ds,%ss:(%esi,%eax,4)
mov %ds,%ss:(%edi,%eax,4)
mov %ds,%ss:(%eax,%ecx,4)
mov %ds,%ss:(%ecx,%ecx,4)
mov %ds,%ss:(%edx,%ecx,4)
mov %ds,%ss:(%ebx,%ecx,4)
mov %ds,%ss:(%esp,%ecx,4)
mov %ds,%ss:(,%ecx,4)
mov %ds,%ss:(%esi,%ecx,4)
mov %ds,%ss:(%edi,%ecx,4)
mov %ds,%ss:(%eax,%edx,4)
mov %ds,%ss:(%ecx,%edx,4)
mov %ds,%ss:(%edx,%edx,4)
mov %ds,%ss:(%ebx,%edx,4)
mov %ds,%ss:(%esp,%edx,4)
mov %ds,%ss:(,%edx,4)
mov %ds,%ss:(%esi,%edx,4)
mov %ds,%ss:(%edi,%edx,4)
mov %ds,%ss:(%eax,%ebx,4)
mov %ds,%ss:(%ecx,%ebx,4)
mov %ds,%ss:(%edx,%ebx,4)
mov %ds,%ss:(%ebx,%ebx,4)
mov %ds,%ss:(%esp,%ebx,4)
mov %ds,%ss:(,%ebx,4)
mov %ds,%ss:(%esi,%ebx,4)
mov %ds,%ss:(%edi,%ebx,4)
mov %ds,%ss:(%eax,4)
mov %ds,%ss:(%ecx,4)
mov %ds,%ss:(%edx,4)
mov %ds,%ss:(%ebx,4)
mov %ds,%ss:(%esp,4)
mov %ds,%ss:(,4)
mov %ds,%ss:(%esi,4)
mov %ds,%ss:(%edi,4)
mov %ds,%ss:(%eax,%ebp,4)
mov %ds,%ss:(%ecx,%ebp,4)
mov %ds,%ss:(%edx,%ebp,4)
mov %ds,%ss:(%ebx,%ebp,4)
mov %ds,%ss:(%esp,%ebp,4)
mov %ds,%ss:(,%ebp,4)
mov %ds,%ss:(%esi,%ebp,4)
mov %ds,%ss:(%edi,%ebp,4)
mov %ds,%ss:(%eax,%esi,4)
mov %ds,%ss:(%ecx,%esi,4)
mov %ds,%ss:(%edx,%esi,4)
mov %ds,%ss:(%ebx,%esi,4)
mov %ds,%ss:(%esp,%esi,4)
mov %ds,%ss:(,%esi,4)
mov %ds,%ss:(%esi,%esi,4)
mov %ds,%ss:(%edi,%esi,4)
mov %ds,%ss:(%eax,%edi,4)
mov %ds,%ss:(%ecx,%edi,4)
mov %ds,%ss:(%edx,%edi,4)
mov %ds,%ss:(%ebx,%edi,4)
mov %ds,%ss:(%esp,%edi,4)
mov %ds,%ss:(,%edi,4)
mov %ds,%ss:(%esi,%edi,4)
mov %ds,%ss:(%edi,%edi,4)
mov %ds,%ss:(%eax,%eax,8)
mov %ds,%ss:(%ecx,%eax,8)
mov %ds,%ss:(%edx,%eax,8)
mov %ds,%ss:(%ebx,%eax,8)
mov %ds,%ss:(%esp,%eax,8)
mov %ds,%ss:(,%eax,8)
mov %ds,%ss:(%esi,%eax,8)
mov %ds,%ss:(%edi,%eax,8)
mov %ds,%ss:(%eax,%ecx,8)
mov %ds,%ss:(%ecx,%ecx,8)
mov %ds,%ss:(%edx,%ecx,8)
mov %ds,%ss:(%ebx,%ecx,8)
mov %ds,%ss:(%esp,%ecx,8)
mov %ds,%ss:(,%ecx,8)
mov %ds,%ss:(%esi,%ecx,8)
mov %ds,%ss:(%edi,%ecx,8)
mov %ds,%ss:(%eax,%edx,8)
mov %ds,%ss:(%ecx,%edx,8)
mov %ds,%ss:(%edx,%edx,8)
mov %ds,%ss:(%ebx,%edx,8)
mov %ds,%ss:(%esp,%edx,8)
mov %ds,%ss:(,%edx,8)
mov %ds,%ss:(%esi,%edx,8)
mov %ds,%ss:(%edi,%edx,8)
mov %ds,%ss:(%eax,%ebx,8)
mov %ds,%ss:(%ecx,%ebx,8)
mov %ds,%ss:(%edx,%ebx,8)
mov %ds,%ss:(%ebx,%ebx,8)
mov %ds,%ss:(%esp,%ebx,8)
mov %ds,%ss:(,%ebx,8)
mov %ds,%ss:(%esi,%ebx,8)
mov %ds,%ss:(%edi,%ebx,8)
mov %ds,%ss:(%eax,8)
mov %ds,%ss:(%ecx,8)
mov %ds,%ss:(%edx,8)
mov %ds,%ss:(%ebx,8)
mov %ds,%ss:(%esp,8)
mov %ds,%ss:(,8)
mov %ds,%ss:(%esi,8)
mov %ds,%ss:(%edi,8)
mov %ds,%ss:(%eax,%ebp,8)
mov %ds,%ss:(%ecx,%ebp,8)
mov %ds,%ss:(%edx,%ebp,8)
mov %ds,%ss:(%ebx,%ebp,8)
mov %ds,%ss:(%esp,%ebp,8)
mov %ds,%ss:(,%ebp,8)
mov %ds,%ss:(%esi,%ebp,8)
mov %ds,%ss:(%edi,%ebp,8)
mov %ds,%ss:(%eax,%esi,8)
mov %ds,%ss:(%ecx,%esi,8)
mov %ds,%ss:(%edx,%esi,8)
mov %ds,%ss:(%ebx,%esi,8)
mov %ds,%ss:(%esp,%esi,8)
mov %ds,%ss:(,%esi,8)
mov %ds,%ss:(%esi,%esi,8)
mov %ds,%ss:(%edi,%esi,8)
mov %ds,%ss:(%eax,%edi,8)
mov %ds,%ss:(%edx,%edi,8)
mov %ds,%ss:(%ecx,%edi,8)
mov %ds,%ss:(%ebx,%edi,8)
mov %ds,%ss:(%esp,%edi,8)
mov %ds,%ss:(,%edi,8)
mov %ds,%ss:(%esi,%edi,8)
mov %ds,%ss:(%edi,%edi,8)
mov %ds,%ss:0x12(%eax,%eax,1)
mov %ds,%ss:0x12(%ecx,%eax,1)
mov %ds,%ss:0x12(%edx,%eax,1)
mov %ds,%ss:0x12(%ebx,%eax,1)
mov %ds,%ss:0x12(%esp,%eax,1)
mov %ds,%ss:0x12(%ebp,%eax,1)
mov %ds,%ss:0x12(%esi,%eax,1)
mov %ds,%ss:0x12(%edi,%eax,1)
mov %ds,%ss:0x12(%eax,%ecx,1)
mov %ds,%ss:0x12(%ecx,%ecx,1)
mov %ds,%ss:0x12(%edx,%ecx,1)
mov %ds,%ss:0x12(%ebx,%ecx,1)
mov %ds,%ss:0x12(%esp,%ecx,1)
mov %ds,%ss:0x12(%ebp,%ecx,1)
mov %ds,%ss:0x12(%esi,%ecx,1)
mov %ds,%ss:0x12(%edi,%ecx,1)
mov %ds,%ss:0x12(%eax,%edx,1)
mov %ds,%ss:0x12(%ecx,%edx,1)
mov %ds,%ss:0x12(%edx,%edx,1)
mov %ds,%ss:0x12(%ebx,%edx,1)
mov %ds,%ss:0x12(%esp,%edx,1)
mov %ds,%ss:0x12(%ebp,%edx,1)
mov %ds,%ss:0x12(%esi,%edx,1)
mov %ds,%ss:0x12(%edi,%edx,1)
mov %ds,%ss:0x12(%eax,%ebx,1)
mov %ds,%ss:0x12(%ecx,%ebx,1)
mov %ds,%ss:0x12(%edx,%ebx,1)
mov %ds,%ss:0x12(%ebx,%ebx,1)
mov %ds,%ss:0x12(%esp,%ebx,1)
mov %ds,%ss:0x12(%ebp,%ebx,1)
mov %ds,%ss:0x12(%esi,%ebx,1)
mov %ds,%ss:0x12(%edi,%ebx,1)
mov %ds,%ss:0x12(%eax,1)
mov %ds,%ss:0x12(%ecx,1)
mov %ds,%ss:0x12(%edx,1)
mov %ds,%ss:0x12(%ebx,1)
mov %ds,%ss:0x12(%esp,1)
mov %ds,%ss:0x12(%ebp,1)
mov %ds,%ss:0x12(%esi,1)
mov %ds,%ss:0x12(%edi,1)
mov %ds,%ss:0x12(%eax,%ebp,1)
mov %ds,%ss:0x12(%ecx,%ebp,1)
mov %ds,%ss:0x12(%edx,%ebp,1)
mov %ds,%ss:0x12(%ebx,%ebp,1)
mov %ds,%ss:0x12(%esp,%ebp,1)
mov %ds,%ss:0x12(%ebp,%ebp,1)
mov %ds,%ss:0x12(%esi,%ebp,1)
mov %ds,%ss:0x12(%edi,%ebp,1)
mov %ds,%ss:0x12(%eax,%esi,1)
mov %ds,%ss:0x12(%ecx,%esi,1)
mov %ds,%ss:0x12(%edx,%esi,1)
mov %ds,%ss:0x12(%ebx,%esi,1)
mov %ds,%ss:0x12(%esp,%esi,1)
mov %ds,%ss:0x12(%ebp,%esi,1)
mov %ds,%ss:0x12(%esi,%esi,1)
mov %ds,%ss:0x12(%edi,%esi,1)
mov %ds,%ss:0x12(%eax,%edi,1)
mov %ds,%ss:0x12(%ecx,%edi,1)
mov %ds,%ss:0x12(%edx,%edi,1)
mov %ds,%ss:0x12(%ebx,%edi,1)
mov %ds,%ss:0x12(%esp,%edi,1)
mov %ds,%ss:0x12(%ebp,%edi,1)
mov %ds,%ss:0x12(%esi,%edi,1)
mov %ds,%ss:0x12(%edi,%edi,1)
mov %ds,%ss:0x12(%eax,%eax,2)
mov %ds,%ss:0x12(%ecx,%eax,2)
mov %ds,%ss:0x12(%edx,%eax,2)
mov %ds,%ss:0x12(%ebx,%eax,2)
mov %ds,%ss:0x12(%esp,%eax,2)
mov %ds,%ss:0x12(%ebp,%eax,2)
mov %ds,%ss:0x12(%esi,%eax,2)
mov %ds,%ss:0x12(%edi,%eax,2)
mov %ds,%ss:0x12(%eax,%ecx,2)
mov %ds,%ss:0x12(%ecx,%ecx,2)
mov %ds,%ss:0x12(%edx,%ecx,2)
mov %ds,%ss:0x12(%ebx,%ecx,2)
mov %ds,%ss:0x12(%esp,%ecx,2)
mov %ds,%ss:0x12(%ebp,%ecx,2)
mov %ds,%ss:0x12(%esi,%ecx,2)
mov %ds,%ss:0x12(%edi,%ecx,2)
mov %ds,%ss:0x12(%eax,%edx,2)
mov %ds,%ss:0x12(%ecx,%edx,2)
mov %ds,%ss:0x12(%edx,%edx,2)
mov %ds,%ss:0x12(%ebx,%edx,2)
mov %ds,%ss:0x12(%esp,%edx,2)
mov %ds,%ss:0x12(%ebp,%edx,2)
mov %ds,%ss:0x12(%esi,%edx,2)
mov %ds,%ss:0x12(%edi,%edx,2)
mov %ds,%ss:0x12(%eax,%ebx,2)
mov %ds,%ss:0x12(%ecx,%ebx,2)
mov %ds,%ss:0x12(%edx,%ebx,2)
mov %ds,%ss:0x12(%ebx,%ebx,2)
mov %ds,%ss:0x12(%esp,%ebx,2)
mov %ds,%ss:0x12(%ebp,%ebx,2)
mov %ds,%ss:0x12(%esi,%ebx,2)
mov %ds,%ss:0x12(%edi,%ebx,2)
mov %ds,%ss:0x12(%eax,2)
mov %ds,%ss:0x12(%ecx,2)
mov %ds,%ss:0x12(%edx,2)
mov %ds,%ss:0x12(%ebx,2)
mov %ds,%ss:0x12(%esp,2)
mov %ds,%ss:0x12(%ebp,2)
mov %ds,%ss:0x12(%esi,2)
mov %ds,%ss:0x12(%edi,2)
mov %ds,%ss:0x12(%eax,%ebp,2)
mov %ds,%ss:0x12(%ecx,%ebp,2)
mov %ds,%ss:0x12(%edx,%ebp,2)
mov %ds,%ss:0x12(%ebx,%ebp,2)
mov %ds,%ss:0x12(%esp,%ebp,2)
mov %ds,%ss:0x12(%ebp,%ebp,2)
mov %ds,%ss:0x12(%esi,%ebp,2)
mov %ds,%ss:0x12(%edi,%ebp,2)
mov %ds,%ss:0x12(%eax,%esi,2)
mov %ds,%ss:0x12(%ecx,%esi,2)
mov %ds,%ss:0x12(%edx,%esi,2)
mov %ds,%ss:0x12(%ebx,%esi,2)
mov %ds,%ss:0x12(%esp,%esi,2)
mov %ds,%ss:0x12(%ebp,%esi,2)
mov %ds,%ss:0x12(%esi,%esi,2)
mov %ds,%ss:0x12(%edi,%esi,2)
mov %ds,%ss:0x12(%eax,%edi,2)
mov %ds,%ss:0x12(%ecx,%edi,2)
mov %ds,%ss:0x12(%edx,%edi,2)
mov %ds,%ss:0x12(%ebx,%edi,2)
mov %ds,%ss:0x12(%esp,%edi,2)
mov %ds,%ss:0x12(%ebp,%edi,2)
mov %ds,%ss:0x12(%esi,%edi,2)
mov %ds,%ss:0x12(%edi,%edi,2)
mov %ds,%ss:0x12(%eax,%eax,4)
mov %ds,%ss:0x12(%ecx,%eax,4)
mov %ds,%ss:0x12(%edx,%eax,4)
mov %ds,%ss:0x12(%ebx,%eax,4)
mov %ds,%ss:0x12(%esp,%eax,4)
mov %ds,%ss:0x12(%ebp,%eax,4)
mov %ds,%ss:0x12(%esi,%eax,4)
mov %ds,%ss:0x12(%edi,%eax,4)
mov %ds,%ss:0x12(%eax,%ecx,4)
mov %ds,%ss:0x12(%ecx,%ecx,4)
mov %ds,%ss:0x12(%edx,%ecx,4)
mov %ds,%ss:0x12(%ebx,%ecx,4)
mov %ds,%ss:0x12(%esp,%ecx,4)
mov %ds,%ss:0x12(%ebp,%ecx,4)
mov %ds,%ss:0x12(%esi,%ecx,4)
mov %ds,%ss:0x12(%edi,%ecx,4)
mov %ds,%ss:0x12(%eax,%edx,4)
mov %ds,%ss:0x12(%ecx,%edx,4)
mov %ds,%ss:0x12(%edx,%edx,4)
mov %ds,%ss:0x12(%ebx,%edx,4)
mov %ds,%ss:0x12(%esp,%edx,4)
mov %ds,%ss:0x12(%ebp,%edx,4)
mov %ds,%ss:0x12(%esi,%edx,4)
mov %ds,%ss:0x12(%edi,%edx,4)
mov %ds,%ss:0x12(%eax,%ebx,4)
mov %ds,%ss:0x12(%ecx,%ebx,4)
mov %ds,%ss:0x12(%edx,%ebx,4)
mov %ds,%ss:0x12(%ebx,%ebx,4)
mov %ds,%ss:0x12(%esp,%ebx,4)
mov %ds,%ss:0x12(%ebp,%ebx,4)
mov %ds,%ss:0x12(%esi,%ebx,4)
mov %ds,%ss:0x12(%edi,%ebx,4)
mov %ds,%ss:0x12(%eax,4)
mov %ds,%ss:0x12(%ecx,4)
mov %ds,%ss:0x12(%edx,4)
mov %ds,%ss:0x12(%ebx,4)
mov %ds,%ss:0x12(%esp,4)
mov %ds,%ss:0x12(%ebp,4)
mov %ds,%ss:0x12(%esi,4)
mov %ds,%ss:0x12(%edi,4)
mov %ds,%ss:0x12(%eax,%ebp,4)
mov %ds,%ss:0x12(%ecx,%ebp,4)
mov %ds,%ss:0x12(%edx,%ebp,4)
mov %ds,%ss:0x12(%ebx,%ebp,4)
mov %ds,%ss:0x12(%esp,%ebp,4)
mov %ds,%ss:0x12(%ebp,%ebp,4)
mov %ds,%ss:0x12(%esi,%ebp,4)
mov %ds,%ss:0x12(%edi,%ebp,4)
mov %ds,%ss:0x12(%eax,%esi,4)
mov %ds,%ss:0x12(%ecx,%esi,4)
mov %ds,%ss:0x12(%edx,%esi,4)
mov %ds,%ss:0x12(%ebx,%esi,4)
mov %ds,%ss:0x12(%esp,%esi,4)
mov %ds,%ss:0x12(%ebp,%esi,4)
mov %ds,%ss:0x12(%esi,%esi,4)
mov %ds,%ss:0x12(%edi,%esi,4)
mov %ds,%ss:0x12(%eax,%edi,4)
mov %ds,%ss:0x12(%ecx,%edi,4)
mov %ds,%ss:0x12(%edx,%edi,4)
mov %ds,%ss:0x12(%ebx,%edi,4)
mov %ds,%ss:0x12(%esp,%edi,4)
mov %ds,%ss:0x12(%ebp,%edi,4)
mov %ds,%ss:0x12(%esi,%edi,4)
mov %ds,%ss:0x12(%edi,%edi,4)
mov %ds,%ss:0x12(%eax,%eax,8)
mov %ds,%ss:0x12(%ecx,%eax,8)
mov %ds,%ss:0x12(%edx,%eax,8)
mov %ds,%ss:0x12(%ebx,%eax,8)
mov %ds,%ss:0x12(%esp,%eax,8)
mov %ds,%ss:0x12(%ebp,%eax,8)
mov %ds,%ss:0x12(%esi,%eax,8)
mov %ds,%ss:0x12(%edi,%eax,8)
mov %ds,%ss:0x12(%eax,%ecx,8)
mov %ds,%ss:0x12(%ecx,%ecx,8)
mov %ds,%ss:0x12(%edx,%ecx,8)
mov %ds,%ss:0x12(%ebx,%ecx,8)
mov %ds,%ss:0x12(%esp,%ecx,8)
mov %ds,%ss:0x12(%ebp,%ecx,8)
mov %ds,%ss:0x12(%esi,%ecx,8)
mov %ds,%ss:0x12(%edi,%ecx,8)
mov %ds,%ss:0x12(%eax,%edx,8)
mov %ds,%ss:0x12(%ecx,%edx,8)
mov %ds,%ss:0x12(%edx,%edx,8)
mov %ds,%ss:0x12(%ebx,%edx,8)
mov %ds,%ss:0x12(%esp,%edx,8)
mov %ds,%ss:0x12(%ebp,%edx,8)
mov %ds,%ss:0x12(%esi,%edx,8)
mov %ds,%ss:0x12(%edi,%edx,8)
mov %ds,%ss:0x12(%eax,%ebx,8)
mov %ds,%ss:0x12(%ecx,%ebx,8)
mov %ds,%ss:0x12(%edx,%ebx,8)
mov %ds,%ss:0x12(%ebx,%ebx,8)
mov %ds,%ss:0x12(%esp,%ebx,8)
mov %ds,%ss:0x12(%ebp,%ebx,8)
mov %ds,%ss:0x12(%esi,%ebx,8)
mov %ds,%ss:0x12(%edi,%ebx,8)
mov %ds,%ss:0x12(%eax,8)
mov %ds,%ss:0x12(%ecx,8)
mov %ds,%ss:0x12(%edx,8)
mov %ds,%ss:0x12(%ebx,8)
mov %ds,%ss:0x12(%esp,8)
mov %ds,%ss:0x12(%ebp,8)
mov %ds,%ss:0x12(%esi,8)
mov %ds,%ss:0x12(%edi,8)
mov %ds,%ss:0x12(%eax,%ebp,8)
mov %ds,%ss:0x12(%ecx,%ebp,8)
mov %ds,%ss:0x12(%edx,%ebp,8)
mov %ds,%ss:0x12(%ebx,%ebp,8)
mov %ds,%ss:0x12(%esp,%ebp,8)
mov %ds,%ss:0x12(%ebp,%ebp,8)
mov %ds,%ss:0x12(%esi,%ebp,8)
mov %ds,%ss:0x12(%edi,%ebp,8)
mov %ds,%ss:0x12(%eax,%esi,8)
mov %ds,%ss:0x12(%ecx,%esi,8)
mov %ds,%ss:0x12(%edx,%esi,8)
mov %ds,%ss:0x12(%ebx,%esi,8)
mov %ds,%ss:0x12(%esp,%esi,8)
mov %ds,%ss:0x12(%ebp,%esi,8)
mov %ds,%ss:0x12(%esi,%esi,8)
mov %ds,%ss:0x12(%edi,%esi,8)
mov %ds,%ss:0x12(%eax,%edi,8)
mov %ds,%ss:0x12(%edx,%edi,8)
mov %ds,%ss:0x12(%ecx,%edi,8)
mov %ds,%ss:0x12(%ebx,%edi,8)
mov %ds,%ss:0x12(%esp,%edi,8)
mov %ds,%ss:0x12(%ebp,%edi,8)
mov %ds,%ss:0x12(%esi,%edi,8)
mov %ds,%ss:0x12(%edi,%edi,8)
mov %ds,%ss:0x12345678(%eax,%eax,1)
mov %ds,%ss:0x12345678(%ecx,%eax,1)
mov %ds,%ss:0x12345678(%edx,%eax,1)
mov %ds,%ss:0x12345678(%ebx,%eax,1)
mov %ds,%ss:0x12345678(%esp,%eax,1)
mov %ds,%ss:0x12345678(%ebp,%eax,1)
mov %ds,%ss:0x12345678(%esi,%eax,1)
mov %ds,%ss:0x12345678(%edi,%eax,1)
mov %ds,%ss:0x12345678(%eax,%ecx,1)
mov %ds,%ss:0x12345678(%ecx,%ecx,1)
mov %ds,%ss:0x12345678(%edx,%ecx,1)
mov %ds,%ss:0x12345678(%ebx,%ecx,1)
mov %ds,%ss:0x12345678(%esp,%ecx,1)
mov %ds,%ss:0x12345678(%ebp,%ecx,1)
mov %ds,%ss:0x12345678(%esi,%ecx,1)
mov %ds,%ss:0x12345678(%edi,%ecx,1)
mov %ds,%ss:0x12345678(%eax,%edx,1)
mov %ds,%ss:0x12345678(%ecx,%edx,1)
mov %ds,%ss:0x12345678(%edx,%edx,1)
mov %ds,%ss:0x12345678(%ebx,%edx,1)
mov %ds,%ss:0x12345678(%esp,%edx,1)
mov %ds,%ss:0x12345678(%ebp,%edx,1)
mov %ds,%ss:0x12345678(%esi,%edx,1)
mov %ds,%ss:0x12345678(%edi,%edx,1)
mov %ds,%ss:0x12345678(%eax,%ebx,1)
mov %ds,%ss:0x12345678(%ecx,%ebx,1)
mov %ds,%ss:0x12345678(%edx,%ebx,1)
mov %ds,%ss:0x12345678(%ebx,%ebx,1)
mov %ds,%ss:0x12345678(%esp,%ebx,1)
mov %ds,%ss:0x12345678(%ebp,%ebx,1)
mov %ds,%ss:0x12345678(%esi,%ebx,1)
mov %ds,%ss:0x12345678(%edi,%ebx,1)
mov %ds,%ss:0x12345678(%eax,1)
mov %ds,%ss:0x12345678(%ecx,1)
mov %ds,%ss:0x12345678(%edx,1)
mov %ds,%ss:0x12345678(%ebx,1)
mov %ds,%ss:0x12345678(%esp,1)
mov %ds,%ss:0x12345678(%ebp,1)
mov %ds,%ss:0x12345678(%esi,1)
mov %ds,%ss:0x12345678(%edi,1)
mov %ds,%ss:0x12345678(%eax,%ebp,1)
mov %ds,%ss:0x12345678(%ecx,%ebp,1)
mov %ds,%ss:0x12345678(%edx,%ebp,1)
mov %ds,%ss:0x12345678(%ebx,%ebp,1)
mov %ds,%ss:0x12345678(%esp,%ebp,1)
mov %ds,%ss:0x12345678(%ebp,%ebp,1)
mov %ds,%ss:0x12345678(%esi,%ebp,1)
mov %ds,%ss:0x12345678(%edi,%ebp,1)
mov %ds,%ss:0x12345678(%eax,%esi,1)
mov %ds,%ss:0x12345678(%ecx,%esi,1)
mov %ds,%ss:0x12345678(%edx,%esi,1)
mov %ds,%ss:0x12345678(%ebx,%esi,1)
mov %ds,%ss:0x12345678(%esp,%esi,1)
mov %ds,%ss:0x12345678(%ebp,%esi,1)
mov %ds,%ss:0x12345678(%esi,%esi,1)
mov %ds,%ss:0x12345678(%edi,%esi,1)
mov %ds,%ss:0x12345678(%eax,%edi,1)
mov %ds,%ss:0x12345678(%ecx,%edi,1)
mov %ds,%ss:0x12345678(%edx,%edi,1)
mov %ds,%ss:0x12345678(%ebx,%edi,1)
mov %ds,%ss:0x12345678(%esp,%edi,1)
mov %ds,%ss:0x12345678(%ebp,%edi,1)
mov %ds,%ss:0x12345678(%esi,%edi,1)
mov %ds,%ss:0x12345678(%edi,%edi,1)
mov %ds,%ss:0x12345678(%eax,%eax,2)
mov %ds,%ss:0x12345678(%ecx,%eax,2)
mov %ds,%ss:0x12345678(%edx,%eax,2)
mov %ds,%ss:0x12345678(%ebx,%eax,2)
mov %ds,%ss:0x12345678(%esp,%eax,2)
mov %ds,%ss:0x12345678(%ebp,%eax,2)
mov %ds,%ss:0x12345678(%esi,%eax,2)
mov %ds,%ss:0x12345678(%edi,%eax,2)
mov %ds,%ss:0x12345678(%eax,%ecx,2)
mov %ds,%ss:0x12345678(%ecx,%ecx,2)
mov %ds,%ss:0x12345678(%edx,%ecx,2)
mov %ds,%ss:0x12345678(%ebx,%ecx,2)
mov %ds,%ss:0x12345678(%esp,%ecx,2)
mov %ds,%ss:0x12345678(%ebp,%ecx,2)
mov %ds,%ss:0x12345678(%esi,%ecx,2)
mov %ds,%ss:0x12345678(%edi,%ecx,2)
mov %ds,%ss:0x12345678(%eax,%edx,2)
mov %ds,%ss:0x12345678(%ecx,%edx,2)
mov %ds,%ss:0x12345678(%edx,%edx,2)
mov %ds,%ss:0x12345678(%ebx,%edx,2)
mov %ds,%ss:0x12345678(%esp,%edx,2)
mov %ds,%ss:0x12345678(%ebp,%edx,2)
mov %ds,%ss:0x12345678(%esi,%edx,2)
mov %ds,%ss:0x12345678(%edi,%edx,2)
mov %ds,%ss:0x12345678(%eax,%ebx,2)
mov %ds,%ss:0x12345678(%ecx,%ebx,2)
mov %ds,%ss:0x12345678(%edx,%ebx,2)
mov %ds,%ss:0x12345678(%ebx,%ebx,2)
mov %ds,%ss:0x12345678(%esp,%ebx,2)
mov %ds,%ss:0x12345678(%ebp,%ebx,2)
mov %ds,%ss:0x12345678(%esi,%ebx,2)
mov %ds,%ss:0x12345678(%edi,%ebx,2)
mov %ds,%ss:0x12345678(%eax,2)
mov %ds,%ss:0x12345678(%ecx,2)
mov %ds,%ss:0x12345678(%edx,2)
mov %ds,%ss:0x12345678(%ebx,2)
mov %ds,%ss:0x12345678(%esp,2)
mov %ds,%ss:0x12345678(%ebp,2)
mov %ds,%ss:0x12345678(%esi,2)
mov %ds,%ss:0x12345678(%edi,2)
mov %ds,%ss:0x12345678(%eax,%ebp,2)
mov %ds,%ss:0x12345678(%ecx,%ebp,2)
mov %ds,%ss:0x12345678(%edx,%ebp,2)
mov %ds,%ss:0x12345678(%ebx,%ebp,2)
mov %ds,%ss:0x12345678(%esp,%ebp,2)
mov %ds,%ss:0x12345678(%ebp,%ebp,2)
mov %ds,%ss:0x12345678(%esi,%ebp,2)
mov %ds,%ss:0x12345678(%edi,%ebp,2)
mov %ds,%ss:0x12345678(%eax,%esi,2)
mov %ds,%ss:0x12345678(%ecx,%esi,2)
mov %ds,%ss:0x12345678(%edx,%esi,2)
mov %ds,%ss:0x12345678(%ebx,%esi,2)
mov %ds,%ss:0x12345678(%esp,%esi,2)
mov %ds,%ss:0x12345678(%ebp,%esi,2)
mov %ds,%ss:0x12345678(%esi,%esi,2)
mov %ds,%ss:0x12345678(%edi,%esi,2)
mov %ds,%ss:0x12345678(%eax,%edi,2)
mov %ds,%ss:0x12345678(%ecx,%edi,2)
mov %ds,%ss:0x12345678(%edx,%edi,2)
mov %ds,%ss:0x12345678(%ebx,%edi,2)
mov %ds,%ss:0x12345678(%esp,%edi,2)
mov %ds,%ss:0x12345678(%ebp,%edi,2)
mov %ds,%ss:0x12345678(%esi,%edi,2)
mov %ds,%ss:0x12345678(%edi,%edi,2)
mov %ds,%ss:0x12345678(%eax,%eax,4)
mov %ds,%ss:0x12345678(%ecx,%eax,4)
mov %ds,%ss:0x12345678(%edx,%eax,4)
mov %ds,%ss:0x12345678(%ebx,%eax,4)
mov %ds,%ss:0x12345678(%esp,%eax,4)
mov %ds,%ss:0x12345678(%ebp,%eax,4)
mov %ds,%ss:0x12345678(%esi,%eax,4)
mov %ds,%ss:0x12345678(%edi,%eax,4)
mov %ds,%ss:0x12345678(%eax,%ecx,4)
mov %ds,%ss:0x12345678(%ecx,%ecx,4)
mov %ds,%ss:0x12345678(%edx,%ecx,4)
mov %ds,%ss:0x12345678(%ebx,%ecx,4)
mov %ds,%ss:0x12345678(%esp,%ecx,4)
mov %ds,%ss:0x12345678(%ebp,%ecx,4)
mov %ds,%ss:0x12345678(%esi,%ecx,4)
mov %ds,%ss:0x12345678(%edi,%ecx,4)
mov %ds,%ss:0x12345678(%eax,%edx,4)
mov %ds,%ss:0x12345678(%ecx,%edx,4)
mov %ds,%ss:0x12345678(%edx,%edx,4)
mov %ds,%ss:0x12345678(%ebx,%edx,4)
mov %ds,%ss:0x12345678(%esp,%edx,4)
mov %ds,%ss:0x12345678(%ebp,%edx,4)
mov %ds,%ss:0x12345678(%esi,%edx,4)
mov %ds,%ss:0x12345678(%edi,%edx,4)
mov %ds,%ss:0x12345678(%eax,%ebx,4)
mov %ds,%ss:0x12345678(%ecx,%ebx,4)
mov %ds,%ss:0x12345678(%edx,%ebx,4)
mov %ds,%ss:0x12345678(%ebx,%ebx,4)
mov %ds,%ss:0x12345678(%esp,%ebx,4)
mov %ds,%ss:0x12345678(%ebp,%ebx,4)
mov %ds,%ss:0x12345678(%esi,%ebx,4)
mov %ds,%ss:0x12345678(%edi,%ebx,4)
mov %ds,%ss:0x12345678(%eax,4)
mov %ds,%ss:0x12345678(%ecx,4)
mov %ds,%ss:0x12345678(%edx,4)
mov %ds,%ss:0x12345678(%ebx,4)
mov %ds,%ss:0x12345678(%esp,4)
mov %ds,%ss:0x12345678(%ebp,4)
mov %ds,%ss:0x12345678(%esi,4)
mov %ds,%ss:0x12345678(%edi,4)
mov %ds,%ss:0x12345678(%eax,%ebp,4)
mov %ds,%ss:0x12345678(%ecx,%ebp,4)
mov %ds,%ss:0x12345678(%edx,%ebp,4)
mov %ds,%ss:0x12345678(%ebx,%ebp,4)
mov %ds,%ss:0x12345678(%esp,%ebp,4)
mov %ds,%ss:0x12345678(%ebp,%ebp,4)
mov %ds,%ss:0x12345678(%esi,%ebp,4)
mov %ds,%ss:0x12345678(%edi,%ebp,4)
mov %ds,%ss:0x12345678(%eax,%esi,4)
mov %ds,%ss:0x12345678(%ecx,%esi,4)
mov %ds,%ss:0x12345678(%edx,%esi,4)
mov %ds,%ss:0x12345678(%ebx,%esi,4)
mov %ds,%ss:0x12345678(%esp,%esi,4)
mov %ds,%ss:0x12345678(%ebp,%esi,4)
mov %ds,%ss:0x12345678(%esi,%esi,4)
mov %ds,%ss:0x12345678(%edi,%esi,4)
mov %ds,%ss:0x12345678(%eax,%edi,4)
mov %ds,%ss:0x12345678(%ecx,%edi,4)
mov %ds,%ss:0x12345678(%edx,%edi,4)
mov %ds,%ss:0x12345678(%ebx,%edi,4)
mov %ds,%ss:0x12345678(%esp,%edi,4)
mov %ds,%ss:0x12345678(%ebp,%edi,4)
mov %ds,%ss:0x12345678(%esi,%edi,4)
mov %ds,%ss:0x12345678(%edi,%edi,4)
mov %ds,%ss:0x12345678(%eax,%eax,8)
mov %ds,%ss:0x12345678(%ecx,%eax,8)
mov %ds,%ss:0x12345678(%edx,%eax,8)
mov %ds,%ss:0x12345678(%ebx,%eax,8)
mov %ds,%ss:0x12345678(%esp,%eax,8)
mov %ds,%ss:0x12345678(%ebp,%eax,8)
mov %ds,%ss:0x12345678(%esi,%eax,8)
mov %ds,%ss:0x12345678(%edi,%eax,8)
mov %ds,%ss:0x12345678(%eax,%ecx,8)
mov %ds,%ss:0x12345678(%ecx,%ecx,8)
mov %ds,%ss:0x12345678(%edx,%ecx,8)
mov %ds,%ss:0x12345678(%ebx,%ecx,8)
mov %ds,%ss:0x12345678(%esp,%ecx,8)
mov %ds,%ss:0x12345678(%ebp,%ecx,8)
mov %ds,%ss:0x12345678(%esi,%ecx,8)
mov %ds,%ss:0x12345678(%edi,%ecx,8)
mov %ds,%ss:0x12345678(%eax,%edx,8)
mov %ds,%ss:0x12345678(%ecx,%edx,8)
mov %ds,%ss:0x12345678(%edx,%edx,8)
mov %ds,%ss:0x12345678(%ebx,%edx,8)
mov %ds,%ss:0x12345678(%esp,%edx,8)
mov %ds,%ss:0x12345678(%ebp,%edx,8)
mov %ds,%ss:0x12345678(%esi,%edx,8)
mov %ds,%ss:0x12345678(%edi,%edx,8)
mov %ds,%ss:0x12345678(%eax,%ebx,8)
mov %ds,%ss:0x12345678(%ecx,%ebx,8)
mov %ds,%ss:0x12345678(%edx,%ebx,8)
mov %ds,%ss:0x12345678(%ebx,%ebx,8)
mov %ds,%ss:0x12345678(%esp,%ebx,8)
mov %ds,%ss:0x12345678(%ebp,%ebx,8)
mov %ds,%ss:0x12345678(%esi,%ebx,8)
mov %ds,%ss:0x12345678(%edi,%ebx,8)
mov %ds,%ss:0x12345678(%eax,8)
mov %ds,%ss:0x12345678(%ecx,8)
mov %ds,%ss:0x12345678(%edx,8)
mov %ds,%ss:0x12345678(%ebx,8)
mov %ds,%ss:0x12345678(%esp,8)
mov %ds,%ss:0x12345678(%ebp,8)
mov %ds,%ss:0x12345678(%esi,8)
mov %ds,%ss:0x12345678(%edi,8)
mov %ds,%ss:0x12345678(%eax,%ebp,8)
mov %ds,%ss:0x12345678(%ecx,%ebp,8)
mov %ds,%ss:0x12345678(%edx,%ebp,8)
mov %ds,%ss:0x12345678(%ebx,%ebp,8)
mov %ds,%ss:0x12345678(%esp,%ebp,8)
mov %ds,%ss:0x12345678(%ebp,%ebp,8)
mov %ds,%ss:0x12345678(%esi,%ebp,8)
mov %ds,%ss:0x12345678(%edi,%ebp,8)
mov %ds,%ss:0x12345678(%eax,%esi,8)
mov %ds,%ss:0x12345678(%ecx,%esi,8)
mov %ds,%ss:0x12345678(%edx,%esi,8)
mov %ds,%ss:0x12345678(%ebx,%esi,8)
mov %ds,%ss:0x12345678(%esp,%esi,8)
mov %ds,%ss:0x12345678(%ebp,%esi,8)
mov %ds,%ss:0x12345678(%esi,%esi,8)
mov %ds,%ss:0x12345678(%edi,%esi,8)
mov %ds,%ss:0x12345678(%eax,%edi,8)
mov %ds,%ss:0x12345678(%edx,%edi,8)
mov %ds,%ss:0x12345678(%ecx,%edi,8)
mov %ds,%ss:0x12345678(%ebx,%edi,8)
mov %ds,%ss:0x12345678(%esp,%edi,8)
mov %ds,%ss:0x12345678(%ebp,%edi,8)
mov %ds,%ss:0x12345678(%esi,%edi,8)
mov %ds,%ss:0x12345678(%edi,%edi,8)
mov %ds,%ss:(%ebp,%eax,1)
mov %ds,%ss:(%ebp,%ecx,1)
mov %ds,%ss:(%ebp,%edx,1)
mov %ds,%ss:(%ebp,%ebx,1)
mov %ds,%ss:(%ebp,1)
mov %ds,%ss:(%ebp,%ebp,1)
mov %ds,%ss:(%ebp,%esi,1)
mov %ds,%ss:(%ebp,%edi,1)
mov %ds,%ss:(%ebp,%eax,2)
mov %ds,%ss:(%ebp,%ecx,2)
mov %ds,%ss:(%ebp,%edx,2)
mov %ds,%ss:(%ebp,%ebx,2)
mov %ds,%ss:(%ebp,2)
mov %ds,%ss:(%ebp,%ebp,2)
mov %ds,%ss:(%ebp,%esi,2)
mov %ds,%ss:(%ebp,%edi,2)
mov %ds,%ss:(%ebp,%eax,4)
mov %ds,%ss:(%ebp,%ecx,4)
mov %ds,%ss:(%ebp,%edx,4)
mov %ds,%ss:(%ebp,%ebx,4)
mov %ds,%ss:(%ebp,4)
mov %ds,%ss:(%ebp,%ebp,4)
mov %ds,%ss:(%ebp,%esi,4)
mov %ds,%ss:(%ebp,%edi,4)
mov %ds,%ss:(%ebp,%eax,8)
mov %ds,%ss:(%ebp,%ecx,8)
mov %ds,%ss:(%ebp,%edx,8)
mov %ds,%ss:(%ebp,%ebx,8)
mov %ds,%ss:(%ebp,8)
mov %ds,%ss:(%ebp,%ebp,8)
mov %ds,%ss:(%ebp,%esi,8)
mov %ds,%ss:(%ebp,%edi,8)
mov %ds,%ss:0x12(,1)
mov %ds,%ss:0x12(,2)
mov %ds,%ss:0x12(,4)
mov %ds,%ss:0x12(,8)
mov %ds,%ds:(%eax)
mov %ds,%ds:(%ecx)
mov %ds,%ds:(%edx)
mov %ds,%ds:(%ebx)
mov %ds,%ds:0
mov %ds,%ds:(%esi)
mov %ds,%ds:(%edi)
mov %ds,%ds:0x12(%eax)
mov %ds,%ds:0x12(%ecx)
mov %ds,%ds:0x12(%edx)
mov %ds,%ds:0x12(%ebx)
mov %ds,%ds:0x12(%ebp)
mov %ds,%ds:0x12(%esi)
mov %ds,%ds:0x12(%edi)
mov %ds,%ds:0x12345678(%eax)
mov %ds,%ds:0x12345678(%ecx)
mov %ds,%ds:0x12345678(%edx)
mov %ds,%ds:0x12345678(%ebx)
mov %ds,%ds:0x12345678(%ebp)
mov %ds,%ds:0x12345678(%esi)
mov %ds,%ds:0x12345678(%edi)
mov %ds,%eax
mov %ds,%ecx
mov %ds,%edx
mov %ds,%ebx
mov %ds,%esp
mov %ds,%ebp
mov %ds,%esi
mov %ds,%edi
mov %ds,%ds:(%eax,%eax,1)
mov %ds,%ds:(%ecx,%eax,1)
mov %ds,%ds:(%edx,%eax,1)
mov %ds,%ds:(%ebx,%eax,1)
mov %ds,%ds:(%esp,%eax,1)
mov %ds,%ds:(,%eax,1)
mov %ds,%ds:(%esi,%eax,1)
mov %ds,%ds:(%edi,%eax,1)
mov %ds,%ds:(%eax,%ecx,1)
mov %ds,%ds:(%ecx,%ecx,1)
mov %ds,%ds:(%edx,%ecx,1)
mov %ds,%ds:(%ebx,%ecx,1)
mov %ds,%ds:(%esp,%ecx,1)
mov %ds,%ds:(,%ecx,1)
mov %ds,%ds:(%esi,%ecx,1)
mov %ds,%ds:(%edi,%ecx,1)
mov %ds,%ds:(%eax,%edx,1)
mov %ds,%ds:(%ecx,%edx,1)
mov %ds,%ds:(%edx,%edx,1)
mov %ds,%ds:(%ebx,%edx,1)
mov %ds,%ds:(%esp,%edx,1)
mov %ds,%ds:(,%edx,1)
mov %ds,%ds:(%esi,%edx,1)
mov %ds,%ds:(%edi,%edx,1)
mov %ds,%ds:(%eax,%ebx,1)
mov %ds,%ds:(%ecx,%ebx,1)
mov %ds,%ds:(%edx,%ebx,1)
mov %ds,%ds:(%ebx,%ebx,1)
mov %ds,%ds:(%esp,%ebx,1)
mov %ds,%ds:(,%ebx,1)
mov %ds,%ds:(%esi,%ebx,1)
mov %ds,%ds:(%edi,%ebx,1)
mov %ds,%ds:(%eax,1)
mov %ds,%ds:(%ecx,1)
mov %ds,%ds:(%edx,1)
mov %ds,%ds:(%ebx,1)
mov %ds,%ds:(%esp,1)
mov %ds,%ds:(,1)
mov %ds,%ds:(%esi,1)
mov %ds,%ds:(%edi,1)
mov %ds,%ds:(%eax,%ebp,1)
mov %ds,%ds:(%ecx,%ebp,1)
mov %ds,%ds:(%edx,%ebp,1)
mov %ds,%ds:(%ebx,%ebp,1)
mov %ds,%ds:(%esp,%ebp,1)
mov %ds,%ds:(,%ebp,1)
mov %ds,%ds:(%esi,%ebp,1)
mov %ds,%ds:(%edi,%ebp,1)
mov %ds,%ds:(%eax,%esi,1)
mov %ds,%ds:(%ecx,%esi,1)
mov %ds,%ds:(%edx,%esi,1)
mov %ds,%ds:(%ebx,%esi,1)
mov %ds,%ds:(%esp,%esi,1)
mov %ds,%ds:(,%esi,1)
mov %ds,%ds:(%esi,%esi,1)
mov %ds,%ds:(%edi,%esi,1)
mov %ds,%ds:(%eax,%edi,1)
mov %ds,%ds:(%ecx,%edi,1)
mov %ds,%ds:(%edx,%edi,1)
mov %ds,%ds:(%ebx,%edi,1)
mov %ds,%ds:(%esp,%edi,1)
mov %ds,%ds:(,%edi,1)
mov %ds,%ds:(%esi,%edi,1)
mov %ds,%ds:(%edi,%edi,1)
mov %ds,%ds:(%eax,%eax,2)
mov %ds,%ds:(%ecx,%eax,2)
mov %ds,%ds:(%edx,%eax,2)
mov %ds,%ds:(%ebx,%eax,2)
mov %ds,%ds:(%esp,%eax,2)
mov %ds,%ds:(,%eax,2)
mov %ds,%ds:(%esi,%eax,2)
mov %ds,%ds:(%edi,%eax,2)
mov %ds,%ds:(%eax,%ecx,2)
mov %ds,%ds:(%ecx,%ecx,2)
mov %ds,%ds:(%edx,%ecx,2)
mov %ds,%ds:(%ebx,%ecx,2)
mov %ds,%ds:(%esp,%ecx,2)
mov %ds,%ds:(,%ecx,2)
mov %ds,%ds:(%esi,%ecx,2)
mov %ds,%ds:(%edi,%ecx,2)
mov %ds,%ds:(%eax,%edx,2)
mov %ds,%ds:(%ecx,%edx,2)
mov %ds,%ds:(%edx,%edx,2)
mov %ds,%ds:(%ebx,%edx,2)
mov %ds,%ds:(%esp,%edx,2)
mov %ds,%ds:(,%edx,2)
mov %ds,%ds:(%esi,%edx,2)
mov %ds,%ds:(%edi,%edx,2)
mov %ds,%ds:(%eax,%ebx,2)
mov %ds,%ds:(%ecx,%ebx,2)
mov %ds,%ds:(%edx,%ebx,2)
mov %ds,%ds:(%ebx,%ebx,2)
mov %ds,%ds:(%esp,%ebx,2)
mov %ds,%ds:(,%ebx,2)
mov %ds,%ds:(%esi,%ebx,2)
mov %ds,%ds:(%edi,%ebx,2)
mov %ds,%ds:(%eax,2)
mov %ds,%ds:(%ecx,2)
mov %ds,%ds:(%edx,2)
mov %ds,%ds:(%ebx,2)
mov %ds,%ds:(%esp,2)
mov %ds,%ds:(,2)
mov %ds,%ds:(%esi,2)
mov %ds,%ds:(%edi,2)
mov %ds,%ds:(%eax,%ebp,2)
mov %ds,%ds:(%ecx,%ebp,2)
mov %ds,%ds:(%edx,%ebp,2)
mov %ds,%ds:(%ebx,%ebp,2)
mov %ds,%ds:(%esp,%ebp,2)
mov %ds,%ds:(,%ebp,2)
mov %ds,%ds:(%esi,%ebp,2)
mov %ds,%ds:(%edi,%ebp,2)
mov %ds,%ds:(%eax,%esi,2)
mov %ds,%ds:(%ecx,%esi,2)
mov %ds,%ds:(%edx,%esi,2)
mov %ds,%ds:(%ebx,%esi,2)
mov %ds,%ds:(%esp,%esi,2)
mov %ds,%ds:(,%esi,2)
mov %ds,%ds:(%esi,%esi,2)
mov %ds,%ds:(%edi,%esi,2)
mov %ds,%ds:(%eax,%edi,2)
mov %ds,%ds:(%ecx,%edi,2)
mov %ds,%ds:(%edx,%edi,2)
mov %ds,%ds:(%ebx,%edi,2)
mov %ds,%ds:(%esp,%edi,2)
mov %ds,%ds:(,%edi,2)
mov %ds,%ds:(%esi,%edi,2)
mov %ds,%ds:(%edi,%edi,2)
mov %ds,%ds:(%eax,%eax,4)
mov %ds,%ds:(%ecx,%eax,4)
mov %ds,%ds:(%edx,%eax,4)
mov %ds,%ds:(%ebx,%eax,4)
mov %ds,%ds:(%esp,%eax,4)
mov %ds,%ds:(,%eax,4)
mov %ds,%ds:(%esi,%eax,4)
mov %ds,%ds:(%edi,%eax,4)
mov %ds,%ds:(%eax,%ecx,4)
mov %ds,%ds:(%ecx,%ecx,4)
mov %ds,%ds:(%edx,%ecx,4)
mov %ds,%ds:(%ebx,%ecx,4)
mov %ds,%ds:(%esp,%ecx,4)
mov %ds,%ds:(,%ecx,4)
mov %ds,%ds:(%esi,%ecx,4)
mov %ds,%ds:(%edi,%ecx,4)
mov %ds,%ds:(%eax,%edx,4)
mov %ds,%ds:(%ecx,%edx,4)
mov %ds,%ds:(%edx,%edx,4)
mov %ds,%ds:(%ebx,%edx,4)
mov %ds,%ds:(%esp,%edx,4)
mov %ds,%ds:(,%edx,4)
mov %ds,%ds:(%esi,%edx,4)
mov %ds,%ds:(%edi,%edx,4)
mov %ds,%ds:(%eax,%ebx,4)
mov %ds,%ds:(%ecx,%ebx,4)
mov %ds,%ds:(%edx,%ebx,4)
mov %ds,%ds:(%ebx,%ebx,4)
mov %ds,%ds:(%esp,%ebx,4)
mov %ds,%ds:(,%ebx,4)
mov %ds,%ds:(%esi,%ebx,4)
mov %ds,%ds:(%edi,%ebx,4)
mov %ds,%ds:(%eax,4)
mov %ds,%ds:(%ecx,4)
mov %ds,%ds:(%edx,4)
mov %ds,%ds:(%ebx,4)
mov %ds,%ds:(%esp,4)
mov %ds,%ds:(,4)
mov %ds,%ds:(%esi,4)
mov %ds,%ds:(%edi,4)
mov %ds,%ds:(%eax,%ebp,4)
mov %ds,%ds:(%ecx,%ebp,4)
mov %ds,%ds:(%edx,%ebp,4)
mov %ds,%ds:(%ebx,%ebp,4)
mov %ds,%ds:(%esp,%ebp,4)
mov %ds,%ds:(,%ebp,4)
mov %ds,%ds:(%esi,%ebp,4)
mov %ds,%ds:(%edi,%ebp,4)
mov %ds,%ds:(%eax,%esi,4)
mov %ds,%ds:(%ecx,%esi,4)
mov %ds,%ds:(%edx,%esi,4)
mov %ds,%ds:(%ebx,%esi,4)
mov %ds,%ds:(%esp,%esi,4)
mov %ds,%ds:(,%esi,4)
mov %ds,%ds:(%esi,%esi,4)
mov %ds,%ds:(%edi,%esi,4)
mov %ds,%ds:(%eax,%edi,4)
mov %ds,%ds:(%ecx,%edi,4)
mov %ds,%ds:(%edx,%edi,4)
mov %ds,%ds:(%ebx,%edi,4)
mov %ds,%ds:(%esp,%edi,4)
mov %ds,%ds:(,%edi,4)
mov %ds,%ds:(%esi,%edi,4)
mov %ds,%ds:(%edi,%edi,4)
mov %ds,%ds:(%eax,%eax,8)
mov %ds,%ds:(%ecx,%eax,8)
mov %ds,%ds:(%edx,%eax,8)
mov %ds,%ds:(%ebx,%eax,8)
mov %ds,%ds:(%esp,%eax,8)
mov %ds,%ds:(,%eax,8)
mov %ds,%ds:(%esi,%eax,8)
mov %ds,%ds:(%edi,%eax,8)
mov %ds,%ds:(%eax,%ecx,8)
mov %ds,%ds:(%ecx,%ecx,8)
mov %ds,%ds:(%edx,%ecx,8)
mov %ds,%ds:(%ebx,%ecx,8)
mov %ds,%ds:(%esp,%ecx,8)
mov %ds,%ds:(,%ecx,8)
mov %ds,%ds:(%esi,%ecx,8)
mov %ds,%ds:(%edi,%ecx,8)
mov %ds,%ds:(%eax,%edx,8)
mov %ds,%ds:(%ecx,%edx,8)
mov %ds,%ds:(%edx,%edx,8)
mov %ds,%ds:(%ebx,%edx,8)
mov %ds,%ds:(%esp,%edx,8)
mov %ds,%ds:(,%edx,8)
mov %ds,%ds:(%esi,%edx,8)
mov %ds,%ds:(%edi,%edx,8)
mov %ds,%ds:(%eax,%ebx,8)
mov %ds,%ds:(%ecx,%ebx,8)
mov %ds,%ds:(%edx,%ebx,8)
mov %ds,%ds:(%ebx,%ebx,8)
mov %ds,%ds:(%esp,%ebx,8)
mov %ds,%ds:(,%ebx,8)
mov %ds,%ds:(%esi,%ebx,8)
mov %ds,%ds:(%edi,%ebx,8)
mov %ds,%ds:(%eax,8)
mov %ds,%ds:(%ecx,8)
mov %ds,%ds:(%edx,8)
mov %ds,%ds:(%ebx,8)
mov %ds,%ds:(%esp,8)
mov %ds,%ds:(,8)
mov %ds,%ds:(%esi,8)
mov %ds,%ds:(%edi,8)
mov %ds,%ds:(%eax,%ebp,8)
mov %ds,%ds:(%ecx,%ebp,8)
mov %ds,%ds:(%edx,%ebp,8)
mov %ds,%ds:(%ebx,%ebp,8)
mov %ds,%ds:(%esp,%ebp,8)
mov %ds,%ds:(,%ebp,8)
mov %ds,%ds:(%esi,%ebp,8)
mov %ds,%ds:(%edi,%ebp,8)
mov %ds,%ds:(%eax,%esi,8)
mov %ds,%ds:(%ecx,%esi,8)
mov %ds,%ds:(%edx,%esi,8)
mov %ds,%ds:(%ebx,%esi,8)
mov %ds,%ds:(%esp,%esi,8)
mov %ds,%ds:(,%esi,8)
mov %ds,%ds:(%esi,%esi,8)
mov %ds,%ds:(%edi,%esi,8)
mov %ds,%ds:(%eax,%edi,8)
mov %ds,%ds:(%edx,%edi,8)
mov %ds,%ds:(%ecx,%edi,8)
mov %ds,%ds:(%ebx,%edi,8)
mov %ds,%ds:(%esp,%edi,8)
mov %ds,%ds:(,%edi,8)
mov %ds,%ds:(%esi,%edi,8)
mov %ds,%ds:(%edi,%edi,8)
mov %ds,%ds:0x12(%eax,%eax,1)
mov %ds,%ds:0x12(%ecx,%eax,1)
mov %ds,%ds:0x12(%edx,%eax,1)
mov %ds,%ds:0x12(%ebx,%eax,1)
mov %ds,%ds:0x12(%esp,%eax,1)
mov %ds,%ds:0x12(%ebp,%eax,1)
mov %ds,%ds:0x12(%esi,%eax,1)
mov %ds,%ds:0x12(%edi,%eax,1)
mov %ds,%ds:0x12(%eax,%ecx,1)
mov %ds,%ds:0x12(%ecx,%ecx,1)
mov %ds,%ds:0x12(%edx,%ecx,1)
mov %ds,%ds:0x12(%ebx,%ecx,1)
mov %ds,%ds:0x12(%esp,%ecx,1)
mov %ds,%ds:0x12(%ebp,%ecx,1)
mov %ds,%ds:0x12(%esi,%ecx,1)
mov %ds,%ds:0x12(%edi,%ecx,1)
mov %ds,%ds:0x12(%eax,%edx,1)
mov %ds,%ds:0x12(%ecx,%edx,1)
mov %ds,%ds:0x12(%edx,%edx,1)
mov %ds,%ds:0x12(%ebx,%edx,1)
mov %ds,%ds:0x12(%esp,%edx,1)
mov %ds,%ds:0x12(%ebp,%edx,1)
mov %ds,%ds:0x12(%esi,%edx,1)
mov %ds,%ds:0x12(%edi,%edx,1)
mov %ds,%ds:0x12(%eax,%ebx,1)
mov %ds,%ds:0x12(%ecx,%ebx,1)
mov %ds,%ds:0x12(%edx,%ebx,1)
mov %ds,%ds:0x12(%ebx,%ebx,1)
mov %ds,%ds:0x12(%esp,%ebx,1)
mov %ds,%ds:0x12(%ebp,%ebx,1)
mov %ds,%ds:0x12(%esi,%ebx,1)
mov %ds,%ds:0x12(%edi,%ebx,1)
mov %ds,%ds:0x12(%eax,1)
mov %ds,%ds:0x12(%ecx,1)
mov %ds,%ds:0x12(%edx,1)
mov %ds,%ds:0x12(%ebx,1)
mov %ds,%ds:0x12(%esp,1)
mov %ds,%ds:0x12(%ebp,1)
mov %ds,%ds:0x12(%esi,1)
mov %ds,%ds:0x12(%edi,1)
mov %ds,%ds:0x12(%eax,%ebp,1)
mov %ds,%ds:0x12(%ecx,%ebp,1)
mov %ds,%ds:0x12(%edx,%ebp,1)
mov %ds,%ds:0x12(%ebx,%ebp,1)
mov %ds,%ds:0x12(%esp,%ebp,1)
mov %ds,%ds:0x12(%ebp,%ebp,1)
mov %ds,%ds:0x12(%esi,%ebp,1)
mov %ds,%ds:0x12(%edi,%ebp,1)
mov %ds,%ds:0x12(%eax,%esi,1)
mov %ds,%ds:0x12(%ecx,%esi,1)
mov %ds,%ds:0x12(%edx,%esi,1)
mov %ds,%ds:0x12(%ebx,%esi,1)
mov %ds,%ds:0x12(%esp,%esi,1)
mov %ds,%ds:0x12(%ebp,%esi,1)
mov %ds,%ds:0x12(%esi,%esi,1)
mov %ds,%ds:0x12(%edi,%esi,1)
mov %ds,%ds:0x12(%eax,%edi,1)
mov %ds,%ds:0x12(%ecx,%edi,1)
mov %ds,%ds:0x12(%edx,%edi,1)
mov %ds,%ds:0x12(%ebx,%edi,1)
mov %ds,%ds:0x12(%esp,%edi,1)
mov %ds,%ds:0x12(%ebp,%edi,1)
mov %ds,%ds:0x12(%esi,%edi,1)
mov %ds,%ds:0x12(%edi,%edi,1)
mov %ds,%ds:0x12(%eax,%eax,2)
mov %ds,%ds:0x12(%ecx,%eax,2)
mov %ds,%ds:0x12(%edx,%eax,2)
mov %ds,%ds:0x12(%ebx,%eax,2)
mov %ds,%ds:0x12(%esp,%eax,2)
mov %ds,%ds:0x12(%ebp,%eax,2)
mov %ds,%ds:0x12(%esi,%eax,2)
mov %ds,%ds:0x12(%edi,%eax,2)
mov %ds,%ds:0x12(%eax,%ecx,2)
mov %ds,%ds:0x12(%ecx,%ecx,2)
mov %ds,%ds:0x12(%edx,%ecx,2)
mov %ds,%ds:0x12(%ebx,%ecx,2)
mov %ds,%ds:0x12(%esp,%ecx,2)
mov %ds,%ds:0x12(%ebp,%ecx,2)
mov %ds,%ds:0x12(%esi,%ecx,2)
mov %ds,%ds:0x12(%edi,%ecx,2)
mov %ds,%ds:0x12(%eax,%edx,2)
mov %ds,%ds:0x12(%ecx,%edx,2)
mov %ds,%ds:0x12(%edx,%edx,2)
mov %ds,%ds:0x12(%ebx,%edx,2)
mov %ds,%ds:0x12(%esp,%edx,2)
mov %ds,%ds:0x12(%ebp,%edx,2)
mov %ds,%ds:0x12(%esi,%edx,2)
mov %ds,%ds:0x12(%edi,%edx,2)
mov %ds,%ds:0x12(%eax,%ebx,2)
mov %ds,%ds:0x12(%ecx,%ebx,2)
mov %ds,%ds:0x12(%edx,%ebx,2)
mov %ds,%ds:0x12(%ebx,%ebx,2)
mov %ds,%ds:0x12(%esp,%ebx,2)
mov %ds,%ds:0x12(%ebp,%ebx,2)
mov %ds,%ds:0x12(%esi,%ebx,2)
mov %ds,%ds:0x12(%edi,%ebx,2)
mov %ds,%ds:0x12(%eax,2)
mov %ds,%ds:0x12(%ecx,2)
mov %ds,%ds:0x12(%edx,2)
mov %ds,%ds:0x12(%ebx,2)
mov %ds,%ds:0x12(%esp,2)
mov %ds,%ds:0x12(%ebp,2)
mov %ds,%ds:0x12(%esi,2)
mov %ds,%ds:0x12(%edi,2)
mov %ds,%ds:0x12(%eax,%ebp,2)
mov %ds,%ds:0x12(%ecx,%ebp,2)
mov %ds,%ds:0x12(%edx,%ebp,2)
mov %ds,%ds:0x12(%ebx,%ebp,2)
mov %ds,%ds:0x12(%esp,%ebp,2)
mov %ds,%ds:0x12(%ebp,%ebp,2)
mov %ds,%ds:0x12(%esi,%ebp,2)
mov %ds,%ds:0x12(%edi,%ebp,2)
mov %ds,%ds:0x12(%eax,%esi,2)
mov %ds,%ds:0x12(%ecx,%esi,2)
mov %ds,%ds:0x12(%edx,%esi,2)
mov %ds,%ds:0x12(%ebx,%esi,2)
mov %ds,%ds:0x12(%esp,%esi,2)
mov %ds,%ds:0x12(%ebp,%esi,2)
mov %ds,%ds:0x12(%esi,%esi,2)
mov %ds,%ds:0x12(%edi,%esi,2)
mov %ds,%ds:0x12(%eax,%edi,2)
mov %ds,%ds:0x12(%ecx,%edi,2)
mov %ds,%ds:0x12(%edx,%edi,2)
mov %ds,%ds:0x12(%ebx,%edi,2)
mov %ds,%ds:0x12(%esp,%edi,2)
mov %ds,%ds:0x12(%ebp,%edi,2)
mov %ds,%ds:0x12(%esi,%edi,2)
mov %ds,%ds:0x12(%edi,%edi,2)
mov %ds,%ds:0x12(%eax,%eax,4)
mov %ds,%ds:0x12(%ecx,%eax,4)
mov %ds,%ds:0x12(%edx,%eax,4)
mov %ds,%ds:0x12(%ebx,%eax,4)
mov %ds,%ds:0x12(%esp,%eax,4)
mov %ds,%ds:0x12(%ebp,%eax,4)
mov %ds,%ds:0x12(%esi,%eax,4)
mov %ds,%ds:0x12(%edi,%eax,4)
mov %ds,%ds:0x12(%eax,%ecx,4)
mov %ds,%ds:0x12(%ecx,%ecx,4)
mov %ds,%ds:0x12(%edx,%ecx,4)
mov %ds,%ds:0x12(%ebx,%ecx,4)
mov %ds,%ds:0x12(%esp,%ecx,4)
mov %ds,%ds:0x12(%ebp,%ecx,4)
mov %ds,%ds:0x12(%esi,%ecx,4)
mov %ds,%ds:0x12(%edi,%ecx,4)
mov %ds,%ds:0x12(%eax,%edx,4)
mov %ds,%ds:0x12(%ecx,%edx,4)
mov %ds,%ds:0x12(%edx,%edx,4)
mov %ds,%ds:0x12(%ebx,%edx,4)
mov %ds,%ds:0x12(%esp,%edx,4)
mov %ds,%ds:0x12(%ebp,%edx,4)
mov %ds,%ds:0x12(%esi,%edx,4)
mov %ds,%ds:0x12(%edi,%edx,4)
mov %ds,%ds:0x12(%eax,%ebx,4)
mov %ds,%ds:0x12(%ecx,%ebx,4)
mov %ds,%ds:0x12(%edx,%ebx,4)
mov %ds,%ds:0x12(%ebx,%ebx,4)
mov %ds,%ds:0x12(%esp,%ebx,4)
mov %ds,%ds:0x12(%ebp,%ebx,4)
mov %ds,%ds:0x12(%esi,%ebx,4)
mov %ds,%ds:0x12(%edi,%ebx,4)
mov %ds,%ds:0x12(%eax,4)
mov %ds,%ds:0x12(%ecx,4)
mov %ds,%ds:0x12(%edx,4)
mov %ds,%ds:0x12(%ebx,4)
mov %ds,%ds:0x12(%esp,4)
mov %ds,%ds:0x12(%ebp,4)
mov %ds,%ds:0x12(%esi,4)
mov %ds,%ds:0x12(%edi,4)
mov %ds,%ds:0x12(%eax,%ebp,4)
mov %ds,%ds:0x12(%ecx,%ebp,4)
mov %ds,%ds:0x12(%edx,%ebp,4)
mov %ds,%ds:0x12(%ebx,%ebp,4)
mov %ds,%ds:0x12(%esp,%ebp,4)
mov %ds,%ds:0x12(%ebp,%ebp,4)
mov %ds,%ds:0x12(%esi,%ebp,4)
mov %ds,%ds:0x12(%edi,%ebp,4)
mov %ds,%ds:0x12(%eax,%esi,4)
mov %ds,%ds:0x12(%ecx,%esi,4)
mov %ds,%ds:0x12(%edx,%esi,4)
mov %ds,%ds:0x12(%ebx,%esi,4)
mov %ds,%ds:0x12(%esp,%esi,4)
mov %ds,%ds:0x12(%ebp,%esi,4)
mov %ds,%ds:0x12(%esi,%esi,4)
mov %ds,%ds:0x12(%edi,%esi,4)
mov %ds,%ds:0x12(%eax,%edi,4)
mov %ds,%ds:0x12(%ecx,%edi,4)
mov %ds,%ds:0x12(%edx,%edi,4)
mov %ds,%ds:0x12(%ebx,%edi,4)
mov %ds,%ds:0x12(%esp,%edi,4)
mov %ds,%ds:0x12(%ebp,%edi,4)
mov %ds,%ds:0x12(%esi,%edi,4)
mov %ds,%ds:0x12(%edi,%edi,4)
mov %ds,%ds:0x12(%eax,%eax,8)
mov %ds,%ds:0x12(%ecx,%eax,8)
mov %ds,%ds:0x12(%edx,%eax,8)
mov %ds,%ds:0x12(%ebx,%eax,8)
mov %ds,%ds:0x12(%esp,%eax,8)
mov %ds,%ds:0x12(%ebp,%eax,8)
mov %ds,%ds:0x12(%esi,%eax,8)
mov %ds,%ds:0x12(%edi,%eax,8)
mov %ds,%ds:0x12(%eax,%ecx,8)
mov %ds,%ds:0x12(%ecx,%ecx,8)
mov %ds,%ds:0x12(%edx,%ecx,8)
mov %ds,%ds:0x12(%ebx,%ecx,8)
mov %ds,%ds:0x12(%esp,%ecx,8)
mov %ds,%ds:0x12(%ebp,%ecx,8)
mov %ds,%ds:0x12(%esi,%ecx,8)
mov %ds,%ds:0x12(%edi,%ecx,8)
mov %ds,%ds:0x12(%eax,%edx,8)
mov %ds,%ds:0x12(%ecx,%edx,8)
mov %ds,%ds:0x12(%edx,%edx,8)
mov %ds,%ds:0x12(%ebx,%edx,8)
mov %ds,%ds:0x12(%esp,%edx,8)
mov %ds,%ds:0x12(%ebp,%edx,8)
mov %ds,%ds:0x12(%esi,%edx,8)
mov %ds,%ds:0x12(%edi,%edx,8)
mov %ds,%ds:0x12(%eax,%ebx,8)
mov %ds,%ds:0x12(%ecx,%ebx,8)
mov %ds,%ds:0x12(%edx,%ebx,8)
mov %ds,%ds:0x12(%ebx,%ebx,8)
mov %ds,%ds:0x12(%esp,%ebx,8)
mov %ds,%ds:0x12(%ebp,%ebx,8)
mov %ds,%ds:0x12(%esi,%ebx,8)
mov %ds,%ds:0x12(%edi,%ebx,8)
mov %ds,%ds:0x12(%eax,8)
mov %ds,%ds:0x12(%ecx,8)
mov %ds,%ds:0x12(%edx,8)
mov %ds,%ds:0x12(%ebx,8)
mov %ds,%ds:0x12(%esp,8)
mov %ds,%ds:0x12(%ebp,8)
mov %ds,%ds:0x12(%esi,8)
mov %ds,%ds:0x12(%edi,8)
mov %ds,%ds:0x12(%eax,%ebp,8)
mov %ds,%ds:0x12(%ecx,%ebp,8)
mov %ds,%ds:0x12(%edx,%ebp,8)
mov %ds,%ds:0x12(%ebx,%ebp,8)
mov %ds,%ds:0x12(%esp,%ebp,8)
mov %ds,%ds:0x12(%ebp,%ebp,8)
mov %ds,%ds:0x12(%esi,%ebp,8)
mov %ds,%ds:0x12(%edi,%ebp,8)
mov %ds,%ds:0x12(%eax,%esi,8)
mov %ds,%ds:0x12(%ecx,%esi,8)
mov %ds,%ds:0x12(%edx,%esi,8)
mov %ds,%ds:0x12(%ebx,%esi,8)
mov %ds,%ds:0x12(%esp,%esi,8)
mov %ds,%ds:0x12(%ebp,%esi,8)
mov %ds,%ds:0x12(%esi,%esi,8)
mov %ds,%ds:0x12(%edi,%esi,8)
mov %ds,%ds:0x12(%eax,%edi,8)
mov %ds,%ds:0x12(%edx,%edi,8)
mov %ds,%ds:0x12(%ecx,%edi,8)
mov %ds,%ds:0x12(%ebx,%edi,8)
mov %ds,%ds:0x12(%esp,%edi,8)
mov %ds,%ds:0x12(%ebp,%edi,8)
mov %ds,%ds:0x12(%esi,%edi,8)
mov %ds,%ds:0x12(%edi,%edi,8)
mov %ds,%ds:0x12345678(%eax,%eax,1)
mov %ds,%ds:0x12345678(%ecx,%eax,1)
mov %ds,%ds:0x12345678(%edx,%eax,1)
mov %ds,%ds:0x12345678(%ebx,%eax,1)
mov %ds,%ds:0x12345678(%esp,%eax,1)
mov %ds,%ds:0x12345678(%ebp,%eax,1)
mov %ds,%ds:0x12345678(%esi,%eax,1)
mov %ds,%ds:0x12345678(%edi,%eax,1)
mov %ds,%ds:0x12345678(%eax,%ecx,1)
mov %ds,%ds:0x12345678(%ecx,%ecx,1)
mov %ds,%ds:0x12345678(%edx,%ecx,1)
mov %ds,%ds:0x12345678(%ebx,%ecx,1)
mov %ds,%ds:0x12345678(%esp,%ecx,1)
mov %ds,%ds:0x12345678(%ebp,%ecx,1)
mov %ds,%ds:0x12345678(%esi,%ecx,1)
mov %ds,%ds:0x12345678(%edi,%ecx,1)
mov %ds,%ds:0x12345678(%eax,%edx,1)
mov %ds,%ds:0x12345678(%ecx,%edx,1)
mov %ds,%ds:0x12345678(%edx,%edx,1)
mov %ds,%ds:0x12345678(%ebx,%edx,1)
mov %ds,%ds:0x12345678(%esp,%edx,1)
mov %ds,%ds:0x12345678(%ebp,%edx,1)
mov %ds,%ds:0x12345678(%esi,%edx,1)
mov %ds,%ds:0x12345678(%edi,%edx,1)
mov %ds,%ds:0x12345678(%eax,%ebx,1)
mov %ds,%ds:0x12345678(%ecx,%ebx,1)
mov %ds,%ds:0x12345678(%edx,%ebx,1)
mov %ds,%ds:0x12345678(%ebx,%ebx,1)
mov %ds,%ds:0x12345678(%esp,%ebx,1)
mov %ds,%ds:0x12345678(%ebp,%ebx,1)
mov %ds,%ds:0x12345678(%esi,%ebx,1)
mov %ds,%ds:0x12345678(%edi,%ebx,1)
mov %ds,%ds:0x12345678(%eax,1)
mov %ds,%ds:0x12345678(%ecx,1)
mov %ds,%ds:0x12345678(%edx,1)
mov %ds,%ds:0x12345678(%ebx,1)
mov %ds,%ds:0x12345678(%esp,1)
mov %ds,%ds:0x12345678(%ebp,1)
mov %ds,%ds:0x12345678(%esi,1)
mov %ds,%ds:0x12345678(%edi,1)
mov %ds,%ds:0x12345678(%eax,%ebp,1)
mov %ds,%ds:0x12345678(%ecx,%ebp,1)
mov %ds,%ds:0x12345678(%edx,%ebp,1)
mov %ds,%ds:0x12345678(%ebx,%ebp,1)
mov %ds,%ds:0x12345678(%esp,%ebp,1)
mov %ds,%ds:0x12345678(%ebp,%ebp,1)
mov %ds,%ds:0x12345678(%esi,%ebp,1)
mov %ds,%ds:0x12345678(%edi,%ebp,1)
mov %ds,%ds:0x12345678(%eax,%esi,1)
mov %ds,%ds:0x12345678(%ecx,%esi,1)
mov %ds,%ds:0x12345678(%edx,%esi,1)
mov %ds,%ds:0x12345678(%ebx,%esi,1)
mov %ds,%ds:0x12345678(%esp,%esi,1)
mov %ds,%ds:0x12345678(%ebp,%esi,1)
mov %ds,%ds:0x12345678(%esi,%esi,1)
mov %ds,%ds:0x12345678(%edi,%esi,1)
mov %ds,%ds:0x12345678(%eax,%edi,1)
mov %ds,%ds:0x12345678(%ecx,%edi,1)
mov %ds,%ds:0x12345678(%edx,%edi,1)
mov %ds,%ds:0x12345678(%ebx,%edi,1)
mov %ds,%ds:0x12345678(%esp,%edi,1)
mov %ds,%ds:0x12345678(%ebp,%edi,1)
mov %ds,%ds:0x12345678(%esi,%edi,1)
mov %ds,%ds:0x12345678(%edi,%edi,1)
mov %ds,%ds:0x12345678(%eax,%eax,2)
mov %ds,%ds:0x12345678(%ecx,%eax,2)
mov %ds,%ds:0x12345678(%edx,%eax,2)
mov %ds,%ds:0x12345678(%ebx,%eax,2)
mov %ds,%ds:0x12345678(%esp,%eax,2)
mov %ds,%ds:0x12345678(%ebp,%eax,2)
mov %ds,%ds:0x12345678(%esi,%eax,2)
mov %ds,%ds:0x12345678(%edi,%eax,2)
mov %ds,%ds:0x12345678(%eax,%ecx,2)
mov %ds,%ds:0x12345678(%ecx,%ecx,2)
mov %ds,%ds:0x12345678(%edx,%ecx,2)
mov %ds,%ds:0x12345678(%ebx,%ecx,2)
mov %ds,%ds:0x12345678(%esp,%ecx,2)
mov %ds,%ds:0x12345678(%ebp,%ecx,2)
mov %ds,%ds:0x12345678(%esi,%ecx,2)
mov %ds,%ds:0x12345678(%edi,%ecx,2)
mov %ds,%ds:0x12345678(%eax,%edx,2)
mov %ds,%ds:0x12345678(%ecx,%edx,2)
mov %ds,%ds:0x12345678(%edx,%edx,2)
mov %ds,%ds:0x12345678(%ebx,%edx,2)
mov %ds,%ds:0x12345678(%esp,%edx,2)
mov %ds,%ds:0x12345678(%ebp,%edx,2)
mov %ds,%ds:0x12345678(%esi,%edx,2)
mov %ds,%ds:0x12345678(%edi,%edx,2)
mov %ds,%ds:0x12345678(%eax,%ebx,2)
mov %ds,%ds:0x12345678(%ecx,%ebx,2)
mov %ds,%ds:0x12345678(%edx,%ebx,2)
mov %ds,%ds:0x12345678(%ebx,%ebx,2)
mov %ds,%ds:0x12345678(%esp,%ebx,2)
mov %ds,%ds:0x12345678(%ebp,%ebx,2)
mov %ds,%ds:0x12345678(%esi,%ebx,2)
mov %ds,%ds:0x12345678(%edi,%ebx,2)
mov %ds,%ds:0x12345678(%eax,2)
mov %ds,%ds:0x12345678(%ecx,2)
mov %ds,%ds:0x12345678(%edx,2)
mov %ds,%ds:0x12345678(%ebx,2)
mov %ds,%ds:0x12345678(%esp,2)
mov %ds,%ds:0x12345678(%ebp,2)
mov %ds,%ds:0x12345678(%esi,2)
mov %ds,%ds:0x12345678(%edi,2)
mov %ds,%ds:0x12345678(%eax,%ebp,2)
mov %ds,%ds:0x12345678(%ecx,%ebp,2)
mov %ds,%ds:0x12345678(%edx,%ebp,2)
mov %ds,%ds:0x12345678(%ebx,%ebp,2)
mov %ds,%ds:0x12345678(%esp,%ebp,2)
mov %ds,%ds:0x12345678(%ebp,%ebp,2)
mov %ds,%ds:0x12345678(%esi,%ebp,2)
mov %ds,%ds:0x12345678(%edi,%ebp,2)
mov %ds,%ds:0x12345678(%eax,%esi,2)
mov %ds,%ds:0x12345678(%ecx,%esi,2)
mov %ds,%ds:0x12345678(%edx,%esi,2)
mov %ds,%ds:0x12345678(%ebx,%esi,2)
mov %ds,%ds:0x12345678(%esp,%esi,2)
mov %ds,%ds:0x12345678(%ebp,%esi,2)
mov %ds,%ds:0x12345678(%esi,%esi,2)
mov %ds,%ds:0x12345678(%edi,%esi,2)
mov %ds,%ds:0x12345678(%eax,%edi,2)
mov %ds,%ds:0x12345678(%ecx,%edi,2)
mov %ds,%ds:0x12345678(%edx,%edi,2)
mov %ds,%ds:0x12345678(%ebx,%edi,2)
mov %ds,%ds:0x12345678(%esp,%edi,2)
mov %ds,%ds:0x12345678(%ebp,%edi,2)
mov %ds,%ds:0x12345678(%esi,%edi,2)
mov %ds,%ds:0x12345678(%edi,%edi,2)
mov %ds,%ds:0x12345678(%eax,%eax,4)
mov %ds,%ds:0x12345678(%ecx,%eax,4)
mov %ds,%ds:0x12345678(%edx,%eax,4)
mov %ds,%ds:0x12345678(%ebx,%eax,4)
mov %ds,%ds:0x12345678(%esp,%eax,4)
mov %ds,%ds:0x12345678(%ebp,%eax,4)
mov %ds,%ds:0x12345678(%esi,%eax,4)
mov %ds,%ds:0x12345678(%edi,%eax,4)
mov %ds,%ds:0x12345678(%eax,%ecx,4)
mov %ds,%ds:0x12345678(%ecx,%ecx,4)
mov %ds,%ds:0x12345678(%edx,%ecx,4)
mov %ds,%ds:0x12345678(%ebx,%ecx,4)
mov %ds,%ds:0x12345678(%esp,%ecx,4)
mov %ds,%ds:0x12345678(%ebp,%ecx,4)
mov %ds,%ds:0x12345678(%esi,%ecx,4)
mov %ds,%ds:0x12345678(%edi,%ecx,4)
mov %ds,%ds:0x12345678(%eax,%edx,4)
mov %ds,%ds:0x12345678(%ecx,%edx,4)
mov %ds,%ds:0x12345678(%edx,%edx,4)
mov %ds,%ds:0x12345678(%ebx,%edx,4)
mov %ds,%ds:0x12345678(%esp,%edx,4)
mov %ds,%ds:0x12345678(%ebp,%edx,4)
mov %ds,%ds:0x12345678(%esi,%edx,4)
mov %ds,%ds:0x12345678(%edi,%edx,4)
mov %ds,%ds:0x12345678(%eax,%ebx,4)
mov %ds,%ds:0x12345678(%ecx,%ebx,4)
mov %ds,%ds:0x12345678(%edx,%ebx,4)
mov %ds,%ds:0x12345678(%ebx,%ebx,4)
mov %ds,%ds:0x12345678(%esp,%ebx,4)
mov %ds,%ds:0x12345678(%ebp,%ebx,4)
mov %ds,%ds:0x12345678(%esi,%ebx,4)
mov %ds,%ds:0x12345678(%edi,%ebx,4)
mov %ds,%ds:0x12345678(%eax,4)
mov %ds,%ds:0x12345678(%ecx,4)
mov %ds,%ds:0x12345678(%edx,4)
mov %ds,%ds:0x12345678(%ebx,4)
mov %ds,%ds:0x12345678(%esp,4)
mov %ds,%ds:0x12345678(%ebp,4)
mov %ds,%ds:0x12345678(%esi,4)
mov %ds,%ds:0x12345678(%edi,4)
mov %ds,%ds:0x12345678(%eax,%ebp,4)
mov %ds,%ds:0x12345678(%ecx,%ebp,4)
mov %ds,%ds:0x12345678(%edx,%ebp,4)
mov %ds,%ds:0x12345678(%ebx,%ebp,4)
mov %ds,%ds:0x12345678(%esp,%ebp,4)
mov %ds,%ds:0x12345678(%ebp,%ebp,4)
mov %ds,%ds:0x12345678(%esi,%ebp,4)
mov %ds,%ds:0x12345678(%edi,%ebp,4)
mov %ds,%ds:0x12345678(%eax,%esi,4)
mov %ds,%ds:0x12345678(%ecx,%esi,4)
mov %ds,%ds:0x12345678(%edx,%esi,4)
mov %ds,%ds:0x12345678(%ebx,%esi,4)
mov %ds,%ds:0x12345678(%esp,%esi,4)
mov %ds,%ds:0x12345678(%ebp,%esi,4)
mov %ds,%ds:0x12345678(%esi,%esi,4)
mov %ds,%ds:0x12345678(%edi,%esi,4)
mov %ds,%ds:0x12345678(%eax,%edi,4)
mov %ds,%ds:0x12345678(%ecx,%edi,4)
mov %ds,%ds:0x12345678(%edx,%edi,4)
mov %ds,%ds:0x12345678(%ebx,%edi,4)
mov %ds,%ds:0x12345678(%esp,%edi,4)
mov %ds,%ds:0x12345678(%ebp,%edi,4)
mov %ds,%ds:0x12345678(%esi,%edi,4)
mov %ds,%ds:0x12345678(%edi,%edi,4)
mov %ds,%ds:0x12345678(%eax,%eax,8)
mov %ds,%ds:0x12345678(%ecx,%eax,8)
mov %ds,%ds:0x12345678(%edx,%eax,8)
mov %ds,%ds:0x12345678(%ebx,%eax,8)
mov %ds,%ds:0x12345678(%esp,%eax,8)
mov %ds,%ds:0x12345678(%ebp,%eax,8)
mov %ds,%ds:0x12345678(%esi,%eax,8)
mov %ds,%ds:0x12345678(%edi,%eax,8)
mov %ds,%ds:0x12345678(%eax,%ecx,8)
mov %ds,%ds:0x12345678(%ecx,%ecx,8)
mov %ds,%ds:0x12345678(%edx,%ecx,8)
mov %ds,%ds:0x12345678(%ebx,%ecx,8)
mov %ds,%ds:0x12345678(%esp,%ecx,8)
mov %ds,%ds:0x12345678(%ebp,%ecx,8)
mov %ds,%ds:0x12345678(%esi,%ecx,8)
mov %ds,%ds:0x12345678(%edi,%ecx,8)
mov %ds,%ds:0x12345678(%eax,%edx,8)
mov %ds,%ds:0x12345678(%ecx,%edx,8)
mov %ds,%ds:0x12345678(%edx,%edx,8)
mov %ds,%ds:0x12345678(%ebx,%edx,8)
mov %ds,%ds:0x12345678(%esp,%edx,8)
mov %ds,%ds:0x12345678(%ebp,%edx,8)
mov %ds,%ds:0x12345678(%esi,%edx,8)
mov %ds,%ds:0x12345678(%edi,%edx,8)
mov %ds,%ds:0x12345678(%eax,%ebx,8)
mov %ds,%ds:0x12345678(%ecx,%ebx,8)
mov %ds,%ds:0x12345678(%edx,%ebx,8)
mov %ds,%ds:0x12345678(%ebx,%ebx,8)
mov %ds,%ds:0x12345678(%esp,%ebx,8)
mov %ds,%ds:0x12345678(%ebp,%ebx,8)
mov %ds,%ds:0x12345678(%esi,%ebx,8)
mov %ds,%ds:0x12345678(%edi,%ebx,8)
mov %ds,%ds:0x12345678(%eax,8)
mov %ds,%ds:0x12345678(%ecx,8)
mov %ds,%ds:0x12345678(%edx,8)
mov %ds,%ds:0x12345678(%ebx,8)
mov %ds,%ds:0x12345678(%esp,8)
mov %ds,%ds:0x12345678(%ebp,8)
mov %ds,%ds:0x12345678(%esi,8)
mov %ds,%ds:0x12345678(%edi,8)
mov %ds,%ds:0x12345678(%eax,%ebp,8)
mov %ds,%ds:0x12345678(%ecx,%ebp,8)
mov %ds,%ds:0x12345678(%edx,%ebp,8)
mov %ds,%ds:0x12345678(%ebx,%ebp,8)
mov %ds,%ds:0x12345678(%esp,%ebp,8)
mov %ds,%ds:0x12345678(%ebp,%ebp,8)
mov %ds,%ds:0x12345678(%esi,%ebp,8)
mov %ds,%ds:0x12345678(%edi,%ebp,8)
mov %ds,%ds:0x12345678(%eax,%esi,8)
mov %ds,%ds:0x12345678(%ecx,%esi,8)
mov %ds,%ds:0x12345678(%edx,%esi,8)
mov %ds,%ds:0x12345678(%ebx,%esi,8)
mov %ds,%ds:0x12345678(%esp,%esi,8)
mov %ds,%ds:0x12345678(%ebp,%esi,8)
mov %ds,%ds:0x12345678(%esi,%esi,8)
mov %ds,%ds:0x12345678(%edi,%esi,8)
mov %ds,%ds:0x12345678(%eax,%edi,8)
mov %ds,%ds:0x12345678(%edx,%edi,8)
mov %ds,%ds:0x12345678(%ecx,%edi,8)
mov %ds,%ds:0x12345678(%ebx,%edi,8)
mov %ds,%ds:0x12345678(%esp,%edi,8)
mov %ds,%ds:0x12345678(%ebp,%edi,8)
mov %ds,%ds:0x12345678(%esi,%edi,8)
mov %ds,%ds:0x12345678(%edi,%edi,8)
mov %ds,%ds:(%ebp,%eax,1)
mov %ds,%ds:(%ebp,%ecx,1)
mov %ds,%ds:(%ebp,%edx,1)
mov %ds,%ds:(%ebp,%ebx,1)
mov %ds,%ds:(%ebp,1)
mov %ds,%ds:(%ebp,%ebp,1)
mov %ds,%ds:(%ebp,%esi,1)
mov %ds,%ds:(%ebp,%edi,1)
mov %ds,%ds:(%ebp,%eax,2)
mov %ds,%ds:(%ebp,%ecx,2)
mov %ds,%ds:(%ebp,%edx,2)
mov %ds,%ds:(%ebp,%ebx,2)
mov %ds,%ds:(%ebp,2)
mov %ds,%ds:(%ebp,%ebp,2)
mov %ds,%ds:(%ebp,%esi,2)
mov %ds,%ds:(%ebp,%edi,2)
mov %ds,%ds:(%ebp,%eax,4)
mov %ds,%ds:(%ebp,%ecx,4)
mov %ds,%ds:(%ebp,%edx,4)
mov %ds,%ds:(%ebp,%ebx,4)
mov %ds,%ds:(%ebp,4)
mov %ds,%ds:(%ebp,%ebp,4)
mov %ds,%ds:(%ebp,%esi,4)
mov %ds,%ds:(%ebp,%edi,4)
mov %ds,%ds:(%ebp,%eax,8)
mov %ds,%ds:(%ebp,%ecx,8)
mov %ds,%ds:(%ebp,%edx,8)
mov %ds,%ds:(%ebp,%ebx,8)
mov %ds,%ds:(%ebp,8)
mov %ds,%ds:(%ebp,%ebp,8)
mov %ds,%ds:(%ebp,%esi,8)
mov %ds,%ds:(%ebp,%edi,8)
mov %ds,%ds:0x12(,1)
mov %ds,%ds:0x12(,2)
mov %ds,%ds:0x12(,4)
mov %ds,%ds:0x12(,8)
# Force a good alignment.
.p2align 4,0
# ----------------------------------------------------------------------
# NOTE(review): the following section originates from a separate test
# source file in the binutils-ia16 tree:
#   gas/testsuite/gas/i386/x86-64-avx512bw_vl-wig.s
# ----------------------------------------------------------------------
# Check 64bit AVX512{BW,VL} WIG instructions
.allow_index_reg
.text
_start:
vpabsb %xmm29, %xmm30 # AVX512{BW,VL}
vpabsb %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpabsb %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpabsb (%rcx), %xmm30 # AVX512{BW,VL}
vpabsb 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpabsb 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsb 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpabsb -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsb -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpabsb %ymm29, %ymm30 # AVX512{BW,VL}
vpabsb %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpabsb %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpabsb (%rcx), %ymm30 # AVX512{BW,VL}
vpabsb 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpabsb 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsb 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpabsb -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsb -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpabsw %xmm29, %xmm30 # AVX512{BW,VL}
vpabsw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpabsw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpabsw (%rcx), %xmm30 # AVX512{BW,VL}
vpabsw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpabsw 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsw 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpabsw -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpabsw -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpabsw %ymm29, %ymm30 # AVX512{BW,VL}
vpabsw %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpabsw %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpabsw (%rcx), %ymm30 # AVX512{BW,VL}
vpabsw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpabsw 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsw 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpabsw -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpabsw -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpacksswb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpacksswb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpacksswb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpacksswb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpacksswb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpacksswb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpacksswb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpacksswb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpacksswb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpackuswb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpackuswb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackuswb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpackuswb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpackuswb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpackuswb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackuswb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpackuswb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpackuswb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddusb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddusb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddusb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddusb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddusw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddusw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddusw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddusw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddusw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddusw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddusw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpaddw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpaddw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpaddw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpaddw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpaddw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpaddw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpaddw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $123, -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpalignr $123, -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpavgb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpavgb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpavgb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpavgb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpavgw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpavgw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpavgw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpavgw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpavgw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpavgw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpavgw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpcmpeqb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpeqw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpeqw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtb %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtb -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw %xmm29, %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw %xmm29, %xmm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%rcx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw 0x123(%rax,%r14,8), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw 2032(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw -2048(%rdx), %xmm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%rdx), %xmm30, %k5 # AVX512{BW,VL}
vpcmpgtw %ymm29, %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw %ymm29, %ymm30, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%rcx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw 0x123(%rax,%r14,8), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw 4064(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpcmpgtw -4096(%rdx), %ymm30, %k5 # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%rdx), %ymm30, %k5 # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddubsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaddwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaddwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaddwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaddwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaddwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxub %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxub (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxub 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxub -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxub %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxub (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxub 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxub -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxub -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmaxuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmaxuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmaxuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmaxuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmaxuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmaxuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminub %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminub (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminub 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminub -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminub %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminub (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminub 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminub -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminub -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpminuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpminuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpminuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpminuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpminuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpminuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpminuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%rcx), %xmm30 # AVX512{BW,VL}
vpmovsxbw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpmovsxbw 1016(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%rdx), %xmm30 # AVX512{BW,VL}
vpmovsxbw -1024(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%rdx), %xmm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30 # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpmovsxbw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmovsxbw (%rcx), %ymm30 # AVX512{BW,VL}
vpmovsxbw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpmovsxbw 2032(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%rdx), %ymm30 # AVX512{BW,VL}
vpmovsxbw -2048(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%rdx), %ymm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%rcx), %xmm30 # AVX512{BW,VL}
vpmovzxbw 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpmovzxbw 1016(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%rdx), %xmm30 # AVX512{BW,VL}
vpmovzxbw -1024(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%rdx), %xmm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30 # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30{%k7} # AVX512{BW,VL}
vpmovzxbw %xmm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmovzxbw (%rcx), %ymm30 # AVX512{BW,VL}
vpmovzxbw 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpmovzxbw 2032(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%rdx), %ymm30 # AVX512{BW,VL}
vpmovzxbw -2048(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%rdx), %ymm30 # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhrsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhrsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhrsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhrsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhuw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhuw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhuw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhuw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhuw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhuw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhuw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhuw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmulhw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmulhw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmulhw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmulhw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmulhw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmulhw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmulhw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpmullw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpmullw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmullw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpmullw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpmullw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpmullw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmullw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpmullw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpmullw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsadbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsadbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsadbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsadbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsadbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsadbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshufb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshufb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpshufb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpshufb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshufb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshufb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpshufb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpshufb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpshufhw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpshufhw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpshufhw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpshufhw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshufhw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshufhw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpshufhw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpshufhw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpshufhw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpshufhw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpshuflw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpshuflw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpshuflw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpshuflw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpshuflw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpshuflw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpshuflw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpshuflw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpshuflw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpshuflw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsllw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsllw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsraw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsraw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsraw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsraw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsraw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsraw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsraw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsraw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsrlw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlw %xmm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw 2032(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlw 2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw -2048(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsrlw -2064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrldq $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrldq $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsrldq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsrldq $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrldq $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsrldq $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrldq $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsrldq $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrldq $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsrldq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsrldq $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrldq $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsrldq $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrldq $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsrlw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsrlw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsrlw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsrlw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsrlw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsrlw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsrlw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsrlw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsrlw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsrlw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsraw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsraw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsraw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsraw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsraw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsraw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsraw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsraw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsraw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsraw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsraw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubsb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubsb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubsb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubsb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubsw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubsw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubsw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubsw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubsw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubsw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubsw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubusb %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubusb (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusb 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusb -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubusb %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubusb (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusb 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusb -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusb -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubusw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubusw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubusw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubusw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubusw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubusw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubusw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsubw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsubw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpsubw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsubw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsubw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpsubw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpsubw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpckhbw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpckhbw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpckhbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpckhwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpckhwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpckhwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpckhwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpcklbw %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpcklbw %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpcklbw (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklbw -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpunpcklwd %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%rcx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd 2032(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd -2048(%rdx), %xmm29, %xmm30 # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%rdx), %xmm29, %xmm30 # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpunpcklwd %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpunpcklwd (%rcx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd 4064(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpunpcklwd -4096(%rdx), %ymm29, %ymm30 # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%rdx), %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpslldq $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpslldq $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpslldq $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpslldq $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpslldq $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpslldq $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpslldq $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpslldq $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpslldq $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpslldq $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpslldq $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpslldq $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpslldq $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpslldq $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vpsllw $0xab, %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %xmm29, %xmm30 # AVX512{BW,VL}
vpsllw $123, (%rcx), %xmm30 # AVX512{BW,VL}
vpsllw $123, 0x123(%rax,%r14,8), %xmm30 # AVX512{BW,VL}
vpsllw $123, 2032(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%rdx), %xmm30 # AVX512{BW,VL}
vpsllw $123, -2048(%rdx), %xmm30 # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%rdx), %xmm30 # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vpsllw $0xab, %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vpsllw $123, %ymm29, %ymm30 # AVX512{BW,VL}
vpsllw $123, (%rcx), %ymm30 # AVX512{BW,VL}
vpsllw $123, 0x123(%rax,%r14,8), %ymm30 # AVX512{BW,VL}
vpsllw $123, 4064(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%rdx), %ymm30 # AVX512{BW,VL}
vpsllw $123, -4096(%rdx), %ymm30 # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%rdx), %ymm30 # AVX512{BW,VL}
# Switch the assembler to Intel operand order (dest first) with no %-register
# prefixes for the remainder of the file; the instructions below re-test the
# same AVX512{BW,VL} forms in Intel syntax.
	.intel_syntax noprefix
vpabsb xmm30, xmm29 # AVX512{BW,VL}
vpabsb xmm30{k7}, xmm29 # AVX512{BW,VL}
vpabsb xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpabsb xmm30, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpabsb xmm30, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpabsb xmm30, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpabsb ymm30, ymm29 # AVX512{BW,VL}
vpabsb ymm30{k7}, ymm29 # AVX512{BW,VL}
vpabsb ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vpabsb ymm30, YMMWORD PTR [rcx] # AVX512{BW,VL}
# ---- AVX512{BW,VL} coverage, Intel syntax ----
# Pattern for each mnemonic/width: plain register form, merge-masking {k7},
# zeroing-masking {k7}{z}, then memory operands chosen to sit just inside
# (marked "Disp8") and just outside the EVEX compressed-displacement
# (Disp8*N) window, forcing both 8-bit and 32-bit displacement encodings.
# Registers xmm/ymm28-30 require EVEX, ensuring the AVX512 encoding is used.
	vpabsb ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpabsb ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpabsb ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpabsb ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpabsb ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpabsw xmm30, xmm29	 # AVX512{BW,VL}
	vpabsw xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpabsw xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpabsw xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpabsw xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpabsw xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpabsw xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpabsw xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpabsw xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpabsw ymm30, ymm29	 # AVX512{BW,VL}
	vpabsw ymm30{k7}, ymm29	 # AVX512{BW,VL}
	vpabsw ymm30{k7}{z}, ymm29	 # AVX512{BW,VL}
	vpabsw ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpabsw ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpabsw ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpabsw ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpabsw ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpabsw ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpacksswb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpacksswb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpacksswb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpacksswb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpacksswb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpacksswb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpacksswb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpacksswb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpacksswb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpacksswb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpacksswb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpacksswb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpacksswb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpacksswb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpacksswb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpacksswb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpacksswb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpacksswb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpackuswb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpackuswb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpackuswb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpackuswb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpackuswb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpackuswb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpackuswb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpackuswb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpackuswb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpackuswb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpackuswb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpackuswb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpackuswb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpackuswb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpackuswb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpackuswb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpackuswb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpackuswb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddsb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddsb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddsb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddsb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddsb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddsb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddsb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddsb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddsb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddsb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddsb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddsb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddsb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddsb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddsb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddsb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddsw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddsw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddsw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddsw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddsw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddsw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddsw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddsw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddsw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddsw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddsw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddsw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddsw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddsw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddsw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddsw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddusb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddusb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddusb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddusb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddusb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddusb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddusb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddusb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddusw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddusw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddusw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddusw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddusw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddusw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddusw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddusw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddusw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddusw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddusw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddusw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpaddw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpaddw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpaddw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpaddw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpaddw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpaddw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpaddw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpaddw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpaddw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpaddw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpaddw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpaddw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
# Immediate-taking forms additionally probe both 0xab and 123 as imm8 values.
	vpalignr xmm30, xmm29, xmm28, 0xab	 # AVX512{BW,VL}
	vpalignr xmm30{k7}, xmm29, xmm28, 0xab	 # AVX512{BW,VL}
	vpalignr xmm30{k7}{z}, xmm29, xmm28, 0xab	 # AVX512{BW,VL}
	vpalignr xmm30, xmm29, xmm28, 123	 # AVX512{BW,VL}
	vpalignr xmm30, xmm29, XMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpalignr xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpalignr xmm30, xmm29, XMMWORD PTR [rdx+2032], 123	 # AVX512{BW,VL} Disp8
	vpalignr xmm30, xmm29, XMMWORD PTR [rdx+2048], 123	 # AVX512{BW,VL}
	vpalignr xmm30, xmm29, XMMWORD PTR [rdx-2048], 123	 # AVX512{BW,VL} Disp8
	vpalignr xmm30, xmm29, XMMWORD PTR [rdx-2064], 123	 # AVX512{BW,VL}
	vpalignr ymm30, ymm29, ymm28, 0xab	 # AVX512{BW,VL}
	vpalignr ymm30{k7}, ymm29, ymm28, 0xab	 # AVX512{BW,VL}
	vpalignr ymm30{k7}{z}, ymm29, ymm28, 0xab	 # AVX512{BW,VL}
	vpalignr ymm30, ymm29, ymm28, 123	 # AVX512{BW,VL}
	vpalignr ymm30, ymm29, YMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpalignr ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpalignr ymm30, ymm29, YMMWORD PTR [rdx+4064], 123	 # AVX512{BW,VL} Disp8
	vpalignr ymm30, ymm29, YMMWORD PTR [rdx+4096], 123	 # AVX512{BW,VL}
	vpalignr ymm30, ymm29, YMMWORD PTR [rdx-4096], 123	 # AVX512{BW,VL} Disp8
	vpalignr ymm30, ymm29, YMMWORD PTR [rdx-4128], 123	 # AVX512{BW,VL}
	vpavgb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpavgb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpavgb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpavgb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpavgb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpavgb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpavgb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpavgb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpavgw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpavgw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpavgw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpavgw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpavgw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpavgw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpavgw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpavgw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpavgw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpavgw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpavgw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpavgw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
# Comparison forms write a mask register (k5); only merge-masking {k7} is
# encodable there, so no {z} variants appear in these groups.
	vpcmpeqb k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqb k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqb k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqb k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpeqb k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpeqb k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqb k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqb k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqb k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqb k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpeqb k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpeqb k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpcmpeqw k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpeqw k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqw k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpeqw k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpeqw k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqw k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpeqw k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpeqw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpeqw k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpeqw k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpeqw k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpcmpgtb k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtb k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtb k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtb k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpgtb k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpgtb k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtb k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtb k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtb k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtb k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpgtb k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpgtb k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpcmpgtw k5, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, xmm30, xmm29	 # AVX512{BW,VL}
	vpcmpgtw k5, xmm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtw k5, xmm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtw k5, xmm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5, xmm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpcmpgtw k5, xmm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5, xmm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpcmpgtw k5, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtw k5{k7}, ymm30, ymm29	 # AVX512{BW,VL}
	vpcmpgtw k5, ymm30, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpcmpgtw k5, ymm30, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpcmpgtw k5, ymm30, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5, ymm30, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpcmpgtw k5, ymm30, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpcmpgtw k5, ymm30, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaddubsw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddubsw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddubsw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddubsw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddubsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaddubsw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaddubsw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddubsw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddubsw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddubsw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddubsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaddubsw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaddwd xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddwd xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddwd xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaddwd xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaddwd xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaddwd ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddwd ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddwd ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaddwd ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaddwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaddwd ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxsb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxsb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxsb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxsb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxsw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxsw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxsw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxsw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxsw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxsw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxub xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxub xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxub xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxub xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxub xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxub xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxub xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxub xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxub xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxub ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxub ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxub ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxub ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxub ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxub ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxub ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxub ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxub ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmaxuw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxuw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxuw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmaxuw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxuw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmaxuw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmaxuw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxuw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxuw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmaxuw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmaxuw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmaxuw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminsb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminsb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminsb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminsb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminsb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminsb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminsb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminsb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminsw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminsw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminsw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminsw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminsw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminsw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminsw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminsw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminsw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminsw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminsw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminub xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminub xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminub xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminub xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminub xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminub xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminub xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminub xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminub xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminub ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminub ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminub ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminub ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminub ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminub ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminub ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminub ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminub ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpminuw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpminuw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminuw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpminuw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminuw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminuw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpminuw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpminuw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpminuw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpminuw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpminuw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminuw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpminuw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpminuw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpminuw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpminuw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpminuw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpminuw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
# Widening moves: memory source is half the destination width, so the
# Disp8*N window differs (QWORD -> +/-1024 for xmm, XMMWORD -> +/-2048 for ymm).
	vpmovsxbw xmm30, xmm29	 # AVX512{BW,VL}
	vpmovsxbw xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw xmm30, QWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovsxbw xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovsxbw xmm30, QWORD PTR [rdx+1016]	 # AVX512{BW,VL} Disp8
	vpmovsxbw xmm30, QWORD PTR [rdx+1024]	 # AVX512{BW,VL}
	vpmovsxbw xmm30, QWORD PTR [rdx-1024]	 # AVX512{BW,VL} Disp8
	vpmovsxbw xmm30, QWORD PTR [rdx-1032]	 # AVX512{BW,VL}
	vpmovsxbw ymm30, xmm29	 # AVX512{BW,VL}
	vpmovsxbw ymm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw ymm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovsxbw ymm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovsxbw ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovsxbw ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmovsxbw ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmovsxbw ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmovsxbw ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmovzxbw xmm30, xmm29	 # AVX512{BW,VL}
	vpmovzxbw xmm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw xmm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw xmm30, QWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovzxbw xmm30, QWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovzxbw xmm30, QWORD PTR [rdx+1016]	 # AVX512{BW,VL} Disp8
	vpmovzxbw xmm30, QWORD PTR [rdx+1024]	 # AVX512{BW,VL}
	vpmovzxbw xmm30, QWORD PTR [rdx-1024]	 # AVX512{BW,VL} Disp8
	vpmovzxbw xmm30, QWORD PTR [rdx-1032]	 # AVX512{BW,VL}
	vpmovzxbw ymm30, xmm29	 # AVX512{BW,VL}
	vpmovzxbw ymm30{k7}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw ymm30{k7}{z}, xmm29	 # AVX512{BW,VL}
	vpmovzxbw ymm30, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmovzxbw ymm30, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmovzxbw ymm30, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmovzxbw ymm30, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmovzxbw ymm30, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmovzxbw ymm30, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhrsw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhrsw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhrsw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhrsw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhrsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhrsw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhrsw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhrsw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhrsw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhrsw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhrsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhrsw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmulhuw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhuw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhuw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhuw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhuw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhuw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhuw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhuw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhuw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhuw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhuw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhuw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmulhw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmulhw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmulhw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmulhw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmulhw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmulhw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmulhw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmulhw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmulhw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmulhw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmulhw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmulhw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpmullw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpmullw xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmullw xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpmullw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmullw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmullw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpmullw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpmullw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpmullw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpmullw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpmullw ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmullw ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpmullw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpmullw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpmullw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpmullw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpmullw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpmullw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
# vpsadbw does not accept write-masking, so only unmasked forms are tested.
	vpsadbw xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpsadbw xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpsadbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpsadbw xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpsadbw xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpsadbw xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpsadbw xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpsadbw ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpsadbw ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpsadbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpsadbw ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpsadbw ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpsadbw ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpsadbw ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpshufb xmm30, xmm29, xmm28	 # AVX512{BW,VL}
	vpshufb xmm30{k7}, xmm29, xmm28	 # AVX512{BW,VL}
	vpshufb xmm30{k7}{z}, xmm29, xmm28	 # AVX512{BW,VL}
	vpshufb xmm30, xmm29, XMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpshufb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpshufb xmm30, xmm29, XMMWORD PTR [rdx+2032]	 # AVX512{BW,VL} Disp8
	vpshufb xmm30, xmm29, XMMWORD PTR [rdx+2048]	 # AVX512{BW,VL}
	vpshufb xmm30, xmm29, XMMWORD PTR [rdx-2048]	 # AVX512{BW,VL} Disp8
	vpshufb xmm30, xmm29, XMMWORD PTR [rdx-2064]	 # AVX512{BW,VL}
	vpshufb ymm30, ymm29, ymm28	 # AVX512{BW,VL}
	vpshufb ymm30{k7}, ymm29, ymm28	 # AVX512{BW,VL}
	vpshufb ymm30{k7}{z}, ymm29, ymm28	 # AVX512{BW,VL}
	vpshufb ymm30, ymm29, YMMWORD PTR [rcx]	 # AVX512{BW,VL}
	vpshufb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234]	 # AVX512{BW,VL}
	vpshufb ymm30, ymm29, YMMWORD PTR [rdx+4064]	 # AVX512{BW,VL} Disp8
	vpshufb ymm30, ymm29, YMMWORD PTR [rdx+4096]	 # AVX512{BW,VL}
	vpshufb ymm30, ymm29, YMMWORD PTR [rdx-4096]	 # AVX512{BW,VL} Disp8
	vpshufb ymm30, ymm29, YMMWORD PTR [rdx-4128]	 # AVX512{BW,VL}
	vpshufhw xmm30, xmm29, 0xab	 # AVX512{BW,VL}
	vpshufhw xmm30{k7}, xmm29, 0xab	 # AVX512{BW,VL}
	vpshufhw xmm30{k7}{z}, xmm29, 0xab	 # AVX512{BW,VL}
	vpshufhw xmm30, xmm29, 123	 # AVX512{BW,VL}
	vpshufhw xmm30, XMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpshufhw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
	vpshufhw xmm30, XMMWORD PTR [rdx+2032], 123	 # AVX512{BW,VL} Disp8
	vpshufhw xmm30, XMMWORD PTR [rdx+2048], 123	 # AVX512{BW,VL}
	vpshufhw xmm30, XMMWORD PTR [rdx-2048], 123	 # AVX512{BW,VL} Disp8
	vpshufhw xmm30, XMMWORD PTR [rdx-2064], 123	 # AVX512{BW,VL}
	vpshufhw ymm30, ymm29, 0xab	 # AVX512{BW,VL}
	vpshufhw ymm30{k7}, ymm29, 0xab	 # AVX512{BW,VL}
	vpshufhw ymm30{k7}{z}, ymm29, 0xab	 # AVX512{BW,VL}
	vpshufhw ymm30, ymm29, 123	 # AVX512{BW,VL}
	vpshufhw ymm30, YMMWORD PTR [rcx], 123	 # AVX512{BW,VL}
	vpshufhw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512{BW,VL}
vpshufhw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpshufhw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpshuflw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpshuflw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpshuflw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpshuflw xmm30, xmm29, 123 # AVX512{BW,VL}
vpshuflw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpshuflw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpshuflw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpshuflw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpshuflw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpshuflw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpshuflw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpshuflw ymm30, ymm29, 123 # AVX512{BW,VL}
vpshuflw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpshuflw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpshuflw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpshuflw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsllw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsllw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsllw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsllw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsllw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsllw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsllw ymm30, ymm29, xmm28 # AVX512{BW,VL}
vpsllw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
vpsllw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsllw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsllw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsllw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsraw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsraw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsraw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsraw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsraw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsraw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsraw ymm30, ymm29, xmm28 # AVX512{BW,VL}
vpsraw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
vpsraw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsraw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsraw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsraw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsrlw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsrlw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsrlw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, xmm28 # AVX512{BW,VL}
vpsrlw ymm30{k7}, ymm29, xmm28 # AVX512{BW,VL}
vpsrlw ymm30{k7}{z}, ymm29, xmm28 # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsrlw ymm30, ymm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsrldq xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsrldq xmm30, xmm29, 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsrldq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsrldq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsrldq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsrldq ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsrldq ymm30, ymm29, 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsrldq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsrldq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsrldq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsrlw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsrlw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsrlw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsraw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsraw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsraw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsraw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsraw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsraw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsubb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpsubw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpsubw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpsubw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpsubw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30{k7}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30{k7}{z}, xmm29, xmm28 # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{BW,VL}
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30{k7}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30{k7}{z}, ymm29, ymm28 # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{BW,VL}
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{BW,VL}
vpslldq xmm30, xmm29, 0xab # AVX512{BW,VL}
vpslldq xmm30, xmm29, 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpslldq xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpslldq xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpslldq xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpslldq ymm30, ymm29, 0xab # AVX512{BW,VL}
vpslldq ymm30, ymm29, 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpslldq ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpslldq ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpslldq ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
vpsllw xmm30, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30{k7}, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30{k7}{z}, xmm29, 0xab # AVX512{BW,VL}
vpsllw xmm30, xmm29, 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rdx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm30, XMMWORD PTR [rdx+2048], 123 # AVX512{BW,VL}
vpsllw xmm30, XMMWORD PTR [rdx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm30, XMMWORD PTR [rdx-2064], 123 # AVX512{BW,VL}
vpsllw ymm30, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30{k7}, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30{k7}{z}, ymm29, 0xab # AVX512{BW,VL}
vpsllw ymm30, ymm29, 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rcx], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rax+r14*8+0x1234], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rdx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm30, YMMWORD PTR [rdx+4096], 123 # AVX512{BW,VL}
vpsllw ymm30, YMMWORD PTR [rdx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm30, YMMWORD PTR [rdx-4128], 123 # AVX512{BW,VL}
# ---------------------------------------------------------------------
# File boundary: the following content originates from
#   gas/testsuite/gas/i386/avx.s  (repo: stsp/binutils-ia16, 107,668 bytes)
# ---------------------------------------------------------------------
# Check AVX instructions
.allow_index_reg
.text
_start:
# Tests for op
vzeroall
vzeroupper
# Tests for op mem64
vldmxcsr (%ecx)
vstmxcsr (%ecx)
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd (%ecx),%ymm4,%ymm6
vmaskmovpd %ymm4,%ymm6,(%ecx)
vmaskmovps (%ecx),%ymm4,%ymm6
vmaskmovps %ymm4,%ymm6,(%ecx)
# Tests for op imm8, ymm/mem256, ymm
vpermilpd $7,%ymm6,%ymm2
vpermilpd $7,(%ecx),%ymm6
vpermilps $7,%ymm6,%ymm2
vpermilps $7,(%ecx),%ymm6
vroundpd $7,%ymm6,%ymm2
vroundpd $7,(%ecx),%ymm6
vroundps $7,%ymm6,%ymm2
vroundps $7,(%ecx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vaddpd %ymm4,%ymm6,%ymm2
vaddpd (%ecx),%ymm6,%ymm2
vaddps %ymm4,%ymm6,%ymm2
vaddps (%ecx),%ymm6,%ymm2
vaddsubpd %ymm4,%ymm6,%ymm2
vaddsubpd (%ecx),%ymm6,%ymm2
vaddsubps %ymm4,%ymm6,%ymm2
vaddsubps (%ecx),%ymm6,%ymm2
vandnpd %ymm4,%ymm6,%ymm2
vandnpd (%ecx),%ymm6,%ymm2
vandnps %ymm4,%ymm6,%ymm2
vandnps (%ecx),%ymm6,%ymm2
vandpd %ymm4,%ymm6,%ymm2
vandpd (%ecx),%ymm6,%ymm2
vandps %ymm4,%ymm6,%ymm2
vandps (%ecx),%ymm6,%ymm2
vdivpd %ymm4,%ymm6,%ymm2
vdivpd (%ecx),%ymm6,%ymm2
vdivps %ymm4,%ymm6,%ymm2
vdivps (%ecx),%ymm6,%ymm2
vhaddpd %ymm4,%ymm6,%ymm2
vhaddpd (%ecx),%ymm6,%ymm2
vhaddps %ymm4,%ymm6,%ymm2
vhaddps (%ecx),%ymm6,%ymm2
vhsubpd %ymm4,%ymm6,%ymm2
vhsubpd (%ecx),%ymm6,%ymm2
vhsubps %ymm4,%ymm6,%ymm2
vhsubps (%ecx),%ymm6,%ymm2
vmaxpd %ymm4,%ymm6,%ymm2
vmaxpd (%ecx),%ymm6,%ymm2
vmaxps %ymm4,%ymm6,%ymm2
vmaxps (%ecx),%ymm6,%ymm2
vminpd %ymm4,%ymm6,%ymm2
vminpd (%ecx),%ymm6,%ymm2
vminps %ymm4,%ymm6,%ymm2
vminps (%ecx),%ymm6,%ymm2
vmulpd %ymm4,%ymm6,%ymm2
vmulpd (%ecx),%ymm6,%ymm2
vmulps %ymm4,%ymm6,%ymm2
vmulps (%ecx),%ymm6,%ymm2
vorpd %ymm4,%ymm6,%ymm2
vorpd (%ecx),%ymm6,%ymm2
vorps %ymm4,%ymm6,%ymm2
vorps (%ecx),%ymm6,%ymm2
vpermilpd %ymm4,%ymm6,%ymm2
vpermilpd (%ecx),%ymm6,%ymm2
vpermilps %ymm4,%ymm6,%ymm2
vpermilps (%ecx),%ymm6,%ymm2
vsubpd %ymm4,%ymm6,%ymm2
vsubpd (%ecx),%ymm6,%ymm2
vsubps %ymm4,%ymm6,%ymm2
vsubps (%ecx),%ymm6,%ymm2
vunpckhpd %ymm4,%ymm6,%ymm2
vunpckhpd (%ecx),%ymm6,%ymm2
vunpckhps %ymm4,%ymm6,%ymm2
vunpckhps (%ecx),%ymm6,%ymm2
vunpcklpd %ymm4,%ymm6,%ymm2
vunpcklpd (%ecx),%ymm6,%ymm2
vunpcklps %ymm4,%ymm6,%ymm2
vunpcklps (%ecx),%ymm6,%ymm2
vxorpd %ymm4,%ymm6,%ymm2
vxorpd (%ecx),%ymm6,%ymm2
vxorps %ymm4,%ymm6,%ymm2
vxorps (%ecx),%ymm6,%ymm2
vcmpeqpd %ymm4,%ymm6,%ymm2
vcmpeqpd (%ecx),%ymm6,%ymm2
vcmpeq_oqpd %ymm4,%ymm6,%ymm2
vcmpeq_oqpd (%ecx),%ymm6,%ymm2
vcmpltpd %ymm4,%ymm6,%ymm2
vcmpltpd (%ecx),%ymm6,%ymm2
vcmplt_ospd %ymm4,%ymm6,%ymm2
vcmplt_ospd (%ecx),%ymm6,%ymm2
vcmplepd %ymm4,%ymm6,%ymm2
vcmplepd (%ecx),%ymm6,%ymm2
vcmple_ospd %ymm4,%ymm6,%ymm2
vcmple_ospd (%ecx),%ymm6,%ymm2
vcmpunordpd %ymm4,%ymm6,%ymm2
vcmpunordpd (%ecx),%ymm6,%ymm2
vcmpunord_qpd %ymm4,%ymm6,%ymm2
vcmpunord_qpd (%ecx),%ymm6,%ymm2
vcmpneqpd %ymm4,%ymm6,%ymm2
vcmpneqpd (%ecx),%ymm6,%ymm2
vcmpneq_uqpd %ymm4,%ymm6,%ymm2
vcmpneq_uqpd (%ecx),%ymm6,%ymm2
vcmpnltpd %ymm4,%ymm6,%ymm2
vcmpnltpd (%ecx),%ymm6,%ymm2
vcmpnlt_uspd %ymm4,%ymm6,%ymm2
vcmpnlt_uspd (%ecx),%ymm6,%ymm2
vcmpnlepd %ymm4,%ymm6,%ymm2
vcmpnlepd (%ecx),%ymm6,%ymm2
vcmpnle_uspd %ymm4,%ymm6,%ymm2
vcmpnle_uspd (%ecx),%ymm6,%ymm2
vcmpordpd %ymm4,%ymm6,%ymm2
vcmpordpd (%ecx),%ymm6,%ymm2
vcmpord_qpd %ymm4,%ymm6,%ymm2
vcmpord_qpd (%ecx),%ymm6,%ymm2
vcmpeq_uqpd %ymm4,%ymm6,%ymm2
vcmpeq_uqpd (%ecx),%ymm6,%ymm2
vcmpngepd %ymm4,%ymm6,%ymm2
vcmpngepd (%ecx),%ymm6,%ymm2
vcmpnge_uspd %ymm4,%ymm6,%ymm2
vcmpnge_uspd (%ecx),%ymm6,%ymm2
vcmpngtpd %ymm4,%ymm6,%ymm2
vcmpngtpd (%ecx),%ymm6,%ymm2
vcmpngt_uspd %ymm4,%ymm6,%ymm2
vcmpngt_uspd (%ecx),%ymm6,%ymm2
vcmpfalsepd %ymm4,%ymm6,%ymm2
vcmpfalsepd (%ecx),%ymm6,%ymm2
vcmpfalse_oqpd %ymm4,%ymm6,%ymm2
vcmpfalse_oqpd (%ecx),%ymm6,%ymm2
vcmpneq_oqpd %ymm4,%ymm6,%ymm2
vcmpneq_oqpd (%ecx),%ymm6,%ymm2
vcmpgepd %ymm4,%ymm6,%ymm2
vcmpgepd (%ecx),%ymm6,%ymm2
vcmpge_ospd %ymm4,%ymm6,%ymm2
vcmpge_ospd (%ecx),%ymm6,%ymm2
vcmpgtpd %ymm4,%ymm6,%ymm2
vcmpgtpd (%ecx),%ymm6,%ymm2
vcmpgt_ospd %ymm4,%ymm6,%ymm2
vcmpgt_ospd (%ecx),%ymm6,%ymm2
vcmptruepd %ymm4,%ymm6,%ymm2
vcmptruepd (%ecx),%ymm6,%ymm2
vcmptrue_uqpd %ymm4,%ymm6,%ymm2
vcmptrue_uqpd (%ecx),%ymm6,%ymm2
vcmpeq_ospd %ymm4,%ymm6,%ymm2
vcmpeq_ospd (%ecx),%ymm6,%ymm2
vcmplt_oqpd %ymm4,%ymm6,%ymm2
vcmplt_oqpd (%ecx),%ymm6,%ymm2
vcmple_oqpd %ymm4,%ymm6,%ymm2
vcmple_oqpd (%ecx),%ymm6,%ymm2
vcmpunord_spd %ymm4,%ymm6,%ymm2
vcmpunord_spd (%ecx),%ymm6,%ymm2
vcmpneq_uspd %ymm4,%ymm6,%ymm2
vcmpneq_uspd (%ecx),%ymm6,%ymm2
vcmpnlt_uqpd %ymm4,%ymm6,%ymm2
vcmpnlt_uqpd (%ecx),%ymm6,%ymm2
vcmpnle_uqpd %ymm4,%ymm6,%ymm2
vcmpnle_uqpd (%ecx),%ymm6,%ymm2
vcmpord_spd %ymm4,%ymm6,%ymm2
vcmpord_spd (%ecx),%ymm6,%ymm2
vcmpeq_uspd %ymm4,%ymm6,%ymm2
vcmpeq_uspd (%ecx),%ymm6,%ymm2
vcmpnge_uqpd %ymm4,%ymm6,%ymm2
vcmpnge_uqpd (%ecx),%ymm6,%ymm2
vcmpngt_uqpd %ymm4,%ymm6,%ymm2
vcmpngt_uqpd (%ecx),%ymm6,%ymm2
vcmpfalse_ospd %ymm4,%ymm6,%ymm2
vcmpfalse_ospd (%ecx),%ymm6,%ymm2
vcmpneq_ospd %ymm4,%ymm6,%ymm2
vcmpneq_ospd (%ecx),%ymm6,%ymm2
vcmpge_oqpd %ymm4,%ymm6,%ymm2
vcmpge_oqpd (%ecx),%ymm6,%ymm2
vcmpgt_oqpd %ymm4,%ymm6,%ymm2
vcmpgt_oqpd (%ecx),%ymm6,%ymm2
vcmptrue_uspd %ymm4,%ymm6,%ymm2
vcmptrue_uspd (%ecx),%ymm6,%ymm2
vcmpeqps %ymm4,%ymm6,%ymm2
vcmpeqps (%ecx),%ymm6,%ymm2
vcmpeq_oqps %ymm4,%ymm6,%ymm2
vcmpeq_oqps (%ecx),%ymm6,%ymm2
vcmpltps %ymm4,%ymm6,%ymm2
vcmpltps (%ecx),%ymm6,%ymm2
vcmplt_osps %ymm4,%ymm6,%ymm2
vcmplt_osps (%ecx),%ymm6,%ymm2
vcmpleps %ymm4,%ymm6,%ymm2
vcmpleps (%ecx),%ymm6,%ymm2
vcmple_osps %ymm4,%ymm6,%ymm2
vcmple_osps (%ecx),%ymm6,%ymm2
vcmpunordps %ymm4,%ymm6,%ymm2
vcmpunordps (%ecx),%ymm6,%ymm2
vcmpunord_qps %ymm4,%ymm6,%ymm2
vcmpunord_qps (%ecx),%ymm6,%ymm2
vcmpneqps %ymm4,%ymm6,%ymm2
vcmpneqps (%ecx),%ymm6,%ymm2
vcmpneq_uqps %ymm4,%ymm6,%ymm2
vcmpneq_uqps (%ecx),%ymm6,%ymm2
vcmpnltps %ymm4,%ymm6,%ymm2
vcmpnltps (%ecx),%ymm6,%ymm2
vcmpnlt_usps %ymm4,%ymm6,%ymm2
vcmpnlt_usps (%ecx),%ymm6,%ymm2
vcmpnleps %ymm4,%ymm6,%ymm2
vcmpnleps (%ecx),%ymm6,%ymm2
vcmpnle_usps %ymm4,%ymm6,%ymm2
vcmpnle_usps (%ecx),%ymm6,%ymm2
vcmpordps %ymm4,%ymm6,%ymm2
vcmpordps (%ecx),%ymm6,%ymm2
vcmpord_qps %ymm4,%ymm6,%ymm2
vcmpord_qps (%ecx),%ymm6,%ymm2
vcmpeq_uqps %ymm4,%ymm6,%ymm2
vcmpeq_uqps (%ecx),%ymm6,%ymm2
vcmpngeps %ymm4,%ymm6,%ymm2
vcmpngeps (%ecx),%ymm6,%ymm2
vcmpnge_usps %ymm4,%ymm6,%ymm2
vcmpnge_usps (%ecx),%ymm6,%ymm2
vcmpngtps %ymm4,%ymm6,%ymm2
vcmpngtps (%ecx),%ymm6,%ymm2
vcmpngt_usps %ymm4,%ymm6,%ymm2
vcmpngt_usps (%ecx),%ymm6,%ymm2
vcmpfalseps %ymm4,%ymm6,%ymm2
vcmpfalseps (%ecx),%ymm6,%ymm2
vcmpfalse_oqps %ymm4,%ymm6,%ymm2
vcmpfalse_oqps (%ecx),%ymm6,%ymm2
vcmpneq_oqps %ymm4,%ymm6,%ymm2
vcmpneq_oqps (%ecx),%ymm6,%ymm2
vcmpgeps %ymm4,%ymm6,%ymm2
vcmpgeps (%ecx),%ymm6,%ymm2
vcmpge_osps %ymm4,%ymm6,%ymm2
vcmpge_osps (%ecx),%ymm6,%ymm2
vcmpgtps %ymm4,%ymm6,%ymm2
vcmpgtps (%ecx),%ymm6,%ymm2
vcmpgt_osps %ymm4,%ymm6,%ymm2
vcmpgt_osps (%ecx),%ymm6,%ymm2
vcmptrueps %ymm4,%ymm6,%ymm2
vcmptrueps (%ecx),%ymm6,%ymm2
vcmptrue_uqps %ymm4,%ymm6,%ymm2
vcmptrue_uqps (%ecx),%ymm6,%ymm2
vcmpeq_osps %ymm4,%ymm6,%ymm2
vcmpeq_osps (%ecx),%ymm6,%ymm2
vcmplt_oqps %ymm4,%ymm6,%ymm2
vcmplt_oqps (%ecx),%ymm6,%ymm2
vcmple_oqps %ymm4,%ymm6,%ymm2
vcmple_oqps (%ecx),%ymm6,%ymm2
vcmpunord_sps %ymm4,%ymm6,%ymm2
vcmpunord_sps (%ecx),%ymm6,%ymm2
vcmpneq_usps %ymm4,%ymm6,%ymm2
vcmpneq_usps (%ecx),%ymm6,%ymm2
vcmpnlt_uqps %ymm4,%ymm6,%ymm2
vcmpnlt_uqps (%ecx),%ymm6,%ymm2
vcmpnle_uqps %ymm4,%ymm6,%ymm2
vcmpnle_uqps (%ecx),%ymm6,%ymm2
vcmpord_sps %ymm4,%ymm6,%ymm2
vcmpord_sps (%ecx),%ymm6,%ymm2
vcmpeq_usps %ymm4,%ymm6,%ymm2
vcmpeq_usps (%ecx),%ymm6,%ymm2
vcmpnge_uqps %ymm4,%ymm6,%ymm2
vcmpnge_uqps (%ecx),%ymm6,%ymm2
vcmpngt_uqps %ymm4,%ymm6,%ymm2
vcmpngt_uqps (%ecx),%ymm6,%ymm2
vcmpfalse_osps %ymm4,%ymm6,%ymm2
vcmpfalse_osps (%ecx),%ymm6,%ymm2
vcmpneq_osps %ymm4,%ymm6,%ymm2
vcmpneq_osps (%ecx),%ymm6,%ymm2
vcmpge_oqps %ymm4,%ymm6,%ymm2
vcmpge_oqps (%ecx),%ymm6,%ymm2
vcmpgt_oqps %ymm4,%ymm6,%ymm2
vcmpgt_oqps (%ecx),%ymm6,%ymm2
vcmptrue_usps %ymm4,%ymm6,%ymm2
vcmptrue_usps (%ecx),%ymm6,%ymm2
vgf2p8mulb %ymm4, %ymm5, %ymm6
vgf2p8mulb (%ecx), %ymm5, %ymm6
vgf2p8mulb -123456(%esp,%esi,8), %ymm5, %ymm6
vgf2p8mulb 4064(%edx), %ymm5, %ymm6
vgf2p8mulb 4096(%edx), %ymm5, %ymm6
vgf2p8mulb -4096(%edx), %ymm5, %ymm6
vgf2p8mulb -4128(%edx), %ymm5, %ymm6
# Tests for op ymm/mem256, xmm
vcvtpd2dqy %ymm4,%xmm4
vcvtpd2dqy (%ecx),%xmm4
vcvtpd2psy %ymm4,%xmm4
vcvtpd2psy (%ecx),%xmm4
vcvttpd2dqy %ymm4,%xmm4
vcvttpd2dqy (%ecx),%xmm4
# Tests for op ymm/mem256, ymm
vcvtdq2ps %ymm4,%ymm6
vcvtdq2ps (%ecx),%ymm4
vcvtps2dq %ymm4,%ymm6
vcvtps2dq (%ecx),%ymm4
vcvttps2dq %ymm4,%ymm6
vcvttps2dq (%ecx),%ymm4
vmovapd %ymm4,%ymm6
vmovapd (%ecx),%ymm4
vmovaps %ymm4,%ymm6
vmovaps (%ecx),%ymm4
vmovdqa %ymm4,%ymm6
vmovdqa (%ecx),%ymm4
vmovdqu %ymm4,%ymm6
vmovdqu (%ecx),%ymm4
vmovddup %ymm4,%ymm6
vmovddup (%ecx),%ymm4
vmovshdup %ymm4,%ymm6
vmovshdup (%ecx),%ymm4
vmovsldup %ymm4,%ymm6
vmovsldup (%ecx),%ymm4
vmovupd %ymm4,%ymm6
vmovupd (%ecx),%ymm4
vmovups %ymm4,%ymm6
vmovups (%ecx),%ymm4
vptest %ymm4,%ymm6
vptest (%ecx),%ymm4
vrcpps %ymm4,%ymm6
vrcpps (%ecx),%ymm4
vrsqrtps %ymm4,%ymm6
vrsqrtps (%ecx),%ymm4
vsqrtpd %ymm4,%ymm6
vsqrtpd (%ecx),%ymm4
vsqrtps %ymm4,%ymm6
vsqrtps (%ecx),%ymm4
vtestpd %ymm4,%ymm6
vtestpd (%ecx),%ymm4
vtestps %ymm4,%ymm6
vtestps (%ecx),%ymm4
# Tests for op ymm, ymm/mem256
vmovapd %ymm4,%ymm6
vmovapd %ymm4,(%ecx)
vmovaps %ymm4,%ymm6
vmovaps %ymm4,(%ecx)
vmovdqa %ymm4,%ymm6
vmovdqa %ymm4,(%ecx)
vmovdqu %ymm4,%ymm6
vmovdqu %ymm4,(%ecx)
vmovupd %ymm4,%ymm6
vmovupd %ymm4,(%ecx)
vmovups %ymm4,%ymm6
vmovups %ymm4,(%ecx)
# Tests for op mem256, ymm
vlddqu (%ecx),%ymm4
# Tests for op ymm, mem256
vmovntdq %ymm4,(%ecx)
vmovntpd %ymm4,(%ecx)
vmovntps %ymm4,(%ecx)
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd $7,%ymm4,%ymm6,%ymm2
vblendpd $7,(%ecx),%ymm6,%ymm2
vblendps $7,%ymm4,%ymm6,%ymm2
vblendps $7,(%ecx),%ymm6,%ymm2
vcmppd $7,%ymm4,%ymm6,%ymm2
vcmppd $7,(%ecx),%ymm6,%ymm2
vcmpps $7,%ymm4,%ymm6,%ymm2
vcmpps $7,(%ecx),%ymm6,%ymm2
vdpps $7,%ymm4,%ymm6,%ymm2
vdpps $7,(%ecx),%ymm6,%ymm2
vperm2f128 $7,%ymm4,%ymm6,%ymm2
vperm2f128 $7,(%ecx),%ymm6,%ymm2
vshufpd $7,%ymm4,%ymm6,%ymm2
vshufpd $7,(%ecx),%ymm6,%ymm2
vshufps $7,%ymm4,%ymm6,%ymm2
vshufps $7,(%ecx),%ymm6,%ymm2
vgf2p8affineqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineqb $123, (%ecx), %ymm5, %ymm6
vgf2p8affineqb $123, -123456(%esp,%esi,8), %ymm5, %ymm6
vgf2p8affineqb $123, 4064(%edx), %ymm5, %ymm6
vgf2p8affineqb $123, 4096(%edx), %ymm5, %ymm6
vgf2p8affineqb $123, -4096(%edx), %ymm5, %ymm6
vgf2p8affineqb $123, -4128(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $0xab, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, %ymm4, %ymm5, %ymm6
vgf2p8affineinvqb $123, (%ecx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4064(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $123, 4096(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4096(%edx), %ymm5, %ymm6
vgf2p8affineinvqb $123, -4128(%edx), %ymm5, %ymm6
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd %ymm4,%ymm6,%ymm2,%ymm7
vblendvpd %ymm4,(%ecx),%ymm2,%ymm7
vblendvps %ymm4,%ymm6,%ymm2,%ymm7
vblendvps %ymm4,(%ecx),%ymm2,%ymm7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 $7,%xmm4,%ymm4,%ymm6
vinsertf128 $7,(%ecx),%ymm4,%ymm6
# Tests for op imm8, ymm, xmm/mem128
vextractf128 $7,%ymm4,%xmm4
vextractf128 $7,%ymm4,(%ecx)
# Tests for op mem128, ymm
vbroadcastf128 (%ecx),%ymm4
# Tests for op xmm/mem128, xmm
vcvtdq2ps %xmm4,%xmm6
vcvtdq2ps (%ecx),%xmm4
vcvtpd2dqx %xmm4,%xmm6
vcvtpd2dqx (%ecx),%xmm4
vcvtpd2psx %xmm4,%xmm6
vcvtpd2psx (%ecx),%xmm4
vcvtps2dq %xmm4,%xmm6
vcvtps2dq (%ecx),%xmm4
vcvttpd2dqx %xmm4,%xmm6
vcvttpd2dqx (%ecx),%xmm4
vcvttps2dq %xmm4,%xmm6
vcvttps2dq (%ecx),%xmm4
vmovapd %xmm4,%xmm6
vmovapd (%ecx),%xmm4
vmovaps %xmm4,%xmm6
vmovaps (%ecx),%xmm4
vmovdqa %xmm4,%xmm6
vmovdqa (%ecx),%xmm4
vmovdqu %xmm4,%xmm6
vmovdqu (%ecx),%xmm4
vmovshdup %xmm4,%xmm6
vmovshdup (%ecx),%xmm4
vmovsldup %xmm4,%xmm6
vmovsldup (%ecx),%xmm4
vmovupd %xmm4,%xmm6
vmovupd (%ecx),%xmm4
vmovups %xmm4,%xmm6
vmovups (%ecx),%xmm4
vpabsb %xmm4,%xmm6
vpabsb (%ecx),%xmm4
vpabsw %xmm4,%xmm6
vpabsw (%ecx),%xmm4
vpabsd %xmm4,%xmm6
vpabsd (%ecx),%xmm4
vphminposuw %xmm4,%xmm6
vphminposuw (%ecx),%xmm4
vptest %xmm4,%xmm6
vptest (%ecx),%xmm4
vtestps %xmm4,%xmm6
vtestps (%ecx),%xmm4
vtestpd %xmm4,%xmm6
vtestpd (%ecx),%xmm4
vrcpps %xmm4,%xmm6
vrcpps (%ecx),%xmm4
vrsqrtps %xmm4,%xmm6
vrsqrtps (%ecx),%xmm4
vsqrtpd %xmm4,%xmm6
vsqrtpd (%ecx),%xmm4
vsqrtps %xmm4,%xmm6
vsqrtps (%ecx),%xmm4
vaesimc %xmm4,%xmm6
vaesimc (%ecx),%xmm4
# Tests for op xmm, xmm/mem128
vmovapd %xmm4,%xmm6
vmovapd %xmm4,(%ecx)
vmovaps %xmm4,%xmm6
vmovaps %xmm4,(%ecx)
vmovdqa %xmm4,%xmm6
vmovdqa %xmm4,(%ecx)
vmovdqu %xmm4,%xmm6
vmovdqu %xmm4,(%ecx)
vmovupd %xmm4,%xmm6
vmovupd %xmm4,(%ecx)
vmovups %xmm4,%xmm6
vmovups %xmm4,(%ecx)
# Tests for op mem128, xmm
vlddqu (%ecx),%xmm4
vmovntdqa (%ecx),%xmm4
# Tests for op xmm, mem128
vmovntdq %xmm4,(%ecx)
vmovntpd %xmm4,(%ecx)
vmovntps %xmm4,(%ecx)
# Tests for op xmm/mem128, ymm
vcvtdq2pd %xmm4,%ymm4
vcvtdq2pd (%ecx),%ymm4
vcvtps2pd %xmm4,%ymm4
vcvtps2pd (%ecx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vaddpd %xmm4,%xmm6,%xmm2
vaddpd (%ecx),%xmm6,%xmm7
vaddps %xmm4,%xmm6,%xmm2
vaddps (%ecx),%xmm6,%xmm7
vaddsubpd %xmm4,%xmm6,%xmm2
vaddsubpd (%ecx),%xmm6,%xmm7
vaddsubps %xmm4,%xmm6,%xmm2
vaddsubps (%ecx),%xmm6,%xmm7
vandnpd %xmm4,%xmm6,%xmm2
vandnpd (%ecx),%xmm6,%xmm7
vandnps %xmm4,%xmm6,%xmm2
vandnps (%ecx),%xmm6,%xmm7
vandpd %xmm4,%xmm6,%xmm2
vandpd (%ecx),%xmm6,%xmm7
vandps %xmm4,%xmm6,%xmm2
vandps (%ecx),%xmm6,%xmm7
vdivpd %xmm4,%xmm6,%xmm2
vdivpd (%ecx),%xmm6,%xmm7
vdivps %xmm4,%xmm6,%xmm2
vdivps (%ecx),%xmm6,%xmm7
vhaddpd %xmm4,%xmm6,%xmm2
vhaddpd (%ecx),%xmm6,%xmm7
vhaddps %xmm4,%xmm6,%xmm2
vhaddps (%ecx),%xmm6,%xmm7
vhsubpd %xmm4,%xmm6,%xmm2
vhsubpd (%ecx),%xmm6,%xmm7
vhsubps %xmm4,%xmm6,%xmm2
vhsubps (%ecx),%xmm6,%xmm7
vmaxpd %xmm4,%xmm6,%xmm2
vmaxpd (%ecx),%xmm6,%xmm7
vmaxps %xmm4,%xmm6,%xmm2
vmaxps (%ecx),%xmm6,%xmm7
vminpd %xmm4,%xmm6,%xmm2
vminpd (%ecx),%xmm6,%xmm7
vminps %xmm4,%xmm6,%xmm2
vminps (%ecx),%xmm6,%xmm7
vmulpd %xmm4,%xmm6,%xmm2
vmulpd (%ecx),%xmm6,%xmm7
vmulps %xmm4,%xmm6,%xmm2
vmulps (%ecx),%xmm6,%xmm7
vorpd %xmm4,%xmm6,%xmm2
vorpd (%ecx),%xmm6,%xmm7
vorps %xmm4,%xmm6,%xmm2
vorps (%ecx),%xmm6,%xmm7
vpacksswb %xmm4,%xmm6,%xmm2
vpacksswb (%ecx),%xmm6,%xmm7
vpackssdw %xmm4,%xmm6,%xmm2
vpackssdw (%ecx),%xmm6,%xmm7
vpackuswb %xmm4,%xmm6,%xmm2
vpackuswb (%ecx),%xmm6,%xmm7
vpackusdw %xmm4,%xmm6,%xmm2
vpackusdw (%ecx),%xmm6,%xmm7
vpaddb %xmm4,%xmm6,%xmm2
vpaddb (%ecx),%xmm6,%xmm7
vpaddw %xmm4,%xmm6,%xmm2
vpaddw (%ecx),%xmm6,%xmm7
vpaddd %xmm4,%xmm6,%xmm2
vpaddd (%ecx),%xmm6,%xmm7
vpaddq %xmm4,%xmm6,%xmm2
vpaddq (%ecx),%xmm6,%xmm7
vpaddsb %xmm4,%xmm6,%xmm2
vpaddsb (%ecx),%xmm6,%xmm7
vpaddsw %xmm4,%xmm6,%xmm2
vpaddsw (%ecx),%xmm6,%xmm7
vpaddusb %xmm4,%xmm6,%xmm2
vpaddusb (%ecx),%xmm6,%xmm7
vpaddusw %xmm4,%xmm6,%xmm2
vpaddusw (%ecx),%xmm6,%xmm7
vpand %xmm4,%xmm6,%xmm2
vpand (%ecx),%xmm6,%xmm7
vpandn %xmm4,%xmm6,%xmm2
vpandn (%ecx),%xmm6,%xmm7
vpavgb %xmm4,%xmm6,%xmm2
vpavgb (%ecx),%xmm6,%xmm7
vpavgw %xmm4,%xmm6,%xmm2
vpavgw (%ecx),%xmm6,%xmm7
vpclmullqlqdq %xmm4,%xmm6,%xmm2
vpclmullqlqdq (%ecx),%xmm6,%xmm7
vpclmulhqlqdq %xmm4,%xmm6,%xmm2
vpclmulhqlqdq (%ecx),%xmm6,%xmm7
vpclmullqhqdq %xmm4,%xmm6,%xmm2
vpclmullqhqdq (%ecx),%xmm6,%xmm7
vpclmulhqhqdq %xmm4,%xmm6,%xmm2
vpclmulhqhqdq (%ecx),%xmm6,%xmm7
vpcmpeqb %xmm4,%xmm6,%xmm2
vpcmpeqb (%ecx),%xmm6,%xmm7
vpcmpeqw %xmm4,%xmm6,%xmm2
vpcmpeqw (%ecx),%xmm6,%xmm7
vpcmpeqd %xmm4,%xmm6,%xmm2
vpcmpeqd (%ecx),%xmm6,%xmm7
vpcmpeqq %xmm4,%xmm6,%xmm2
vpcmpeqq (%ecx),%xmm6,%xmm7
vpcmpgtb %xmm4,%xmm6,%xmm2
vpcmpgtb (%ecx),%xmm6,%xmm7
vpcmpgtw %xmm4,%xmm6,%xmm2
vpcmpgtw (%ecx),%xmm6,%xmm7
vpcmpgtd %xmm4,%xmm6,%xmm2
vpcmpgtd (%ecx),%xmm6,%xmm7
vpcmpgtq %xmm4,%xmm6,%xmm2
vpcmpgtq (%ecx),%xmm6,%xmm7
vpermilpd %xmm4,%xmm6,%xmm2
vpermilpd (%ecx),%xmm6,%xmm7
vpermilps %xmm4,%xmm6,%xmm2
vpermilps (%ecx),%xmm6,%xmm7
vphaddw %xmm4,%xmm6,%xmm2
vphaddw (%ecx),%xmm6,%xmm7
vphaddd %xmm4,%xmm6,%xmm2
vphaddd (%ecx),%xmm6,%xmm7
vphaddsw %xmm4,%xmm6,%xmm2
vphaddsw (%ecx),%xmm6,%xmm7
vphsubw %xmm4,%xmm6,%xmm2
vphsubw (%ecx),%xmm6,%xmm7
vphsubd %xmm4,%xmm6,%xmm2
vphsubd (%ecx),%xmm6,%xmm7
vphsubsw %xmm4,%xmm6,%xmm2
vphsubsw (%ecx),%xmm6,%xmm7
vpmaddwd %xmm4,%xmm6,%xmm2
vpmaddwd (%ecx),%xmm6,%xmm7
vpmaddubsw %xmm4,%xmm6,%xmm2
vpmaddubsw (%ecx),%xmm6,%xmm7
vpmaxsb %xmm4,%xmm6,%xmm2
vpmaxsb (%ecx),%xmm6,%xmm7
vpmaxsw %xmm4,%xmm6,%xmm2
vpmaxsw (%ecx),%xmm6,%xmm7
vpmaxsd %xmm4,%xmm6,%xmm2
vpmaxsd (%ecx),%xmm6,%xmm7
vpmaxub %xmm4,%xmm6,%xmm2
vpmaxub (%ecx),%xmm6,%xmm7
vpmaxuw %xmm4,%xmm6,%xmm2
vpmaxuw (%ecx),%xmm6,%xmm7
vpmaxud %xmm4,%xmm6,%xmm2
vpmaxud (%ecx),%xmm6,%xmm7
vpminsb %xmm4,%xmm6,%xmm2
vpminsb (%ecx),%xmm6,%xmm7
vpminsw %xmm4,%xmm6,%xmm2
vpminsw (%ecx),%xmm6,%xmm7
vpminsd %xmm4,%xmm6,%xmm2
vpminsd (%ecx),%xmm6,%xmm7
vpminub %xmm4,%xmm6,%xmm2
vpminub (%ecx),%xmm6,%xmm7
vpminuw %xmm4,%xmm6,%xmm2
vpminuw (%ecx),%xmm6,%xmm7
vpminud %xmm4,%xmm6,%xmm2
vpminud (%ecx),%xmm6,%xmm7
vpmulhuw %xmm4,%xmm6,%xmm2
vpmulhuw (%ecx),%xmm6,%xmm7
vpmulhrsw %xmm4,%xmm6,%xmm2
vpmulhrsw (%ecx),%xmm6,%xmm7
vpmulhw %xmm4,%xmm6,%xmm2
vpmulhw (%ecx),%xmm6,%xmm7
vpmullw %xmm4,%xmm6,%xmm2
vpmullw (%ecx),%xmm6,%xmm7
vpmulld %xmm4,%xmm6,%xmm2
vpmulld (%ecx),%xmm6,%xmm7
vpmuludq %xmm4,%xmm6,%xmm2
vpmuludq (%ecx),%xmm6,%xmm7
vpmuldq %xmm4,%xmm6,%xmm2
vpmuldq (%ecx),%xmm6,%xmm7
vpor %xmm4,%xmm6,%xmm2
vpor (%ecx),%xmm6,%xmm7
vpsadbw %xmm4,%xmm6,%xmm2
vpsadbw (%ecx),%xmm6,%xmm7
vpshufb %xmm4,%xmm6,%xmm2
vpshufb (%ecx),%xmm6,%xmm7
vpsignb %xmm4,%xmm6,%xmm2
vpsignb (%ecx),%xmm6,%xmm7
vpsignw %xmm4,%xmm6,%xmm2
vpsignw (%ecx),%xmm6,%xmm7
vpsignd %xmm4,%xmm6,%xmm2
vpsignd (%ecx),%xmm6,%xmm7
vpsllw %xmm4,%xmm6,%xmm2
vpsllw (%ecx),%xmm6,%xmm7
vpslld %xmm4,%xmm6,%xmm2
vpslld (%ecx),%xmm6,%xmm7
vpsllq %xmm4,%xmm6,%xmm2
vpsllq (%ecx),%xmm6,%xmm7
vpsraw %xmm4,%xmm6,%xmm2
vpsraw (%ecx),%xmm6,%xmm7
vpsrad %xmm4,%xmm6,%xmm2
vpsrad (%ecx),%xmm6,%xmm7
vpsrlw %xmm4,%xmm6,%xmm2
vpsrlw (%ecx),%xmm6,%xmm7
vpsrld %xmm4,%xmm6,%xmm2
vpsrld (%ecx),%xmm6,%xmm7
vpsrlq %xmm4,%xmm6,%xmm2
vpsrlq (%ecx),%xmm6,%xmm7
vpsubb %xmm4,%xmm6,%xmm2
vpsubb (%ecx),%xmm6,%xmm7
vpsubw %xmm4,%xmm6,%xmm2
vpsubw (%ecx),%xmm6,%xmm7
vpsubd %xmm4,%xmm6,%xmm2
vpsubd (%ecx),%xmm6,%xmm7
vpsubq %xmm4,%xmm6,%xmm2
vpsubq (%ecx),%xmm6,%xmm7
vpsubsb %xmm4,%xmm6,%xmm2
vpsubsb (%ecx),%xmm6,%xmm7
vpsubsw %xmm4,%xmm6,%xmm2
vpsubsw (%ecx),%xmm6,%xmm7
vpsubusb %xmm4,%xmm6,%xmm2
vpsubusb (%ecx),%xmm6,%xmm7
vpsubusw %xmm4,%xmm6,%xmm2
vpsubusw (%ecx),%xmm6,%xmm7
vpunpckhbw %xmm4,%xmm6,%xmm2
vpunpckhbw (%ecx),%xmm6,%xmm7
vpunpckhwd %xmm4,%xmm6,%xmm2
vpunpckhwd (%ecx),%xmm6,%xmm7
vpunpckhdq %xmm4,%xmm6,%xmm2
vpunpckhdq (%ecx),%xmm6,%xmm7
vpunpckhqdq %xmm4,%xmm6,%xmm2
vpunpckhqdq (%ecx),%xmm6,%xmm7
vpunpcklbw %xmm4,%xmm6,%xmm2
vpunpcklbw (%ecx),%xmm6,%xmm7
vpunpcklwd %xmm4,%xmm6,%xmm2
vpunpcklwd (%ecx),%xmm6,%xmm7
vpunpckldq %xmm4,%xmm6,%xmm2
vpunpckldq (%ecx),%xmm6,%xmm7
vpunpcklqdq %xmm4,%xmm6,%xmm2
vpunpcklqdq (%ecx),%xmm6,%xmm7
vpxor %xmm4,%xmm6,%xmm2
vpxor (%ecx),%xmm6,%xmm7
vsubpd %xmm4,%xmm6,%xmm2
vsubpd (%ecx),%xmm6,%xmm7
vsubps %xmm4,%xmm6,%xmm2
vsubps (%ecx),%xmm6,%xmm7
vunpckhpd %xmm4,%xmm6,%xmm2
vunpckhpd (%ecx),%xmm6,%xmm7
vunpckhps %xmm4,%xmm6,%xmm2
vunpckhps (%ecx),%xmm6,%xmm7
vunpcklpd %xmm4,%xmm6,%xmm2
vunpcklpd (%ecx),%xmm6,%xmm7
vunpcklps %xmm4,%xmm6,%xmm2
vunpcklps (%ecx),%xmm6,%xmm7
vxorpd %xmm4,%xmm6,%xmm2
vxorpd (%ecx),%xmm6,%xmm7
vxorps %xmm4,%xmm6,%xmm2
vxorps (%ecx),%xmm6,%xmm7
vaesenc %xmm4,%xmm6,%xmm2
vaesenc (%ecx),%xmm6,%xmm7
vaesenclast %xmm4,%xmm6,%xmm2
vaesenclast (%ecx),%xmm6,%xmm7
vaesdec %xmm4,%xmm6,%xmm2
vaesdec (%ecx),%xmm6,%xmm7
vaesdeclast %xmm4,%xmm6,%xmm2
vaesdeclast (%ecx),%xmm6,%xmm7
vcmpeqpd %xmm4,%xmm6,%xmm2
vcmpeqpd (%ecx),%xmm6,%xmm7
vcmpltpd %xmm4,%xmm6,%xmm2
vcmpltpd (%ecx),%xmm6,%xmm7
vcmplepd %xmm4,%xmm6,%xmm2
vcmplepd (%ecx),%xmm6,%xmm7
vcmpunordpd %xmm4,%xmm6,%xmm2
vcmpunordpd (%ecx),%xmm6,%xmm7
vcmpneqpd %xmm4,%xmm6,%xmm2
vcmpneqpd (%ecx),%xmm6,%xmm7
vcmpnltpd %xmm4,%xmm6,%xmm2
vcmpnltpd (%ecx),%xmm6,%xmm7
vcmpnlepd %xmm4,%xmm6,%xmm2
vcmpnlepd (%ecx),%xmm6,%xmm7
vcmpordpd %xmm4,%xmm6,%xmm2
vcmpordpd (%ecx),%xmm6,%xmm7
vcmpeq_uqpd %xmm4,%xmm6,%xmm2
vcmpeq_uqpd (%ecx),%xmm6,%xmm7
vcmpngepd %xmm4,%xmm6,%xmm2
vcmpngepd (%ecx),%xmm6,%xmm7
vcmpngtpd %xmm4,%xmm6,%xmm2
vcmpngtpd (%ecx),%xmm6,%xmm7
vcmpfalsepd %xmm4,%xmm6,%xmm2
vcmpfalsepd (%ecx),%xmm6,%xmm7
vcmpneq_oqpd %xmm4,%xmm6,%xmm2
vcmpneq_oqpd (%ecx),%xmm6,%xmm7
vcmpgepd %xmm4,%xmm6,%xmm2
vcmpgepd (%ecx),%xmm6,%xmm7
vcmpgtpd %xmm4,%xmm6,%xmm2
vcmpgtpd (%ecx),%xmm6,%xmm7
vcmptruepd %xmm4,%xmm6,%xmm2
vcmptruepd (%ecx),%xmm6,%xmm7
vcmpeq_ospd %xmm4,%xmm6,%xmm2
vcmpeq_ospd (%ecx),%xmm6,%xmm7
vcmplt_oqpd %xmm4,%xmm6,%xmm2
vcmplt_oqpd (%ecx),%xmm6,%xmm7
vcmple_oqpd %xmm4,%xmm6,%xmm2
vcmple_oqpd (%ecx),%xmm6,%xmm7
vcmpunord_spd %xmm4,%xmm6,%xmm2
vcmpunord_spd (%ecx),%xmm6,%xmm7
vcmpneq_uspd %xmm4,%xmm6,%xmm2
vcmpneq_uspd (%ecx),%xmm6,%xmm7
vcmpnlt_uqpd %xmm4,%xmm6,%xmm2
vcmpnlt_uqpd (%ecx),%xmm6,%xmm7
vcmpnle_uqpd %xmm4,%xmm6,%xmm2
vcmpnle_uqpd (%ecx),%xmm6,%xmm7
vcmpord_spd %xmm4,%xmm6,%xmm2
vcmpord_spd (%ecx),%xmm6,%xmm7
vcmpeq_uspd %xmm4,%xmm6,%xmm2
vcmpeq_uspd (%ecx),%xmm6,%xmm7
vcmpnge_uqpd %xmm4,%xmm6,%xmm2
vcmpnge_uqpd (%ecx),%xmm6,%xmm7
vcmpngt_uqpd %xmm4,%xmm6,%xmm2
vcmpngt_uqpd (%ecx),%xmm6,%xmm7
vcmpfalse_ospd %xmm4,%xmm6,%xmm2
vcmpfalse_ospd (%ecx),%xmm6,%xmm7
vcmpneq_ospd %xmm4,%xmm6,%xmm2
vcmpneq_ospd (%ecx),%xmm6,%xmm7
vcmpge_oqpd %xmm4,%xmm6,%xmm2
vcmpge_oqpd (%ecx),%xmm6,%xmm7
vcmpgt_oqpd %xmm4,%xmm6,%xmm2
vcmpgt_oqpd (%ecx),%xmm6,%xmm7
vcmptrue_uspd %xmm4,%xmm6,%xmm2
vcmptrue_uspd (%ecx),%xmm6,%xmm7
vcmpeqps %xmm4,%xmm6,%xmm2
vcmpeqps (%ecx),%xmm6,%xmm7
vcmpltps %xmm4,%xmm6,%xmm2
vcmpltps (%ecx),%xmm6,%xmm7
vcmpleps %xmm4,%xmm6,%xmm2
vcmpleps (%ecx),%xmm6,%xmm7
vcmpunordps %xmm4,%xmm6,%xmm2
vcmpunordps (%ecx),%xmm6,%xmm7
vcmpneqps %xmm4,%xmm6,%xmm2
vcmpneqps (%ecx),%xmm6,%xmm7
vcmpnltps %xmm4,%xmm6,%xmm2
vcmpnltps (%ecx),%xmm6,%xmm7
vcmpnleps %xmm4,%xmm6,%xmm2
vcmpnleps (%ecx),%xmm6,%xmm7
vcmpordps %xmm4,%xmm6,%xmm2
vcmpordps (%ecx),%xmm6,%xmm7
vcmpeq_uqps %xmm4,%xmm6,%xmm2
vcmpeq_uqps (%ecx),%xmm6,%xmm7
vcmpngeps %xmm4,%xmm6,%xmm2
vcmpngeps (%ecx),%xmm6,%xmm7
vcmpngtps %xmm4,%xmm6,%xmm2
vcmpngtps (%ecx),%xmm6,%xmm7
vcmpfalseps %xmm4,%xmm6,%xmm2
vcmpfalseps (%ecx),%xmm6,%xmm7
vcmpneq_oqps %xmm4,%xmm6,%xmm2
vcmpneq_oqps (%ecx),%xmm6,%xmm7
vcmpgeps %xmm4,%xmm6,%xmm2
vcmpgeps (%ecx),%xmm6,%xmm7
vcmpgtps %xmm4,%xmm6,%xmm2
vcmpgtps (%ecx),%xmm6,%xmm7
vcmptrueps %xmm4,%xmm6,%xmm2
vcmptrueps (%ecx),%xmm6,%xmm7
vcmpeq_osps %xmm4,%xmm6,%xmm2
vcmpeq_osps (%ecx),%xmm6,%xmm7
vcmplt_oqps %xmm4,%xmm6,%xmm2
vcmplt_oqps (%ecx),%xmm6,%xmm7
vcmple_oqps %xmm4,%xmm6,%xmm2
vcmple_oqps (%ecx),%xmm6,%xmm7
vcmpunord_sps %xmm4,%xmm6,%xmm2
vcmpunord_sps (%ecx),%xmm6,%xmm7
vcmpneq_usps %xmm4,%xmm6,%xmm2
vcmpneq_usps (%ecx),%xmm6,%xmm7
vcmpnlt_uqps %xmm4,%xmm6,%xmm2
vcmpnlt_uqps (%ecx),%xmm6,%xmm7
vcmpnle_uqps %xmm4,%xmm6,%xmm2
vcmpnle_uqps (%ecx),%xmm6,%xmm7
vcmpord_sps %xmm4,%xmm6,%xmm2
vcmpord_sps (%ecx),%xmm6,%xmm7
vcmpeq_usps %xmm4,%xmm6,%xmm2
vcmpeq_usps (%ecx),%xmm6,%xmm7
vcmpnge_uqps %xmm4,%xmm6,%xmm2
vcmpnge_uqps (%ecx),%xmm6,%xmm7
vcmpngt_uqps %xmm4,%xmm6,%xmm2
vcmpngt_uqps (%ecx),%xmm6,%xmm7
vcmpfalse_osps %xmm4,%xmm6,%xmm2
vcmpfalse_osps (%ecx),%xmm6,%xmm7
vcmpneq_osps %xmm4,%xmm6,%xmm2
vcmpneq_osps (%ecx),%xmm6,%xmm7
vcmpge_oqps %xmm4,%xmm6,%xmm2
vcmpge_oqps (%ecx),%xmm6,%xmm7
vcmpgt_oqps %xmm4,%xmm6,%xmm2
vcmpgt_oqps (%ecx),%xmm6,%xmm7
vcmptrue_usps %xmm4,%xmm6,%xmm2
vcmptrue_usps (%ecx),%xmm6,%xmm7
vgf2p8mulb %xmm4, %xmm5, %xmm6
vgf2p8mulb (%ecx), %xmm5, %xmm6
vgf2p8mulb -123456(%esp,%esi,8), %xmm5, %xmm6
vgf2p8mulb 2032(%edx), %xmm5, %xmm6
vgf2p8mulb 2048(%edx), %xmm5, %xmm6
vgf2p8mulb -2048(%edx), %xmm5, %xmm6
vgf2p8mulb -2064(%edx), %xmm5, %xmm6
# Tests for op mem128, xmm, xmm
vmaskmovps (%ecx),%xmm4,%xmm6
vmaskmovpd (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist $7,%xmm4,%xmm6
vaeskeygenassist $7,(%ecx),%xmm6
vpcmpestri $7,%xmm4,%xmm6
vpcmpestri $7,(%ecx),%xmm6
vpcmpestrm $7,%xmm4,%xmm6
vpcmpestrm $7,(%ecx),%xmm6
vpcmpistri $7,%xmm4,%xmm6
vpcmpistri $7,(%ecx),%xmm6
vpcmpistrm $7,%xmm4,%xmm6
vpcmpistrm $7,(%ecx),%xmm6
vpermilpd $7,%xmm4,%xmm6
vpermilpd $7,(%ecx),%xmm6
vpermilps $7,%xmm4,%xmm6
vpermilps $7,(%ecx),%xmm6
vpshufd $7,%xmm4,%xmm6
vpshufd $7,(%ecx),%xmm6
vpshufhw $7,%xmm4,%xmm6
vpshufhw $7,(%ecx),%xmm6
vpshuflw $7,%xmm4,%xmm6
vpshuflw $7,(%ecx),%xmm6
vroundpd $7,%xmm4,%xmm6
vroundpd $7,(%ecx),%xmm6
vroundps $7,%xmm4,%xmm6
vroundps $7,(%ecx),%xmm6
# Tests for op xmm, xmm, mem128
vmaskmovps %xmm4,%xmm6,(%ecx)
vmaskmovpd %xmm4,%xmm6,(%ecx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd $7,%xmm4,%xmm6,%xmm2
vblendpd $7,(%ecx),%xmm6,%xmm2
vblendps $7,%xmm4,%xmm6,%xmm2
vblendps $7,(%ecx),%xmm6,%xmm2
vcmppd $7,%xmm4,%xmm6,%xmm2
vcmppd $7,(%ecx),%xmm6,%xmm2
vcmpps $7,%xmm4,%xmm6,%xmm2
vcmpps $7,(%ecx),%xmm6,%xmm2
vdppd $7,%xmm4,%xmm6,%xmm2
vdppd $7,(%ecx),%xmm6,%xmm2
vdpps $7,%xmm4,%xmm6,%xmm2
vdpps $7,(%ecx),%xmm6,%xmm2
vmpsadbw $7,%xmm4,%xmm6,%xmm2
vmpsadbw $7,(%ecx),%xmm6,%xmm2
vpalignr $7,%xmm4,%xmm6,%xmm2
vpalignr $7,(%ecx),%xmm6,%xmm2
vpblendw $7,%xmm4,%xmm6,%xmm2
vpblendw $7,(%ecx),%xmm6,%xmm2
vpclmulqdq $7,%xmm4,%xmm6,%xmm2
vpclmulqdq $7,(%ecx),%xmm6,%xmm2
vshufpd $7,%xmm4,%xmm6,%xmm2
vshufpd $7,(%ecx),%xmm6,%xmm2
vshufps $7,%xmm4,%xmm6,%xmm2
vshufps $7,(%ecx),%xmm6,%xmm2
vgf2p8affineqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineqb $123, (%ecx), %xmm5, %xmm6
vgf2p8affineqb $123, -123456(%esp,%esi,8), %xmm5, %xmm6
vgf2p8affineqb $123, 2032(%edx), %xmm5, %xmm6
vgf2p8affineqb $123, 2048(%edx), %xmm5, %xmm6
vgf2p8affineqb $123, -2048(%edx), %xmm5, %xmm6
vgf2p8affineqb $123, -2064(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $0xab, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, %xmm4, %xmm5, %xmm6
vgf2p8affineinvqb $123, (%ecx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2032(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $123, 2048(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2048(%edx), %xmm5, %xmm6
vgf2p8affineinvqb $123, -2064(%edx), %xmm5, %xmm6
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd %xmm4,%xmm6,%xmm2,%xmm7
vblendvpd %xmm4,(%ecx),%xmm2,%xmm7
vblendvps %xmm4,%xmm6,%xmm2,%xmm7
vblendvps %xmm4,(%ecx),%xmm2,%xmm7
vpblendvb %xmm4,%xmm6,%xmm2,%xmm7
vpblendvb %xmm4,(%ecx),%xmm2,%xmm7
# Tests for op mem64, ymm
vbroadcastsd (%ecx),%ymm4
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%ecx),%xmm4
vcvtdq2pd %xmm4,%xmm6
vcvtdq2pd (%ecx),%xmm4
vcvtps2pd %xmm4,%xmm6
vcvtps2pd (%ecx),%xmm4
vmovddup %xmm4,%xmm6
vmovddup (%ecx),%xmm4
vpmovsxbw %xmm4,%xmm6
vpmovsxbw (%ecx),%xmm4
vpmovsxwd %xmm4,%xmm6
vpmovsxwd (%ecx),%xmm4
vpmovsxdq %xmm4,%xmm6
vpmovsxdq (%ecx),%xmm4
vpmovzxbw %xmm4,%xmm6
vpmovzxbw (%ecx),%xmm4
vpmovzxwd %xmm4,%xmm6
vpmovzxwd (%ecx),%xmm4
vpmovzxdq %xmm4,%xmm6
vpmovzxdq (%ecx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%ecx),%xmm4
# Tests for op mem64, xmm
vmovsd (%ecx),%xmm4
# Tests for op xmm, mem64
vmovlpd %xmm4,(%ecx)
vmovlps %xmm4,(%ecx)
vmovhpd %xmm4,(%ecx)
vmovhps %xmm4,(%ecx)
vmovsd %xmm4,(%ecx)
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovq %xmm4,(%ecx)
vmovq (%ecx),%xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%ecx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%ecx),%ecx
# Tests for op mem64, xmm, xmm
vmovlpd (%ecx),%xmm4,%xmm6
vmovlps (%ecx),%xmm4,%xmm6
vmovhpd (%ecx),%xmm4,%xmm6
vmovhps (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%ecx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%ecx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%ecx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%ecx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%ecx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%ecx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%ecx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%ecx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%ecx),%xmm6,%xmm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%ecx),%xmm6,%xmm2
vcmpeq_oqsd %xmm4,%xmm6,%xmm2
vcmpeq_oqsd (%ecx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%ecx),%xmm6,%xmm2
vcmplt_ossd %xmm4,%xmm6,%xmm2
vcmplt_ossd (%ecx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%ecx),%xmm6,%xmm2
vcmple_ossd %xmm4,%xmm6,%xmm2
vcmple_ossd (%ecx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%ecx),%xmm6,%xmm2
vcmpunord_qsd %xmm4,%xmm6,%xmm2
vcmpunord_qsd (%ecx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%ecx),%xmm6,%xmm2
vcmpneq_uqsd %xmm4,%xmm6,%xmm2
vcmpneq_uqsd (%ecx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%ecx),%xmm6,%xmm2
vcmpnlt_ussd %xmm4,%xmm6,%xmm2
vcmpnlt_ussd (%ecx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%ecx),%xmm6,%xmm2
vcmpnle_ussd %xmm4,%xmm6,%xmm2
vcmpnle_ussd (%ecx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%ecx),%xmm6,%xmm2
vcmpord_qsd %xmm4,%xmm6,%xmm2
vcmpord_qsd (%ecx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%ecx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%ecx),%xmm6,%xmm2
vcmpnge_ussd %xmm4,%xmm6,%xmm2
vcmpnge_ussd (%ecx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%ecx),%xmm6,%xmm2
vcmpngt_ussd %xmm4,%xmm6,%xmm2
vcmpngt_ussd (%ecx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%ecx),%xmm6,%xmm2
vcmpfalse_oqsd %xmm4,%xmm6,%xmm2
vcmpfalse_oqsd (%ecx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%ecx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%ecx),%xmm6,%xmm2
vcmpge_ossd %xmm4,%xmm6,%xmm2
vcmpge_ossd (%ecx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%ecx),%xmm6,%xmm2
vcmpgt_ossd %xmm4,%xmm6,%xmm2
vcmpgt_ossd (%ecx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%ecx),%xmm6,%xmm2
vcmptrue_uqsd %xmm4,%xmm6,%xmm2
vcmptrue_uqsd (%ecx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%ecx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%ecx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%ecx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%ecx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%ecx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%ecx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%ecx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%ecx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%ecx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%ecx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%ecx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%ecx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%ecx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%ecx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%ecx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%ecx),%xmm6,%xmm2
# Tests for op mem64
vldmxcsr (%ecx)
vstmxcsr (%ecx)
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%ecx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%ecx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%ecx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%ecx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%ecx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%ecx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%ecx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%ecx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%ecx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%ecx),%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%ecx),%xmm6,%xmm2
vcmpeq_oqss %xmm4,%xmm6,%xmm2
vcmpeq_oqss (%ecx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%ecx),%xmm6,%xmm2
vcmplt_osss %xmm4,%xmm6,%xmm2
vcmplt_osss (%ecx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%ecx),%xmm6,%xmm2
vcmple_osss %xmm4,%xmm6,%xmm2
vcmple_osss (%ecx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%ecx),%xmm6,%xmm2
vcmpunord_qss %xmm4,%xmm6,%xmm2
vcmpunord_qss (%ecx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%ecx),%xmm6,%xmm2
vcmpneq_uqss %xmm4,%xmm6,%xmm2
vcmpneq_uqss (%ecx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%ecx),%xmm6,%xmm2
vcmpnlt_usss %xmm4,%xmm6,%xmm2
vcmpnlt_usss (%ecx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%ecx),%xmm6,%xmm2
vcmpnle_usss %xmm4,%xmm6,%xmm2
vcmpnle_usss (%ecx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%ecx),%xmm6,%xmm2
vcmpord_qss %xmm4,%xmm6,%xmm2
vcmpord_qss (%ecx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%ecx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%ecx),%xmm6,%xmm2
vcmpnge_usss %xmm4,%xmm6,%xmm2
vcmpnge_usss (%ecx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%ecx),%xmm6,%xmm2
vcmpngt_usss %xmm4,%xmm6,%xmm2
vcmpngt_usss (%ecx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%ecx),%xmm6,%xmm2
vcmpfalse_oqss %xmm4,%xmm6,%xmm2
vcmpfalse_oqss (%ecx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%ecx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%ecx),%xmm6,%xmm2
vcmpge_osss %xmm4,%xmm6,%xmm2
vcmpge_osss (%ecx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%ecx),%xmm6,%xmm2
vcmpgt_osss %xmm4,%xmm6,%xmm2
vcmpgt_osss (%ecx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%ecx),%xmm6,%xmm2
vcmptrue_uqss %xmm4,%xmm6,%xmm2
vcmptrue_uqss (%ecx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%ecx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%ecx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%ecx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%ecx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%ecx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%ecx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%ecx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%ecx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%ecx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%ecx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%ecx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%ecx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%ecx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%ecx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%ecx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%ecx),%xmm6,%xmm2
# Tests for op mem32, ymm
vbroadcastss (%ecx),%ymm4
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%ecx),%xmm4
vpmovsxbd %xmm4,%xmm6
vpmovsxbd (%ecx),%xmm4
vpmovsxwq %xmm4,%xmm6
vpmovsxwq (%ecx),%xmm4
vpmovzxbd %xmm4,%xmm6
vpmovzxbd (%ecx),%xmm4
vpmovzxwq %xmm4,%xmm6
vpmovzxwq (%ecx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%ecx),%xmm4
# Tests for op mem32, xmm
vbroadcastss (%ecx),%xmm4
vmovss (%ecx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%ecx)
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd %xmm4,%ecx
vmovd %xmm4,(%ecx)
vmovd %ecx,%xmm4
vmovd (%ecx),%xmm4
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%ecx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%ecx),%ecx
# Tests for op imm8, xmm, regq/mem32
vextractps $7,%xmm4,(%ecx)
# Tests for op imm8, xmm, regl/mem32
vpextrd $7,%xmm4,%ecx
vpextrd $7,%xmm4,(%ecx)
vextractps $7,%xmm4,%ecx
vextractps $7,%xmm4,(%ecx)
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd $7,%ecx,%xmm4,%xmm6
vpinsrd $7,(%ecx),%xmm4,%xmm6
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sd (%ecx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ss (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%ecx),%xmm6,%xmm2
vinsertps $7,%xmm4,%xmm6,%xmm2
vinsertps $7,(%ecx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/m16, xmm
vpmovsxbq %xmm4,%xmm6
vpmovsxbq (%ecx),%xmm4
vpmovzxbq %xmm4,%xmm6
vpmovzxbq (%ecx),%xmm4
# Tests for op imm8, xmm, regl/mem16
vpextrw $7,%xmm4,%ecx
vpextrw $7,%xmm4,(%ecx)
# Tests for op imm8, xmm, regq/mem16
vpextrw $7,%xmm4,(%ecx)
# Tests for op imm8, regl/mem16, xmm, xmm
vpinsrw $7,%ecx,%xmm4,%xmm6
vpinsrw $7,(%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm, regl/mem8
vpextrb $7,%xmm4,%ecx
vpextrb $7,%xmm4,(%ecx)
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb $7,%ecx,%xmm4,%xmm6
vpinsrb $7,(%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm, regq/mem8
vpextrb $7,%xmm4,(%ecx)
# Tests for op xmm, xmm
vmaskmovdqu %xmm4,%xmm6
vmovq %xmm4,%xmm6
# Tests for op xmm, regl
vmovmskpd %xmm4,%ecx
vmovmskps %xmm4,%ecx
vpmovmskb %xmm4,%ecx
# Tests for op xmm, xmm, xmm
vmovhlps %xmm4,%xmm6,%xmm2
vmovlhps %xmm4,%xmm6,%xmm2
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
# Tests for op imm8, xmm, xmm
vpslld $7,%xmm4,%xmm6
vpslldq $7,%xmm4,%xmm6
vpsllq $7,%xmm4,%xmm6
vpsllw $7,%xmm4,%xmm6
vpsrad $7,%xmm4,%xmm6
vpsraw $7,%xmm4,%xmm6
vpsrld $7,%xmm4,%xmm6
vpsrldq $7,%xmm4,%xmm6
vpsrlq $7,%xmm4,%xmm6
vpsrlw $7,%xmm4,%xmm6
# Tests for op imm8, xmm, regl
vpextrw $7,%xmm4,%ecx
# Tests for op ymm, regl
vmovmskpd %ymm4,%ecx
vmovmskps %ymm4,%ecx
# Default instructions without suffixes.
vcvtpd2dq %xmm4,%xmm6
vcvtpd2dq %ymm4,%xmm6
vcvtpd2ps %xmm4,%xmm6
vcvtpd2ps %ymm4,%xmm6
vcvttpd2dq %xmm4,%xmm6
vcvttpd2dq %ymm4,%xmm6
# Tests with different memory and register operands.
vldmxcsr 0x1234
vmovdqa 0x1234,%xmm0
vmovdqa %xmm0,0x1234
vmovd %xmm0,0x1234
vcvtsd2si 0x1234,%eax
vcvtdq2pd 0x1234,%ymm0
vcvtpd2psy 0x1234,%xmm0
vpavgb 0x1234,%xmm0,%xmm7
vaeskeygenassist $7,0x1234,%xmm0
vpextrb $7,%xmm0,0x1234
vcvtsi2sdl 0x1234,%xmm0,%xmm7
vpclmulqdq $7,0x1234,%xmm0,%xmm7
vblendvps %xmm0,0x1234,%xmm4,%xmm6
vpinsrb $7,0x1234,%xmm0,%xmm7
vmovdqa 0x1234,%ymm0
vmovdqa %ymm0,0x1234
vpermilpd 0x1234,%ymm0,%ymm7
vroundpd $7,0x1234,%ymm0
vextractf128 $7,%ymm0,0x1234
vperm2f128 $7,0x1234,%ymm0,%ymm7
vblendvpd %ymm0,0x1234,%ymm4,%ymm6
vldmxcsr (%ebp)
vmovdqa (%ebp),%xmm0
vmovdqa %xmm0,(%ebp)
vmovd %xmm0,(%ebp)
vcvtsd2si (%ebp),%eax
vcvtdq2pd (%ebp),%ymm0
vcvtpd2psy (%ebp),%xmm0
vpavgb (%ebp),%xmm0,%xmm7
vaeskeygenassist $7,(%ebp),%xmm0
vpextrb $7,%xmm0,(%ebp)
vcvtsi2sdl (%ebp),%xmm0,%xmm7
vpclmulqdq $7,(%ebp),%xmm0,%xmm7
vblendvps %xmm0,(%ebp),%xmm4,%xmm6
vpinsrb $7,(%ebp),%xmm0,%xmm7
vmovdqa (%ebp),%ymm0
vmovdqa %ymm0,(%ebp)
vpermilpd (%ebp),%ymm0,%ymm7
vroundpd $7,(%ebp),%ymm0
vextractf128 $7,%ymm0,(%ebp)
vperm2f128 $7,(%ebp),%ymm0,%ymm7
vblendvpd %ymm0,(%ebp),%ymm4,%ymm6
vldmxcsr (%esp)
vmovdqa (%esp),%xmm0
vmovdqa %xmm0,(%esp)
vmovd %xmm0,(%esp)
vcvtsd2si (%esp),%eax
vcvtdq2pd (%esp),%ymm0
vcvtpd2psy (%esp),%xmm0
vpavgb (%esp),%xmm0,%xmm7
vaeskeygenassist $7,(%esp),%xmm0
vpextrb $7,%xmm0,(%esp)
vcvtsi2sdl (%esp),%xmm0,%xmm7
vpclmulqdq $7,(%esp),%xmm0,%xmm7
vblendvps %xmm0,(%esp),%xmm4,%xmm6
vpinsrb $7,(%esp),%xmm0,%xmm7
vmovdqa (%esp),%ymm0
vmovdqa %ymm0,(%esp)
vpermilpd (%esp),%ymm0,%ymm7
vroundpd $7,(%esp),%ymm0
vextractf128 $7,%ymm0,(%esp)
vperm2f128 $7,(%esp),%ymm0,%ymm7
vblendvpd %ymm0,(%esp),%ymm4,%ymm6
vldmxcsr 0x99(%ebp)
vmovdqa 0x99(%ebp),%xmm0
vmovdqa %xmm0,0x99(%ebp)
vmovd %xmm0,0x99(%ebp)
vcvtsd2si 0x99(%ebp),%eax
vcvtdq2pd 0x99(%ebp),%ymm0
vcvtpd2psy 0x99(%ebp),%xmm0
vpavgb 0x99(%ebp),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%ebp),%xmm0
vpextrb $7,%xmm0,0x99(%ebp)
vcvtsi2sdl 0x99(%ebp),%xmm0,%xmm7
vpclmulqdq $7,0x99(%ebp),%xmm0,%xmm7
vblendvps %xmm0,0x99(%ebp),%xmm4,%xmm6
vpinsrb $7,0x99(%ebp),%xmm0,%xmm7
vmovdqa 0x99(%ebp),%ymm0
vmovdqa %ymm0,0x99(%ebp)
vpermilpd 0x99(%ebp),%ymm0,%ymm7
vroundpd $7,0x99(%ebp),%ymm0
vextractf128 $7,%ymm0,0x99(%ebp)
vperm2f128 $7,0x99(%ebp),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%ebp),%ymm4,%ymm6
vldmxcsr 0x99(,%eiz)
vmovdqa 0x99(,%eiz),%xmm0
vmovdqa %xmm0,0x99(,%eiz)
vmovd %xmm0,0x99(,%eiz)
vcvtsd2si 0x99(,%eiz),%eax
vcvtdq2pd 0x99(,%eiz),%ymm0
vcvtpd2psy 0x99(,%eiz),%xmm0
vpavgb 0x99(,%eiz),%xmm0,%xmm7
vaeskeygenassist $7,0x99(,%eiz),%xmm0
vpextrb $7,%xmm0,0x99(,%eiz)
vcvtsi2sdl 0x99(,%eiz),%xmm0,%xmm7
vpclmulqdq $7,0x99(,%eiz),%xmm0,%xmm7
vblendvps %xmm0,0x99(,%eiz),%xmm4,%xmm6
vpinsrb $7,0x99(,%eiz),%xmm0,%xmm7
vmovdqa 0x99(,%eiz),%ymm0
vmovdqa %ymm0,0x99(,%eiz)
vpermilpd 0x99(,%eiz),%ymm0,%ymm7
vroundpd $7,0x99(,%eiz),%ymm0
vextractf128 $7,%ymm0,0x99(,%eiz)
vperm2f128 $7,0x99(,%eiz),%ymm0,%ymm7
vblendvpd %ymm0,0x99(,%eiz),%ymm4,%ymm6
vldmxcsr 0x99(,%eiz,2)
vmovdqa 0x99(,%eiz,2),%xmm0
vmovdqa %xmm0,0x99(,%eiz,2)
vmovd %xmm0,0x99(,%eiz,2)
vcvtsd2si 0x99(,%eiz,2),%eax
vcvtdq2pd 0x99(,%eiz,2),%ymm0
vcvtpd2psy 0x99(,%eiz,2),%xmm0
vpavgb 0x99(,%eiz,2),%xmm0,%xmm7
vaeskeygenassist $7,0x99(,%eiz,2),%xmm0
vpextrb $7,%xmm0,0x99(,%eiz,2)
vcvtsi2sdl 0x99(,%eiz,2),%xmm0,%xmm7
vpclmulqdq $7,0x99(,%eiz,2),%xmm0,%xmm7
vblendvps %xmm0,0x99(,%eiz,2),%xmm4,%xmm6
vpinsrb $7,0x99(,%eiz,2),%xmm0,%xmm7
vmovdqa 0x99(,%eiz,2),%ymm0
vmovdqa %ymm0,0x99(,%eiz,2)
vpermilpd 0x99(,%eiz,2),%ymm0,%ymm7
vroundpd $7,0x99(,%eiz,2),%ymm0
vextractf128 $7,%ymm0,0x99(,%eiz,2)
vperm2f128 $7,0x99(,%eiz,2),%ymm0,%ymm7
vblendvpd %ymm0,0x99(,%eiz,2),%ymm4,%ymm6
vldmxcsr 0x99(%eax,%eiz)
vmovdqa 0x99(%eax,%eiz),%xmm0
vmovdqa %xmm0,0x99(%eax,%eiz)
vmovd %xmm0,0x99(%eax,%eiz)
vcvtsd2si 0x99(%eax,%eiz),%eax
vcvtdq2pd 0x99(%eax,%eiz),%ymm0
vcvtpd2psy 0x99(%eax,%eiz),%xmm0
vpavgb 0x99(%eax,%eiz),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%eax,%eiz),%xmm0
vpextrb $7,%xmm0,0x99(%eax,%eiz)
vcvtsi2sdl 0x99(%eax,%eiz),%xmm0,%xmm7
vpclmulqdq $7,0x99(%eax,%eiz),%xmm0,%xmm7
vblendvps %xmm0,0x99(%eax,%eiz),%xmm4,%xmm6
vpinsrb $7,0x99(%eax,%eiz),%xmm0,%xmm7
vmovdqa 0x99(%eax,%eiz),%ymm0
vmovdqa %ymm0,0x99(%eax,%eiz)
vpermilpd 0x99(%eax,%eiz),%ymm0,%ymm7
vroundpd $7,0x99(%eax,%eiz),%ymm0
vextractf128 $7,%ymm0,0x99(%eax,%eiz)
vperm2f128 $7,0x99(%eax,%eiz),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%eax,%eiz),%ymm4,%ymm6
vldmxcsr 0x99(%eax,%eiz,2)
vmovdqa 0x99(%eax,%eiz,2),%xmm0
vmovdqa %xmm0,0x99(%eax,%eiz,2)
vmovd %xmm0,0x99(%eax,%eiz,2)
vcvtsd2si 0x99(%eax,%eiz,2),%eax
vcvtdq2pd 0x99(%eax,%eiz,2),%ymm0
vcvtpd2psy 0x99(%eax,%eiz,2),%xmm0
vpavgb 0x99(%eax,%eiz,2),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%eax,%eiz,2),%xmm0
vpextrb $7,%xmm0,0x99(%eax,%eiz,2)
vcvtsi2sdl 0x99(%eax,%eiz,2),%xmm0,%xmm7
vpclmulqdq $7,0x99(%eax,%eiz,2),%xmm0,%xmm7
vblendvps %xmm0,0x99(%eax,%eiz,2),%xmm4,%xmm6
vpinsrb $7,0x99(%eax,%eiz,2),%xmm0,%xmm7
vmovdqa 0x99(%eax,%eiz,2),%ymm0
vmovdqa %ymm0,0x99(%eax,%eiz,2)
vpermilpd 0x99(%eax,%eiz,2),%ymm0,%ymm7
vroundpd $7,0x99(%eax,%eiz,2),%ymm0
vextractf128 $7,%ymm0,0x99(%eax,%eiz,2)
vperm2f128 $7,0x99(%eax,%eiz,2),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%eax,%eiz,2),%ymm4,%ymm6
vldmxcsr 0x99(%eax,%ebx,4)
vmovdqa 0x99(%eax,%ebx,4),%xmm0
vmovdqa %xmm0,0x99(%eax,%ebx,4)
vmovd %xmm0,0x99(%eax,%ebx,4)
vcvtsd2si 0x99(%eax,%ebx,4),%eax
vcvtdq2pd 0x99(%eax,%ebx,4),%ymm0
vcvtpd2psy 0x99(%eax,%ebx,4),%xmm0
vpavgb 0x99(%eax,%ebx,4),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%eax,%ebx,4),%xmm0
vpextrb $7,%xmm0,0x99(%eax,%ebx,4)
vcvtsi2sdl 0x99(%eax,%ebx,4),%xmm0,%xmm7
vpclmulqdq $7,0x99(%eax,%ebx,4),%xmm0,%xmm7
vblendvps %xmm0,0x99(%eax,%ebx,4),%xmm4,%xmm6
vpinsrb $7,0x99(%eax,%ebx,4),%xmm0,%xmm7
vmovdqa 0x99(%eax,%ebx,4),%ymm0
vmovdqa %ymm0,0x99(%eax,%ebx,4)
vpermilpd 0x99(%eax,%ebx,4),%ymm0,%ymm7
vroundpd $7,0x99(%eax,%ebx,4),%ymm0
vextractf128 $7,%ymm0,0x99(%eax,%ebx,4)
vperm2f128 $7,0x99(%eax,%ebx,4),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%eax,%ebx,4),%ymm4,%ymm6
vldmxcsr 0x99(%esp,%ecx,8)
vmovdqa 0x99(%esp,%ecx,8),%xmm0
vmovdqa %xmm0,0x99(%esp,%ecx,8)
vmovd %xmm0,0x99(%esp,%ecx,8)
vcvtsd2si 0x99(%esp,%ecx,8),%eax
vcvtdq2pd 0x99(%esp,%ecx,8),%ymm0
vcvtpd2psy 0x99(%esp,%ecx,8),%xmm0
vpavgb 0x99(%esp,%ecx,8),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%esp,%ecx,8),%xmm0
vpextrb $7,%xmm0,0x99(%esp,%ecx,8)
vcvtsi2sdl 0x99(%esp,%ecx,8),%xmm0,%xmm7
vpclmulqdq $7,0x99(%esp,%ecx,8),%xmm0,%xmm7
vblendvps %xmm0,0x99(%esp,%ecx,8),%xmm4,%xmm6
vpinsrb $7,0x99(%esp,%ecx,8),%xmm0,%xmm7
vmovdqa 0x99(%esp,%ecx,8),%ymm0
vmovdqa %ymm0,0x99(%esp,%ecx,8)
vpermilpd 0x99(%esp,%ecx,8),%ymm0,%ymm7
vroundpd $7,0x99(%esp,%ecx,8),%ymm0
vextractf128 $7,%ymm0,0x99(%esp,%ecx,8)
vperm2f128 $7,0x99(%esp,%ecx,8),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%esp,%ecx,8),%ymm4,%ymm6
vldmxcsr 0x99(%ebp,%edx,1)
vmovdqa 0x99(%ebp,%edx,1),%xmm0
vmovdqa %xmm0,0x99(%ebp,%edx,1)
vmovd %xmm0,0x99(%ebp,%edx,1)
vcvtsd2si 0x99(%ebp,%edx,1),%eax
vcvtdq2pd 0x99(%ebp,%edx,1),%ymm0
vcvtpd2psy 0x99(%ebp,%edx,1),%xmm0
vpavgb 0x99(%ebp,%edx,1),%xmm0,%xmm7
vaeskeygenassist $7,0x99(%ebp,%edx,1),%xmm0
vpextrb $7,%xmm0,0x99(%ebp,%edx,1)
vcvtsi2sdl 0x99(%ebp,%edx,1),%xmm0,%xmm7
vpclmulqdq $7,0x99(%ebp,%edx,1),%xmm0,%xmm7
vblendvps %xmm0,0x99(%ebp,%edx,1),%xmm4,%xmm6
vpinsrb $7,0x99(%ebp,%edx,1),%xmm0,%xmm7
vmovdqa 0x99(%ebp,%edx,1),%ymm0
vmovdqa %ymm0,0x99(%ebp,%edx,1)
vpermilpd 0x99(%ebp,%edx,1),%ymm0,%ymm7
vroundpd $7,0x99(%ebp,%edx,1),%ymm0
vextractf128 $7,%ymm0,0x99(%ebp,%edx,1)
vperm2f128 $7,0x99(%ebp,%edx,1),%ymm0,%ymm7
vblendvpd %ymm0,0x99(%ebp,%edx,1),%ymm4,%ymm6
# Tests for all register operands.
vmovmskpd %xmm0,%eax
vpslld $7,%xmm0,%xmm7
vmovmskps %ymm0,%eax
.intel_syntax noprefix
# Tests for op mem32
vldmxcsr DWORD PTR [ecx]
vldmxcsr [ecx]
vstmxcsr DWORD PTR [ecx]
vstmxcsr [ecx]
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vmaskmovpd ymm6,ymm4,YMMWORD PTR [ecx]
vmaskmovpd YMMWORD PTR [ecx],ymm6,ymm4
vmaskmovpd ymm6,ymm4,[ecx]
vmaskmovpd [ecx],ymm6,ymm4
vmaskmovps ymm6,ymm4,YMMWORD PTR [ecx]
vmaskmovps YMMWORD PTR [ecx],ymm6,ymm4
vmaskmovps ymm6,ymm4,[ecx]
vmaskmovps [ecx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermilpd ymm2,ymm6,7
vpermilpd ymm6,YMMWORD PTR [ecx],7
vpermilpd ymm6,[ecx],7
vpermilps ymm2,ymm6,7
vpermilps ymm6,YMMWORD PTR [ecx],7
vpermilps ymm6,[ecx],7
vroundpd ymm2,ymm6,7
vroundpd ymm6,YMMWORD PTR [ecx],7
vroundpd ymm6,[ecx],7
vroundps ymm2,ymm6,7
vroundps ymm6,YMMWORD PTR [ecx],7
vroundps ymm6,[ecx],7
# Tests for op ymm/mem256, ymm, ymm
vaddpd ymm2,ymm6,ymm4
vaddpd ymm2,ymm6,YMMWORD PTR [ecx]
vaddpd ymm2,ymm6,[ecx]
vaddps ymm2,ymm6,ymm4
vaddps ymm2,ymm6,YMMWORD PTR [ecx]
vaddps ymm2,ymm6,[ecx]
vaddsubpd ymm2,ymm6,ymm4
vaddsubpd ymm2,ymm6,YMMWORD PTR [ecx]
vaddsubpd ymm2,ymm6,[ecx]
vaddsubps ymm2,ymm6,ymm4
vaddsubps ymm2,ymm6,YMMWORD PTR [ecx]
vaddsubps ymm2,ymm6,[ecx]
vandnpd ymm2,ymm6,ymm4
vandnpd ymm2,ymm6,YMMWORD PTR [ecx]
vandnpd ymm2,ymm6,[ecx]
vandnps ymm2,ymm6,ymm4
vandnps ymm2,ymm6,YMMWORD PTR [ecx]
vandnps ymm2,ymm6,[ecx]
vandpd ymm2,ymm6,ymm4
vandpd ymm2,ymm6,YMMWORD PTR [ecx]
vandpd ymm2,ymm6,[ecx]
vandps ymm2,ymm6,ymm4
vandps ymm2,ymm6,YMMWORD PTR [ecx]
vandps ymm2,ymm6,[ecx]
vdivpd ymm2,ymm6,ymm4
vdivpd ymm2,ymm6,YMMWORD PTR [ecx]
vdivpd ymm2,ymm6,[ecx]
vdivps ymm2,ymm6,ymm4
vdivps ymm2,ymm6,YMMWORD PTR [ecx]
vdivps ymm2,ymm6,[ecx]
vhaddpd ymm2,ymm6,ymm4
vhaddpd ymm2,ymm6,YMMWORD PTR [ecx]
vhaddpd ymm2,ymm6,[ecx]
vhaddps ymm2,ymm6,ymm4
vhaddps ymm2,ymm6,YMMWORD PTR [ecx]
vhaddps ymm2,ymm6,[ecx]
vhsubpd ymm2,ymm6,ymm4
vhsubpd ymm2,ymm6,YMMWORD PTR [ecx]
vhsubpd ymm2,ymm6,[ecx]
vhsubps ymm2,ymm6,ymm4
vhsubps ymm2,ymm6,YMMWORD PTR [ecx]
vhsubps ymm2,ymm6,[ecx]
vmaxpd ymm2,ymm6,ymm4
vmaxpd ymm2,ymm6,YMMWORD PTR [ecx]
vmaxpd ymm2,ymm6,[ecx]
vmaxps ymm2,ymm6,ymm4
vmaxps ymm2,ymm6,YMMWORD PTR [ecx]
vmaxps ymm2,ymm6,[ecx]
vminpd ymm2,ymm6,ymm4
vminpd ymm2,ymm6,YMMWORD PTR [ecx]
vminpd ymm2,ymm6,[ecx]
vminps ymm2,ymm6,ymm4
vminps ymm2,ymm6,YMMWORD PTR [ecx]
vminps ymm2,ymm6,[ecx]
vmulpd ymm2,ymm6,ymm4
vmulpd ymm2,ymm6,YMMWORD PTR [ecx]
vmulpd ymm2,ymm6,[ecx]
vmulps ymm2,ymm6,ymm4
vmulps ymm2,ymm6,YMMWORD PTR [ecx]
vmulps ymm2,ymm6,[ecx]
vorpd ymm2,ymm6,ymm4
vorpd ymm2,ymm6,YMMWORD PTR [ecx]
vorpd ymm2,ymm6,[ecx]
vorps ymm2,ymm6,ymm4
vorps ymm2,ymm6,YMMWORD PTR [ecx]
vorps ymm2,ymm6,[ecx]
vpermilpd ymm2,ymm6,ymm4
vpermilpd ymm2,ymm6,YMMWORD PTR [ecx]
vpermilpd ymm2,ymm6,[ecx]
vpermilps ymm2,ymm6,ymm4
vpermilps ymm2,ymm6,YMMWORD PTR [ecx]
vpermilps ymm2,ymm6,[ecx]
vsubpd ymm2,ymm6,ymm4
vsubpd ymm2,ymm6,YMMWORD PTR [ecx]
vsubpd ymm2,ymm6,[ecx]
vsubps ymm2,ymm6,ymm4
vsubps ymm2,ymm6,YMMWORD PTR [ecx]
vsubps ymm2,ymm6,[ecx]
vunpckhpd ymm2,ymm6,ymm4
vunpckhpd ymm2,ymm6,YMMWORD PTR [ecx]
vunpckhpd ymm2,ymm6,[ecx]
vunpckhps ymm2,ymm6,ymm4
vunpckhps ymm2,ymm6,YMMWORD PTR [ecx]
vunpckhps ymm2,ymm6,[ecx]
vunpcklpd ymm2,ymm6,ymm4
vunpcklpd ymm2,ymm6,YMMWORD PTR [ecx]
vunpcklpd ymm2,ymm6,[ecx]
vunpcklps ymm2,ymm6,ymm4
vunpcklps ymm2,ymm6,YMMWORD PTR [ecx]
vunpcklps ymm2,ymm6,[ecx]
vxorpd ymm2,ymm6,ymm4
vxorpd ymm2,ymm6,YMMWORD PTR [ecx]
vxorpd ymm2,ymm6,[ecx]
vxorps ymm2,ymm6,ymm4
vxorps ymm2,ymm6,YMMWORD PTR [ecx]
vxorps ymm2,ymm6,[ecx]
vcmpeqpd ymm2,ymm6,ymm4
vcmpeqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeqpd ymm2,ymm6,[ecx]
vcmpltpd ymm2,ymm6,ymm4
vcmpltpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpltpd ymm2,ymm6,[ecx]
vcmplepd ymm2,ymm6,ymm4
vcmplepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmplepd ymm2,ymm6,[ecx]
vcmpunordpd ymm2,ymm6,ymm4
vcmpunordpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunordpd ymm2,ymm6,[ecx]
vcmpneqpd ymm2,ymm6,ymm4
vcmpneqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneqpd ymm2,ymm6,[ecx]
vcmpnltpd ymm2,ymm6,ymm4
vcmpnltpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnltpd ymm2,ymm6,[ecx]
vcmpnlepd ymm2,ymm6,ymm4
vcmpnlepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnlepd ymm2,ymm6,[ecx]
vcmpordpd ymm2,ymm6,ymm4
vcmpordpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpordpd ymm2,ymm6,[ecx]
vcmpeq_uqpd ymm2,ymm6,ymm4
vcmpeq_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_uqpd ymm2,ymm6,[ecx]
vcmpngepd ymm2,ymm6,ymm4
vcmpngepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngepd ymm2,ymm6,[ecx]
vcmpngtpd ymm2,ymm6,ymm4
vcmpngtpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngtpd ymm2,ymm6,[ecx]
vcmpfalsepd ymm2,ymm6,ymm4
vcmpfalsepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalsepd ymm2,ymm6,[ecx]
vcmpneq_oqpd ymm2,ymm6,ymm4
vcmpneq_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_oqpd ymm2,ymm6,[ecx]
vcmpgepd ymm2,ymm6,ymm4
vcmpgepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgepd ymm2,ymm6,[ecx]
vcmpgtpd ymm2,ymm6,ymm4
vcmpgtpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgtpd ymm2,ymm6,[ecx]
vcmptruepd ymm2,ymm6,ymm4
vcmptruepd ymm2,ymm6,YMMWORD PTR [ecx]
vcmptruepd ymm2,ymm6,[ecx]
vcmpeq_ospd ymm2,ymm6,ymm4
vcmpeq_ospd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_ospd ymm2,ymm6,[ecx]
vcmplt_oqpd ymm2,ymm6,ymm4
vcmplt_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmplt_oqpd ymm2,ymm6,[ecx]
vcmple_oqpd ymm2,ymm6,ymm4
vcmple_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmple_oqpd ymm2,ymm6,[ecx]
vcmpunord_spd ymm2,ymm6,ymm4
vcmpunord_spd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunord_spd ymm2,ymm6,[ecx]
vcmpneq_uspd ymm2,ymm6,ymm4
vcmpneq_uspd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_uspd ymm2,ymm6,[ecx]
vcmpnlt_uqpd ymm2,ymm6,ymm4
vcmpnlt_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnlt_uqpd ymm2,ymm6,[ecx]
vcmpnle_uqpd ymm2,ymm6,ymm4
vcmpnle_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnle_uqpd ymm2,ymm6,[ecx]
vcmpord_spd ymm2,ymm6,ymm4
vcmpord_spd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpord_spd ymm2,ymm6,[ecx]
vcmpeq_uspd ymm2,ymm6,ymm4
vcmpeq_uspd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_uspd ymm2,ymm6,[ecx]
vcmpnge_uqpd ymm2,ymm6,ymm4
vcmpnge_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnge_uqpd ymm2,ymm6,[ecx]
vcmpngt_uqpd ymm2,ymm6,ymm4
vcmpngt_uqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngt_uqpd ymm2,ymm6,[ecx]
vcmpfalse_ospd ymm2,ymm6,ymm4
vcmpfalse_ospd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalse_ospd ymm2,ymm6,[ecx]
vcmpneq_ospd ymm2,ymm6,ymm4
vcmpneq_ospd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_ospd ymm2,ymm6,[ecx]
vcmpge_oqpd ymm2,ymm6,ymm4
vcmpge_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpge_oqpd ymm2,ymm6,[ecx]
vcmpgt_oqpd ymm2,ymm6,ymm4
vcmpgt_oqpd ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgt_oqpd ymm2,ymm6,[ecx]
vcmptrue_uspd ymm2,ymm6,ymm4
vcmptrue_uspd ymm2,ymm6,YMMWORD PTR [ecx]
vcmptrue_uspd ymm2,ymm6,[ecx]
vcmpeqps ymm2,ymm6,ymm4
vcmpeqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeqps ymm2,ymm6,[ecx]
vcmpltps ymm2,ymm6,ymm4
vcmpltps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpltps ymm2,ymm6,[ecx]
vcmpleps ymm2,ymm6,ymm4
vcmpleps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpleps ymm2,ymm6,[ecx]
vcmpunordps ymm2,ymm6,ymm4
vcmpunordps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunordps ymm2,ymm6,[ecx]
vcmpneqps ymm2,ymm6,ymm4
vcmpneqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneqps ymm2,ymm6,[ecx]
vcmpnltps ymm2,ymm6,ymm4
vcmpnltps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnltps ymm2,ymm6,[ecx]
vcmpnleps ymm2,ymm6,ymm4
vcmpnleps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnleps ymm2,ymm6,[ecx]
vcmpordps ymm2,ymm6,ymm4
vcmpordps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpordps ymm2,ymm6,[ecx]
vcmpeq_uqps ymm2,ymm6,ymm4
vcmpeq_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_uqps ymm2,ymm6,[ecx]
vcmpngeps ymm2,ymm6,ymm4
vcmpngeps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngeps ymm2,ymm6,[ecx]
vcmpngtps ymm2,ymm6,ymm4
vcmpngtps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngtps ymm2,ymm6,[ecx]
vcmpfalseps ymm2,ymm6,ymm4
vcmpfalseps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalseps ymm2,ymm6,[ecx]
vcmpneq_oqps ymm2,ymm6,ymm4
vcmpneq_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_oqps ymm2,ymm6,[ecx]
vcmpgeps ymm2,ymm6,ymm4
vcmpgeps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgeps ymm2,ymm6,[ecx]
vcmpgtps ymm2,ymm6,ymm4
vcmpgtps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgtps ymm2,ymm6,[ecx]
vcmptrueps ymm2,ymm6,ymm4
vcmptrueps ymm2,ymm6,YMMWORD PTR [ecx]
vcmptrueps ymm2,ymm6,[ecx]
vcmpeq_osps ymm2,ymm6,ymm4
vcmpeq_osps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_osps ymm2,ymm6,[ecx]
vcmplt_oqps ymm2,ymm6,ymm4
vcmplt_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmplt_oqps ymm2,ymm6,[ecx]
vcmple_oqps ymm2,ymm6,ymm4
vcmple_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmple_oqps ymm2,ymm6,[ecx]
vcmpunord_sps ymm2,ymm6,ymm4
vcmpunord_sps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpunord_sps ymm2,ymm6,[ecx]
vcmpneq_usps ymm2,ymm6,ymm4
vcmpneq_usps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_usps ymm2,ymm6,[ecx]
vcmpnlt_uqps ymm2,ymm6,ymm4
vcmpnlt_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnlt_uqps ymm2,ymm6,[ecx]
vcmpnle_uqps ymm2,ymm6,ymm4
vcmpnle_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnle_uqps ymm2,ymm6,[ecx]
vcmpord_sps ymm2,ymm6,ymm4
vcmpord_sps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpord_sps ymm2,ymm6,[ecx]
vcmpeq_usps ymm2,ymm6,ymm4
vcmpeq_usps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpeq_usps ymm2,ymm6,[ecx]
vcmpnge_uqps ymm2,ymm6,ymm4
vcmpnge_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpnge_uqps ymm2,ymm6,[ecx]
vcmpngt_uqps ymm2,ymm6,ymm4
vcmpngt_uqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpngt_uqps ymm2,ymm6,[ecx]
vcmpfalse_osps ymm2,ymm6,ymm4
vcmpfalse_osps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpfalse_osps ymm2,ymm6,[ecx]
vcmpneq_osps ymm2,ymm6,ymm4
vcmpneq_osps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpneq_osps ymm2,ymm6,[ecx]
vcmpge_oqps ymm2,ymm6,ymm4
vcmpge_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpge_oqps ymm2,ymm6,[ecx]
vcmpgt_oqps ymm2,ymm6,ymm4
vcmpgt_oqps ymm2,ymm6,YMMWORD PTR [ecx]
vcmpgt_oqps ymm2,ymm6,[ecx]
vcmptrue_usps ymm2,ymm6,ymm4
vcmptrue_usps ymm2,ymm6,YMMWORD PTR [ecx]
vcmptrue_usps ymm2,ymm6,[ecx]
vgf2p8mulb ymm6, ymm5, ymm4
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [ecx]
vgf2p8mulb ymm6, ymm5, [ecx]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx+4064]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx+4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx-4096]
vgf2p8mulb ymm6, ymm5, YMMWORD PTR [edx-4128]
# Tests for op ymm/mem256, xmm
vcvtpd2dq xmm4,ymm4
vcvtpd2dq xmm4,YMMWORD PTR [ecx]
vcvtpd2ps xmm4,ymm4
vcvtpd2ps xmm4,YMMWORD PTR [ecx]
vcvttpd2dq xmm4,ymm4
vcvttpd2dq xmm4,YMMWORD PTR [ecx]
# Tests for op ymm/mem256, ymm
vcvtdq2ps ymm6,ymm4
vcvtdq2ps ymm4,YMMWORD PTR [ecx]
vcvtdq2ps ymm4,[ecx]
vcvtps2dq ymm6,ymm4
vcvtps2dq ymm4,YMMWORD PTR [ecx]
vcvtps2dq ymm4,[ecx]
vcvttps2dq ymm6,ymm4
vcvttps2dq ymm4,YMMWORD PTR [ecx]
vcvttps2dq ymm4,[ecx]
vmovapd ymm6,ymm4
vmovapd ymm4,YMMWORD PTR [ecx]
vmovapd ymm4,[ecx]
vmovaps ymm6,ymm4
vmovaps ymm4,YMMWORD PTR [ecx]
vmovaps ymm4,[ecx]
vmovdqa ymm6,ymm4
vmovdqa ymm4,YMMWORD PTR [ecx]
vmovdqa ymm4,[ecx]
vmovdqu ymm6,ymm4
vmovdqu ymm4,YMMWORD PTR [ecx]
vmovdqu ymm4,[ecx]
vmovddup ymm6,ymm4
vmovddup ymm4,YMMWORD PTR [ecx]
vmovddup ymm4,[ecx]
vmovshdup ymm6,ymm4
vmovshdup ymm4,YMMWORD PTR [ecx]
vmovshdup ymm4,[ecx]
vmovsldup ymm6,ymm4
vmovsldup ymm4,YMMWORD PTR [ecx]
vmovsldup ymm4,[ecx]
vmovupd ymm6,ymm4
vmovupd ymm4,YMMWORD PTR [ecx]
vmovupd ymm4,[ecx]
vmovups ymm6,ymm4
vmovups ymm4,YMMWORD PTR [ecx]
vmovups ymm4,[ecx]
vptest ymm6,ymm4
vptest ymm4,YMMWORD PTR [ecx]
vptest ymm4,[ecx]
vrcpps ymm6,ymm4
vrcpps ymm4,YMMWORD PTR [ecx]
vrcpps ymm4,[ecx]
vrsqrtps ymm6,ymm4
vrsqrtps ymm4,YMMWORD PTR [ecx]
vrsqrtps ymm4,[ecx]
vsqrtpd ymm6,ymm4
vsqrtpd ymm4,YMMWORD PTR [ecx]
vsqrtpd ymm4,[ecx]
vsqrtps ymm6,ymm4
vsqrtps ymm4,YMMWORD PTR [ecx]
vsqrtps ymm4,[ecx]
vtestpd ymm6,ymm4
vtestpd ymm4,YMMWORD PTR [ecx]
vtestpd ymm4,[ecx]
vtestps ymm6,ymm4
vtestps ymm4,YMMWORD PTR [ecx]
vtestps ymm4,[ecx]
# Tests for op ymm, ymm/mem256
vmovapd ymm6,ymm4
vmovapd YMMWORD PTR [ecx],ymm4
vmovapd [ecx],ymm4
vmovaps ymm6,ymm4
vmovaps YMMWORD PTR [ecx],ymm4
vmovaps [ecx],ymm4
vmovdqa ymm6,ymm4
vmovdqa YMMWORD PTR [ecx],ymm4
vmovdqa [ecx],ymm4
vmovdqu ymm6,ymm4
vmovdqu YMMWORD PTR [ecx],ymm4
vmovdqu [ecx],ymm4
vmovupd ymm6,ymm4
vmovupd YMMWORD PTR [ecx],ymm4
vmovupd [ecx],ymm4
vmovups ymm6,ymm4
vmovups YMMWORD PTR [ecx],ymm4
vmovups [ecx],ymm4
# Tests for op mem256, ymm
vlddqu ymm4,YMMWORD PTR [ecx]
vlddqu ymm4,[ecx]
# Tests for op ymm, mem256
vmovntdq YMMWORD PTR [ecx],ymm4
vmovntdq [ecx],ymm4
vmovntpd YMMWORD PTR [ecx],ymm4
vmovntpd [ecx],ymm4
vmovntps YMMWORD PTR [ecx],ymm4
vmovntps [ecx],ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vblendpd ymm2,ymm6,ymm4,7
vblendpd ymm2,ymm6,YMMWORD PTR [ecx],7
vblendpd ymm2,ymm6,[ecx],7
vblendps ymm2,ymm6,ymm4,7
vblendps ymm2,ymm6,YMMWORD PTR [ecx],7
vblendps ymm2,ymm6,[ecx],7
vcmppd ymm2,ymm6,ymm4,7
vcmppd ymm2,ymm6,YMMWORD PTR [ecx],7
vcmppd ymm2,ymm6,[ecx],7
vcmpps ymm2,ymm6,ymm4,7
vcmpps ymm2,ymm6,YMMWORD PTR [ecx],7
vcmpps ymm2,ymm6,[ecx],7
vdpps ymm2,ymm6,ymm4,7
vdpps ymm2,ymm6,YMMWORD PTR [ecx],7
vdpps ymm2,ymm6,[ecx],7
vperm2f128 ymm2,ymm6,ymm4,7
vperm2f128 ymm2,ymm6,YMMWORD PTR [ecx],7
vperm2f128 ymm2,ymm6,[ecx],7
vshufpd ymm2,ymm6,ymm4,7
vshufpd ymm2,ymm6,YMMWORD PTR [ecx],7
vshufpd ymm2,ymm6,[ecx],7
vshufps ymm2,ymm6,ymm4,7
vshufps ymm2,ymm6,YMMWORD PTR [ecx],7
vshufps ymm2,ymm6,[ecx],7
vgf2p8affineqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineqb ymm6, ymm5, ymm4, 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [ecx], 123
vgf2p8affineqb ymm6, ymm5, [ecx], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx+4064], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx+4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx-4096], 123
vgf2p8affineqb ymm6, ymm5, YMMWORD PTR [edx-4128], 123
vgf2p8affineinvqb ymm6, ymm5, ymm4, 0xab
vgf2p8affineinvqb ymm6, ymm5, ymm4, 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [ecx], 123
vgf2p8affineinvqb ymm6, ymm5, [ecx], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx+4064], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx+4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx-4096], 123
vgf2p8affineinvqb ymm6, ymm5, YMMWORD PTR [edx-4128], 123
# Tests for op ymm, ymm/mem256, ymm, ymm
vblendvpd ymm7,ymm2,ymm6,ymm4
vblendvpd ymm7,ymm2,YMMWORD PTR [ecx],ymm4
vblendvpd ymm7,ymm2,[ecx],ymm4
vblendvps ymm7,ymm2,ymm6,ymm4
vblendvps ymm7,ymm2,YMMWORD PTR [ecx],ymm4
vblendvps ymm7,ymm2,[ecx],ymm4
# Tests for op imm8, xmm/mem128, ymm, ymm
vinsertf128 ymm6,ymm4,xmm4,7
vinsertf128 ymm6,ymm4,XMMWORD PTR [ecx],7
vinsertf128 ymm6,ymm4,[ecx],7
# Tests for op imm8, ymm, xmm/mem128
vextractf128 xmm4,ymm4,7
vextractf128 XMMWORD PTR [ecx],ymm4,7
vextractf128 [ecx],ymm4,7
# Tests for op mem128, ymm
vbroadcastf128 ymm4,XMMWORD PTR [ecx]
vbroadcastf128 ymm4,[ecx]
# Tests for op xmm/mem128, xmm
vcvtdq2ps xmm6,xmm4
vcvtdq2ps xmm4,XMMWORD PTR [ecx]
vcvtdq2ps xmm4,[ecx]
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm4,XMMWORD PTR [ecx]
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm4,XMMWORD PTR [ecx]
vcvtps2dq xmm6,xmm4
vcvtps2dq xmm4,XMMWORD PTR [ecx]
vcvtps2dq xmm4,[ecx]
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm4,XMMWORD PTR [ecx]
vcvttps2dq xmm6,xmm4
vcvttps2dq xmm4,XMMWORD PTR [ecx]
vcvttps2dq xmm4,[ecx]
vmovapd xmm6,xmm4
vmovapd xmm4,XMMWORD PTR [ecx]
vmovapd xmm4,[ecx]
vmovaps xmm6,xmm4
vmovaps xmm4,XMMWORD PTR [ecx]
vmovaps xmm4,[ecx]
vmovdqa xmm6,xmm4
vmovdqa xmm4,XMMWORD PTR [ecx]
vmovdqa xmm4,[ecx]
vmovdqu xmm6,xmm4
vmovdqu xmm4,XMMWORD PTR [ecx]
vmovdqu xmm4,[ecx]
vmovshdup xmm6,xmm4
vmovshdup xmm4,XMMWORD PTR [ecx]
vmovshdup xmm4,[ecx]
vmovsldup xmm6,xmm4
vmovsldup xmm4,XMMWORD PTR [ecx]
vmovsldup xmm4,[ecx]
vmovupd xmm6,xmm4
vmovupd xmm4,XMMWORD PTR [ecx]
vmovupd xmm4,[ecx]
vmovups xmm6,xmm4
vmovups xmm4,XMMWORD PTR [ecx]
vmovups xmm4,[ecx]
vpabsb xmm6,xmm4
vpabsb xmm4,XMMWORD PTR [ecx]
vpabsb xmm4,[ecx]
vpabsw xmm6,xmm4
vpabsw xmm4,XMMWORD PTR [ecx]
vpabsw xmm4,[ecx]
vpabsd xmm6,xmm4
vpabsd xmm4,XMMWORD PTR [ecx]
vpabsd xmm4,[ecx]
vphminposuw xmm6,xmm4
vphminposuw xmm4,XMMWORD PTR [ecx]
vphminposuw xmm4,[ecx]
vptest xmm6,xmm4
vptest xmm4,XMMWORD PTR [ecx]
vptest xmm4,[ecx]
vtestps xmm6,xmm4
vtestps xmm4,XMMWORD PTR [ecx]
vtestps xmm4,[ecx]
vtestpd xmm6,xmm4
vtestpd xmm4,XMMWORD PTR [ecx]
vtestpd xmm4,[ecx]
vrcpps xmm6,xmm4
vrcpps xmm4,XMMWORD PTR [ecx]
vrcpps xmm4,[ecx]
vrsqrtps xmm6,xmm4
vrsqrtps xmm4,XMMWORD PTR [ecx]
vrsqrtps xmm4,[ecx]
vsqrtpd xmm6,xmm4
vsqrtpd xmm4,XMMWORD PTR [ecx]
vsqrtpd xmm4,[ecx]
vsqrtps xmm6,xmm4
vsqrtps xmm4,XMMWORD PTR [ecx]
vsqrtps xmm4,[ecx]
vaesimc xmm6,xmm4
vaesimc xmm4,XMMWORD PTR [ecx]
vaesimc xmm4,[ecx]
# Tests for op xmm, xmm/mem128
vmovapd xmm6,xmm4
vmovapd XMMWORD PTR [ecx],xmm4
vmovapd [ecx],xmm4
vmovaps xmm6,xmm4
vmovaps XMMWORD PTR [ecx],xmm4
vmovaps [ecx],xmm4
vmovdqa xmm6,xmm4
vmovdqa XMMWORD PTR [ecx],xmm4
vmovdqa [ecx],xmm4
vmovdqu xmm6,xmm4
vmovdqu XMMWORD PTR [ecx],xmm4
vmovdqu [ecx],xmm4
vmovupd xmm6,xmm4
vmovupd XMMWORD PTR [ecx],xmm4
vmovupd [ecx],xmm4
vmovups xmm6,xmm4
vmovups XMMWORD PTR [ecx],xmm4
vmovups [ecx],xmm4
# Tests for op mem128, xmm
vlddqu xmm4,XMMWORD PTR [ecx]
vlddqu xmm4,[ecx]
vmovntdqa xmm4,XMMWORD PTR [ecx]
vmovntdqa xmm4,[ecx]
# Tests for op xmm, mem128
vmovntdq XMMWORD PTR [ecx],xmm4
vmovntdq [ecx],xmm4
vmovntpd XMMWORD PTR [ecx],xmm4
vmovntpd [ecx],xmm4
vmovntps XMMWORD PTR [ecx],xmm4
vmovntps [ecx],xmm4
# Tests for op xmm/mem128, ymm
vcvtdq2pd ymm4,xmm4
vcvtdq2pd ymm4,XMMWORD PTR [ecx]
vcvtdq2pd ymm4,[ecx]
vcvtps2pd ymm4,xmm4
vcvtps2pd ymm4,XMMWORD PTR [ecx]
vcvtps2pd ymm4,[ecx]
# Tests for op xmm/mem128, xmm, xmm
vaddpd xmm2,xmm6,xmm4
vaddpd xmm7,xmm6,XMMWORD PTR [ecx]
vaddpd xmm7,xmm6,[ecx]
vaddps xmm2,xmm6,xmm4
vaddps xmm7,xmm6,XMMWORD PTR [ecx]
vaddps xmm7,xmm6,[ecx]
vaddsubpd xmm2,xmm6,xmm4
vaddsubpd xmm7,xmm6,XMMWORD PTR [ecx]
vaddsubpd xmm7,xmm6,[ecx]
vaddsubps xmm2,xmm6,xmm4
vaddsubps xmm7,xmm6,XMMWORD PTR [ecx]
vaddsubps xmm7,xmm6,[ecx]
vandnpd xmm2,xmm6,xmm4
vandnpd xmm7,xmm6,XMMWORD PTR [ecx]
vandnpd xmm7,xmm6,[ecx]
vandnps xmm2,xmm6,xmm4
vandnps xmm7,xmm6,XMMWORD PTR [ecx]
vandnps xmm7,xmm6,[ecx]
vandpd xmm2,xmm6,xmm4
vandpd xmm7,xmm6,XMMWORD PTR [ecx]
vandpd xmm7,xmm6,[ecx]
vandps xmm2,xmm6,xmm4
vandps xmm7,xmm6,XMMWORD PTR [ecx]
vandps xmm7,xmm6,[ecx]
vdivpd xmm2,xmm6,xmm4
vdivpd xmm7,xmm6,XMMWORD PTR [ecx]
vdivpd xmm7,xmm6,[ecx]
vdivps xmm2,xmm6,xmm4
vdivps xmm7,xmm6,XMMWORD PTR [ecx]
vdivps xmm7,xmm6,[ecx]
vhaddpd xmm2,xmm6,xmm4
vhaddpd xmm7,xmm6,XMMWORD PTR [ecx]
vhaddpd xmm7,xmm6,[ecx]
vhaddps xmm2,xmm6,xmm4
vhaddps xmm7,xmm6,XMMWORD PTR [ecx]
vhaddps xmm7,xmm6,[ecx]
vhsubpd xmm2,xmm6,xmm4
vhsubpd xmm7,xmm6,XMMWORD PTR [ecx]
vhsubpd xmm7,xmm6,[ecx]
vhsubps xmm2,xmm6,xmm4
vhsubps xmm7,xmm6,XMMWORD PTR [ecx]
vhsubps xmm7,xmm6,[ecx]
vmaxpd xmm2,xmm6,xmm4
vmaxpd xmm7,xmm6,XMMWORD PTR [ecx]
vmaxpd xmm7,xmm6,[ecx]
vmaxps xmm2,xmm6,xmm4
vmaxps xmm7,xmm6,XMMWORD PTR [ecx]
vmaxps xmm7,xmm6,[ecx]
vminpd xmm2,xmm6,xmm4
vminpd xmm7,xmm6,XMMWORD PTR [ecx]
vminpd xmm7,xmm6,[ecx]
vminps xmm2,xmm6,xmm4
vminps xmm7,xmm6,XMMWORD PTR [ecx]
vminps xmm7,xmm6,[ecx]
vmulpd xmm2,xmm6,xmm4
vmulpd xmm7,xmm6,XMMWORD PTR [ecx]
vmulpd xmm7,xmm6,[ecx]
vmulps xmm2,xmm6,xmm4
vmulps xmm7,xmm6,XMMWORD PTR [ecx]
vmulps xmm7,xmm6,[ecx]
vorpd xmm2,xmm6,xmm4
vorpd xmm7,xmm6,XMMWORD PTR [ecx]
vorpd xmm7,xmm6,[ecx]
vorps xmm2,xmm6,xmm4
vorps xmm7,xmm6,XMMWORD PTR [ecx]
vorps xmm7,xmm6,[ecx]
vpacksswb xmm2,xmm6,xmm4
vpacksswb xmm7,xmm6,XMMWORD PTR [ecx]
vpacksswb xmm7,xmm6,[ecx]
vpackssdw xmm2,xmm6,xmm4
vpackssdw xmm7,xmm6,XMMWORD PTR [ecx]
vpackssdw xmm7,xmm6,[ecx]
vpackuswb xmm2,xmm6,xmm4
vpackuswb xmm7,xmm6,XMMWORD PTR [ecx]
vpackuswb xmm7,xmm6,[ecx]
vpackusdw xmm2,xmm6,xmm4
vpackusdw xmm7,xmm6,XMMWORD PTR [ecx]
vpackusdw xmm7,xmm6,[ecx]
vpaddb xmm2,xmm6,xmm4
vpaddb xmm7,xmm6,XMMWORD PTR [ecx]
vpaddb xmm7,xmm6,[ecx]
vpaddw xmm2,xmm6,xmm4
vpaddw xmm7,xmm6,XMMWORD PTR [ecx]
vpaddw xmm7,xmm6,[ecx]
vpaddd xmm2,xmm6,xmm4
vpaddd xmm7,xmm6,XMMWORD PTR [ecx]
vpaddd xmm7,xmm6,[ecx]
vpaddq xmm2,xmm6,xmm4
vpaddq xmm7,xmm6,XMMWORD PTR [ecx]
vpaddq xmm7,xmm6,[ecx]
vpaddsb xmm2,xmm6,xmm4
vpaddsb xmm7,xmm6,XMMWORD PTR [ecx]
vpaddsb xmm7,xmm6,[ecx]
vpaddsw xmm2,xmm6,xmm4
vpaddsw xmm7,xmm6,XMMWORD PTR [ecx]
vpaddsw xmm7,xmm6,[ecx]
vpaddusb xmm2,xmm6,xmm4
vpaddusb xmm7,xmm6,XMMWORD PTR [ecx]
vpaddusb xmm7,xmm6,[ecx]
vpaddusw xmm2,xmm6,xmm4
vpaddusw xmm7,xmm6,XMMWORD PTR [ecx]
vpaddusw xmm7,xmm6,[ecx]
vpand xmm2,xmm6,xmm4
vpand xmm7,xmm6,XMMWORD PTR [ecx]
vpand xmm7,xmm6,[ecx]
vpandn xmm2,xmm6,xmm4
vpandn xmm7,xmm6,XMMWORD PTR [ecx]
vpandn xmm7,xmm6,[ecx]
vpavgb xmm2,xmm6,xmm4
vpavgb xmm7,xmm6,XMMWORD PTR [ecx]
vpavgb xmm7,xmm6,[ecx]
vpavgw xmm2,xmm6,xmm4
vpavgw xmm7,xmm6,XMMWORD PTR [ecx]
vpavgw xmm7,xmm6,[ecx]
vpclmullqlqdq xmm2,xmm6,xmm4
vpclmullqlqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmullqlqdq xmm7,xmm6,[ecx]
vpclmulhqlqdq xmm2,xmm6,xmm4
vpclmulhqlqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmulhqlqdq xmm7,xmm6,[ecx]
vpclmullqhqdq xmm2,xmm6,xmm4
vpclmullqhqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmullqhqdq xmm7,xmm6,[ecx]
vpclmulhqhqdq xmm2,xmm6,xmm4
vpclmulhqhqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpclmulhqhqdq xmm7,xmm6,[ecx]
vpcmpeqb xmm2,xmm6,xmm4
vpcmpeqb xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqb xmm7,xmm6,[ecx]
vpcmpeqw xmm2,xmm6,xmm4
vpcmpeqw xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqw xmm7,xmm6,[ecx]
vpcmpeqd xmm2,xmm6,xmm4
vpcmpeqd xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqd xmm7,xmm6,[ecx]
vpcmpeqq xmm2,xmm6,xmm4
vpcmpeqq xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpeqq xmm7,xmm6,[ecx]
vpcmpgtb xmm2,xmm6,xmm4
vpcmpgtb xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtb xmm7,xmm6,[ecx]
vpcmpgtw xmm2,xmm6,xmm4
vpcmpgtw xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtw xmm7,xmm6,[ecx]
vpcmpgtd xmm2,xmm6,xmm4
vpcmpgtd xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtd xmm7,xmm6,[ecx]
vpcmpgtq xmm2,xmm6,xmm4
vpcmpgtq xmm7,xmm6,XMMWORD PTR [ecx]
vpcmpgtq xmm7,xmm6,[ecx]
vpermilpd xmm2,xmm6,xmm4
vpermilpd xmm7,xmm6,XMMWORD PTR [ecx]
vpermilpd xmm7,xmm6,[ecx]
vpermilps xmm2,xmm6,xmm4
vpermilps xmm7,xmm6,XMMWORD PTR [ecx]
vpermilps xmm7,xmm6,[ecx]
vphaddw xmm2,xmm6,xmm4
vphaddw xmm7,xmm6,XMMWORD PTR [ecx]
vphaddw xmm7,xmm6,[ecx]
vphaddd xmm2,xmm6,xmm4
vphaddd xmm7,xmm6,XMMWORD PTR [ecx]
vphaddd xmm7,xmm6,[ecx]
vphaddsw xmm2,xmm6,xmm4
vphaddsw xmm7,xmm6,XMMWORD PTR [ecx]
vphaddsw xmm7,xmm6,[ecx]
vphsubw xmm2,xmm6,xmm4
vphsubw xmm7,xmm6,XMMWORD PTR [ecx]
vphsubw xmm7,xmm6,[ecx]
vphsubd xmm2,xmm6,xmm4
vphsubd xmm7,xmm6,XMMWORD PTR [ecx]
vphsubd xmm7,xmm6,[ecx]
vphsubsw xmm2,xmm6,xmm4
vphsubsw xmm7,xmm6,XMMWORD PTR [ecx]
vphsubsw xmm7,xmm6,[ecx]
vpmaddwd xmm2,xmm6,xmm4
vpmaddwd xmm7,xmm6,XMMWORD PTR [ecx]
vpmaddwd xmm7,xmm6,[ecx]
vpmaddubsw xmm2,xmm6,xmm4
vpmaddubsw xmm7,xmm6,XMMWORD PTR [ecx]
vpmaddubsw xmm7,xmm6,[ecx]
vpmaxsb xmm2,xmm6,xmm4
vpmaxsb xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxsb xmm7,xmm6,[ecx]
vpmaxsw xmm2,xmm6,xmm4
vpmaxsw xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxsw xmm7,xmm6,[ecx]
vpmaxsd xmm2,xmm6,xmm4
vpmaxsd xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxsd xmm7,xmm6,[ecx]
vpmaxub xmm2,xmm6,xmm4
vpmaxub xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxub xmm7,xmm6,[ecx]
vpmaxuw xmm2,xmm6,xmm4
vpmaxuw xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxuw xmm7,xmm6,[ecx]
vpmaxud xmm2,xmm6,xmm4
vpmaxud xmm7,xmm6,XMMWORD PTR [ecx]
vpmaxud xmm7,xmm6,[ecx]
vpminsb xmm2,xmm6,xmm4
vpminsb xmm7,xmm6,XMMWORD PTR [ecx]
vpminsb xmm7,xmm6,[ecx]
vpminsw xmm2,xmm6,xmm4
vpminsw xmm7,xmm6,XMMWORD PTR [ecx]
vpminsw xmm7,xmm6,[ecx]
vpminsd xmm2,xmm6,xmm4
vpminsd xmm7,xmm6,XMMWORD PTR [ecx]
vpminsd xmm7,xmm6,[ecx]
vpminub xmm2,xmm6,xmm4
vpminub xmm7,xmm6,XMMWORD PTR [ecx]
vpminub xmm7,xmm6,[ecx]
vpminuw xmm2,xmm6,xmm4
vpminuw xmm7,xmm6,XMMWORD PTR [ecx]
vpminuw xmm7,xmm6,[ecx]
vpminud xmm2,xmm6,xmm4
vpminud xmm7,xmm6,XMMWORD PTR [ecx]
vpminud xmm7,xmm6,[ecx]
vpmulhuw xmm2,xmm6,xmm4
vpmulhuw xmm7,xmm6,XMMWORD PTR [ecx]
vpmulhuw xmm7,xmm6,[ecx]
vpmulhrsw xmm2,xmm6,xmm4
vpmulhrsw xmm7,xmm6,XMMWORD PTR [ecx]
vpmulhrsw xmm7,xmm6,[ecx]
vpmulhw xmm2,xmm6,xmm4
vpmulhw xmm7,xmm6,XMMWORD PTR [ecx]
vpmulhw xmm7,xmm6,[ecx]
vpmullw xmm2,xmm6,xmm4
vpmullw xmm7,xmm6,XMMWORD PTR [ecx]
vpmullw xmm7,xmm6,[ecx]
vpmulld xmm2,xmm6,xmm4
vpmulld xmm7,xmm6,XMMWORD PTR [ecx]
vpmulld xmm7,xmm6,[ecx]
vpmuludq xmm2,xmm6,xmm4
vpmuludq xmm7,xmm6,XMMWORD PTR [ecx]
vpmuludq xmm7,xmm6,[ecx]
vpmuldq xmm2,xmm6,xmm4
vpmuldq xmm7,xmm6,XMMWORD PTR [ecx]
vpmuldq xmm7,xmm6,[ecx]
vpor xmm2,xmm6,xmm4
vpor xmm7,xmm6,XMMWORD PTR [ecx]
vpor xmm7,xmm6,[ecx]
vpsadbw xmm2,xmm6,xmm4
vpsadbw xmm7,xmm6,XMMWORD PTR [ecx]
vpsadbw xmm7,xmm6,[ecx]
vpshufb xmm2,xmm6,xmm4
vpshufb xmm7,xmm6,XMMWORD PTR [ecx]
vpshufb xmm7,xmm6,[ecx]
vpsignb xmm2,xmm6,xmm4
vpsignb xmm7,xmm6,XMMWORD PTR [ecx]
vpsignb xmm7,xmm6,[ecx]
vpsignw xmm2,xmm6,xmm4
vpsignw xmm7,xmm6,XMMWORD PTR [ecx]
vpsignw xmm7,xmm6,[ecx]
vpsignd xmm2,xmm6,xmm4
vpsignd xmm7,xmm6,XMMWORD PTR [ecx]
vpsignd xmm7,xmm6,[ecx]
vpsllw xmm2,xmm6,xmm4
vpsllw xmm7,xmm6,XMMWORD PTR [ecx]
vpsllw xmm7,xmm6,[ecx]
vpslld xmm2,xmm6,xmm4
vpslld xmm7,xmm6,XMMWORD PTR [ecx]
vpslld xmm7,xmm6,[ecx]
vpsllq xmm2,xmm6,xmm4
vpsllq xmm7,xmm6,XMMWORD PTR [ecx]
vpsllq xmm7,xmm6,[ecx]
vpsraw xmm2,xmm6,xmm4
vpsraw xmm7,xmm6,XMMWORD PTR [ecx]
vpsraw xmm7,xmm6,[ecx]
vpsrad xmm2,xmm6,xmm4
vpsrad xmm7,xmm6,XMMWORD PTR [ecx]
vpsrad xmm7,xmm6,[ecx]
vpsrlw xmm2,xmm6,xmm4
vpsrlw xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlw xmm7,xmm6,[ecx]
vpsrld xmm2,xmm6,xmm4
vpsrld xmm7,xmm6,XMMWORD PTR [ecx]
vpsrld xmm7,xmm6,[ecx]
vpsrlq xmm2,xmm6,xmm4
vpsrlq xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlq xmm7,xmm6,[ecx]
vpsubb xmm2,xmm6,xmm4
vpsubb xmm7,xmm6,XMMWORD PTR [ecx]
vpsubb xmm7,xmm6,[ecx]
vpsubw xmm2,xmm6,xmm4
vpsubw xmm7,xmm6,XMMWORD PTR [ecx]
vpsubw xmm7,xmm6,[ecx]
vpsubd xmm2,xmm6,xmm4
vpsubd xmm7,xmm6,XMMWORD PTR [ecx]
vpsubd xmm7,xmm6,[ecx]
vpsubq xmm2,xmm6,xmm4
vpsubq xmm7,xmm6,XMMWORD PTR [ecx]
vpsubq xmm7,xmm6,[ecx]
vpsubsb xmm2,xmm6,xmm4
vpsubsb xmm7,xmm6,XMMWORD PTR [ecx]
vpsubsb xmm7,xmm6,[ecx]
vpsubsw xmm2,xmm6,xmm4
vpsubsw xmm7,xmm6,XMMWORD PTR [ecx]
vpsubsw xmm7,xmm6,[ecx]
vpsubusb xmm2,xmm6,xmm4
vpsubusb xmm7,xmm6,XMMWORD PTR [ecx]
vpsubusb xmm7,xmm6,[ecx]
vpsubusw xmm2,xmm6,xmm4
vpsubusw xmm7,xmm6,XMMWORD PTR [ecx]
vpsubusw xmm7,xmm6,[ecx]
vpunpckhbw xmm2,xmm6,xmm4
vpunpckhbw xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhbw xmm7,xmm6,[ecx]
vpunpckhwd xmm2,xmm6,xmm4
vpunpckhwd xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhwd xmm7,xmm6,[ecx]
vpunpckhdq xmm2,xmm6,xmm4
vpunpckhdq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhdq xmm7,xmm6,[ecx]
vpunpckhqdq xmm2,xmm6,xmm4
vpunpckhqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckhqdq xmm7,xmm6,[ecx]
vpunpcklbw xmm2,xmm6,xmm4
vpunpcklbw xmm7,xmm6,XMMWORD PTR [ecx]
vpunpcklbw xmm7,xmm6,[ecx]
vpunpcklwd xmm2,xmm6,xmm4
vpunpcklwd xmm7,xmm6,XMMWORD PTR [ecx]
vpunpcklwd xmm7,xmm6,[ecx]
vpunpckldq xmm2,xmm6,xmm4
vpunpckldq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpckldq xmm7,xmm6,[ecx]
vpunpcklqdq xmm2,xmm6,xmm4
vpunpcklqdq xmm7,xmm6,XMMWORD PTR [ecx]
vpunpcklqdq xmm7,xmm6,[ecx]
vpxor xmm2,xmm6,xmm4
vpxor xmm7,xmm6,XMMWORD PTR [ecx]
vpxor xmm7,xmm6,[ecx]
vsubpd xmm2,xmm6,xmm4
vsubpd xmm7,xmm6,XMMWORD PTR [ecx]
vsubpd xmm7,xmm6,[ecx]
vsubps xmm2,xmm6,xmm4
vsubps xmm7,xmm6,XMMWORD PTR [ecx]
vsubps xmm7,xmm6,[ecx]
vunpckhpd xmm2,xmm6,xmm4
vunpckhpd xmm7,xmm6,XMMWORD PTR [ecx]
vunpckhpd xmm7,xmm6,[ecx]
vunpckhps xmm2,xmm6,xmm4
vunpckhps xmm7,xmm6,XMMWORD PTR [ecx]
vunpckhps xmm7,xmm6,[ecx]
vunpcklpd xmm2,xmm6,xmm4
vunpcklpd xmm7,xmm6,XMMWORD PTR [ecx]
vunpcklpd xmm7,xmm6,[ecx]
vunpcklps xmm2,xmm6,xmm4
vunpcklps xmm7,xmm6,XMMWORD PTR [ecx]
vunpcklps xmm7,xmm6,[ecx]
vxorpd xmm2,xmm6,xmm4
vxorpd xmm7,xmm6,XMMWORD PTR [ecx]
vxorpd xmm7,xmm6,[ecx]
vxorps xmm2,xmm6,xmm4
vxorps xmm7,xmm6,XMMWORD PTR [ecx]
vxorps xmm7,xmm6,[ecx]
vaesenc xmm2,xmm6,xmm4
vaesenc xmm7,xmm6,XMMWORD PTR [ecx]
vaesenc xmm7,xmm6,[ecx]
vaesenclast xmm2,xmm6,xmm4
vaesenclast xmm7,xmm6,XMMWORD PTR [ecx]
vaesenclast xmm7,xmm6,[ecx]
vaesdec xmm2,xmm6,xmm4
vaesdec xmm7,xmm6,XMMWORD PTR [ecx]
vaesdec xmm7,xmm6,[ecx]
vaesdeclast xmm2,xmm6,xmm4
vaesdeclast xmm7,xmm6,XMMWORD PTR [ecx]
vaesdeclast xmm7,xmm6,[ecx]
vcmpeqpd xmm2,xmm6,xmm4
vcmpeqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeqpd xmm7,xmm6,[ecx]
vcmpltpd xmm2,xmm6,xmm4
vcmpltpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpltpd xmm7,xmm6,[ecx]
vcmplepd xmm2,xmm6,xmm4
vcmplepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmplepd xmm7,xmm6,[ecx]
vcmpunordpd xmm2,xmm6,xmm4
vcmpunordpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunordpd xmm7,xmm6,[ecx]
vcmpneqpd xmm2,xmm6,xmm4
vcmpneqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneqpd xmm7,xmm6,[ecx]
vcmpnltpd xmm2,xmm6,xmm4
vcmpnltpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnltpd xmm7,xmm6,[ecx]
vcmpnlepd xmm2,xmm6,xmm4
vcmpnlepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnlepd xmm7,xmm6,[ecx]
vcmpordpd xmm2,xmm6,xmm4
vcmpordpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpordpd xmm7,xmm6,[ecx]
vcmpeq_uqpd xmm2,xmm6,xmm4
vcmpeq_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_uqpd xmm7,xmm6,[ecx]
vcmpngepd xmm2,xmm6,xmm4
vcmpngepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngepd xmm7,xmm6,[ecx]
vcmpngtpd xmm2,xmm6,xmm4
vcmpngtpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngtpd xmm7,xmm6,[ecx]
vcmpfalsepd xmm2,xmm6,xmm4
vcmpfalsepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalsepd xmm7,xmm6,[ecx]
vcmpneq_oqpd xmm2,xmm6,xmm4
vcmpneq_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_oqpd xmm7,xmm6,[ecx]
vcmpgepd xmm2,xmm6,xmm4
vcmpgepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgepd xmm7,xmm6,[ecx]
vcmpgtpd xmm2,xmm6,xmm4
vcmpgtpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgtpd xmm7,xmm6,[ecx]
vcmptruepd xmm2,xmm6,xmm4
vcmptruepd xmm7,xmm6,XMMWORD PTR [ecx]
vcmptruepd xmm7,xmm6,[ecx]
vcmpeq_ospd xmm2,xmm6,xmm4
vcmpeq_ospd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_ospd xmm7,xmm6,[ecx]
vcmplt_oqpd xmm2,xmm6,xmm4
vcmplt_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmplt_oqpd xmm7,xmm6,[ecx]
vcmple_oqpd xmm2,xmm6,xmm4
vcmple_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmple_oqpd xmm7,xmm6,[ecx]
vcmpunord_spd xmm2,xmm6,xmm4
vcmpunord_spd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunord_spd xmm7,xmm6,[ecx]
vcmpneq_uspd xmm2,xmm6,xmm4
vcmpneq_uspd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_uspd xmm7,xmm6,[ecx]
vcmpnlt_uqpd xmm2,xmm6,xmm4
vcmpnlt_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnlt_uqpd xmm7,xmm6,[ecx]
vcmpnle_uqpd xmm2,xmm6,xmm4
vcmpnle_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnle_uqpd xmm7,xmm6,[ecx]
vcmpord_spd xmm2,xmm6,xmm4
vcmpord_spd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpord_spd xmm7,xmm6,[ecx]
vcmpeq_uspd xmm2,xmm6,xmm4
vcmpeq_uspd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_uspd xmm7,xmm6,[ecx]
vcmpnge_uqpd xmm2,xmm6,xmm4
vcmpnge_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnge_uqpd xmm7,xmm6,[ecx]
vcmpngt_uqpd xmm2,xmm6,xmm4
vcmpngt_uqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngt_uqpd xmm7,xmm6,[ecx]
vcmpfalse_ospd xmm2,xmm6,xmm4
vcmpfalse_ospd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalse_ospd xmm7,xmm6,[ecx]
vcmpneq_ospd xmm2,xmm6,xmm4
vcmpneq_ospd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_ospd xmm7,xmm6,[ecx]
vcmpge_oqpd xmm2,xmm6,xmm4
vcmpge_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpge_oqpd xmm7,xmm6,[ecx]
vcmpgt_oqpd xmm2,xmm6,xmm4
vcmpgt_oqpd xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgt_oqpd xmm7,xmm6,[ecx]
vcmptrue_uspd xmm2,xmm6,xmm4
vcmptrue_uspd xmm7,xmm6,XMMWORD PTR [ecx]
vcmptrue_uspd xmm7,xmm6,[ecx]
vcmpeqps xmm2,xmm6,xmm4
vcmpeqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeqps xmm7,xmm6,[ecx]
vcmpltps xmm2,xmm6,xmm4
vcmpltps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpltps xmm7,xmm6,[ecx]
vcmpleps xmm2,xmm6,xmm4
vcmpleps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpleps xmm7,xmm6,[ecx]
vcmpunordps xmm2,xmm6,xmm4
vcmpunordps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunordps xmm7,xmm6,[ecx]
vcmpneqps xmm2,xmm6,xmm4
vcmpneqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneqps xmm7,xmm6,[ecx]
vcmpnltps xmm2,xmm6,xmm4
vcmpnltps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnltps xmm7,xmm6,[ecx]
vcmpnleps xmm2,xmm6,xmm4
vcmpnleps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnleps xmm7,xmm6,[ecx]
vcmpordps xmm2,xmm6,xmm4
vcmpordps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpordps xmm7,xmm6,[ecx]
vcmpeq_uqps xmm2,xmm6,xmm4
vcmpeq_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_uqps xmm7,xmm6,[ecx]
vcmpngeps xmm2,xmm6,xmm4
vcmpngeps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngeps xmm7,xmm6,[ecx]
vcmpngtps xmm2,xmm6,xmm4
vcmpngtps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngtps xmm7,xmm6,[ecx]
vcmpfalseps xmm2,xmm6,xmm4
vcmpfalseps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalseps xmm7,xmm6,[ecx]
vcmpneq_oqps xmm2,xmm6,xmm4
vcmpneq_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_oqps xmm7,xmm6,[ecx]
vcmpgeps xmm2,xmm6,xmm4
vcmpgeps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgeps xmm7,xmm6,[ecx]
vcmpgtps xmm2,xmm6,xmm4
vcmpgtps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgtps xmm7,xmm6,[ecx]
vcmptrueps xmm2,xmm6,xmm4
vcmptrueps xmm7,xmm6,XMMWORD PTR [ecx]
vcmptrueps xmm7,xmm6,[ecx]
vcmpeq_osps xmm2,xmm6,xmm4
vcmpeq_osps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_osps xmm7,xmm6,[ecx]
vcmplt_oqps xmm2,xmm6,xmm4
vcmplt_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmplt_oqps xmm7,xmm6,[ecx]
vcmple_oqps xmm2,xmm6,xmm4
vcmple_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmple_oqps xmm7,xmm6,[ecx]
vcmpunord_sps xmm2,xmm6,xmm4
vcmpunord_sps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpunord_sps xmm7,xmm6,[ecx]
vcmpneq_usps xmm2,xmm6,xmm4
vcmpneq_usps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_usps xmm7,xmm6,[ecx]
vcmpnlt_uqps xmm2,xmm6,xmm4
vcmpnlt_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnlt_uqps xmm7,xmm6,[ecx]
vcmpnle_uqps xmm2,xmm6,xmm4
vcmpnle_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnle_uqps xmm7,xmm6,[ecx]
vcmpord_sps xmm2,xmm6,xmm4
vcmpord_sps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpord_sps xmm7,xmm6,[ecx]
vcmpeq_usps xmm2,xmm6,xmm4
vcmpeq_usps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpeq_usps xmm7,xmm6,[ecx]
vcmpnge_uqps xmm2,xmm6,xmm4
vcmpnge_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpnge_uqps xmm7,xmm6,[ecx]
vcmpngt_uqps xmm2,xmm6,xmm4
vcmpngt_uqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpngt_uqps xmm7,xmm6,[ecx]
vcmpfalse_osps xmm2,xmm6,xmm4
vcmpfalse_osps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpfalse_osps xmm7,xmm6,[ecx]
vcmpneq_osps xmm2,xmm6,xmm4
vcmpneq_osps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpneq_osps xmm7,xmm6,[ecx]
vcmpge_oqps xmm2,xmm6,xmm4
vcmpge_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpge_oqps xmm7,xmm6,[ecx]
vcmpgt_oqps xmm2,xmm6,xmm4
vcmpgt_oqps xmm7,xmm6,XMMWORD PTR [ecx]
vcmpgt_oqps xmm7,xmm6,[ecx]
vcmptrue_usps xmm2,xmm6,xmm4
vcmptrue_usps xmm7,xmm6,XMMWORD PTR [ecx]
vcmptrue_usps xmm7,xmm6,[ecx]
vgf2p8mulb xmm6, xmm5, xmm4
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [ecx]
vgf2p8mulb xmm6, xmm5, [ecx]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx+2032]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx+2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx-2048]
vgf2p8mulb xmm6, xmm5, XMMWORD PTR [edx-2064]
# Tests for op mem128, xmm, xmm
vmaskmovps xmm6,xmm4,XMMWORD PTR [ecx]
vmaskmovps xmm6,xmm4,[ecx]
vmaskmovpd xmm6,xmm4,XMMWORD PTR [ecx]
vmaskmovpd xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem128, xmm
vaeskeygenassist xmm6,xmm4,7
vaeskeygenassist xmm6,XMMWORD PTR [ecx],7
vaeskeygenassist xmm6,[ecx],7
vpcmpestri xmm6,xmm4,7
vpcmpestri xmm6,XMMWORD PTR [ecx],7
vpcmpestri xmm6,[ecx],7
vpcmpestrm xmm6,xmm4,7
vpcmpestrm xmm6,XMMWORD PTR [ecx],7
vpcmpestrm xmm6,[ecx],7
vpcmpistri xmm6,xmm4,7
vpcmpistri xmm6,XMMWORD PTR [ecx],7
vpcmpistri xmm6,[ecx],7
vpcmpistrm xmm6,xmm4,7
vpcmpistrm xmm6,XMMWORD PTR [ecx],7
vpcmpistrm xmm6,[ecx],7
vpermilpd xmm6,xmm4,7
vpermilpd xmm6,XMMWORD PTR [ecx],7
vpermilpd xmm6,[ecx],7
vpermilps xmm6,xmm4,7
vpermilps xmm6,XMMWORD PTR [ecx],7
vpermilps xmm6,[ecx],7
vpshufd xmm6,xmm4,7
vpshufd xmm6,XMMWORD PTR [ecx],7
vpshufd xmm6,[ecx],7
vpshufhw xmm6,xmm4,7
vpshufhw xmm6,XMMWORD PTR [ecx],7
vpshufhw xmm6,[ecx],7
vpshuflw xmm6,xmm4,7
vpshuflw xmm6,XMMWORD PTR [ecx],7
vpshuflw xmm6,[ecx],7
vroundpd xmm6,xmm4,7
vroundpd xmm6,XMMWORD PTR [ecx],7
vroundpd xmm6,[ecx],7
vroundps xmm6,xmm4,7
vroundps xmm6,XMMWORD PTR [ecx],7
vroundps xmm6,[ecx],7
# Tests for op xmm, xmm, mem128
vmaskmovps XMMWORD PTR [ecx],xmm6,xmm4
vmaskmovps [ecx],xmm6,xmm4
vmaskmovpd XMMWORD PTR [ecx],xmm6,xmm4
vmaskmovpd [ecx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
vblendpd xmm2,xmm6,xmm4,7
vblendpd xmm2,xmm6,XMMWORD PTR [ecx],7
vblendpd xmm2,xmm6,[ecx],7
vblendps xmm2,xmm6,xmm4,7
vblendps xmm2,xmm6,XMMWORD PTR [ecx],7
vblendps xmm2,xmm6,[ecx],7
vcmppd xmm2,xmm6,xmm4,7
vcmppd xmm2,xmm6,XMMWORD PTR [ecx],7
vcmppd xmm2,xmm6,[ecx],7
vcmpps xmm2,xmm6,xmm4,7
vcmpps xmm2,xmm6,XMMWORD PTR [ecx],7
vcmpps xmm2,xmm6,[ecx],7
vdppd xmm2,xmm6,xmm4,7
vdppd xmm2,xmm6,XMMWORD PTR [ecx],7
vdppd xmm2,xmm6,[ecx],7
vdpps xmm2,xmm6,xmm4,7
vdpps xmm2,xmm6,XMMWORD PTR [ecx],7
vdpps xmm2,xmm6,[ecx],7
vmpsadbw xmm2,xmm6,xmm4,7
vmpsadbw xmm2,xmm6,XMMWORD PTR [ecx],7
vmpsadbw xmm2,xmm6,[ecx],7
vpalignr xmm2,xmm6,xmm4,7
vpalignr xmm2,xmm6,XMMWORD PTR [ecx],7
vpalignr xmm2,xmm6,[ecx],7
vpblendw xmm2,xmm6,xmm4,7
vpblendw xmm2,xmm6,XMMWORD PTR [ecx],7
vpblendw xmm2,xmm6,[ecx],7
vpclmulqdq xmm2,xmm6,xmm4,7
vpclmulqdq xmm2,xmm6,XMMWORD PTR [ecx],7
vpclmulqdq xmm2,xmm6,[ecx],7
vshufpd xmm2,xmm6,xmm4,7
vshufpd xmm2,xmm6,XMMWORD PTR [ecx],7
vshufpd xmm2,xmm6,[ecx],7
vshufps xmm2,xmm6,xmm4,7
vshufps xmm2,xmm6,XMMWORD PTR [ecx],7
vshufps xmm2,xmm6,[ecx],7
vgf2p8affineqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineqb xmm6, xmm5, xmm4, 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [ecx], 123
vgf2p8affineqb xmm6, xmm5, [ecx], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx+2032], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx+2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx-2048], 123
vgf2p8affineqb xmm6, xmm5, XMMWORD PTR [edx-2064], 123
vgf2p8affineinvqb xmm6, xmm5, xmm4, 0xab
vgf2p8affineinvqb xmm6, xmm5, xmm4, 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [ecx], 123
vgf2p8affineinvqb xmm6, xmm5, [ecx], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx+2032], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx+2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx-2048], 123
vgf2p8affineinvqb xmm6, xmm5, XMMWORD PTR [edx-2064], 123
# Tests for op xmm, xmm/mem128, xmm, xmm
vblendvpd xmm7,xmm2,xmm6,xmm4
vblendvpd xmm7,xmm2,XMMWORD PTR [ecx],xmm4
vblendvpd xmm7,xmm2,[ecx],xmm4
vblendvps xmm7,xmm2,xmm6,xmm4
vblendvps xmm7,xmm2,XMMWORD PTR [ecx],xmm4
vblendvps xmm7,xmm2,[ecx],xmm4
vpblendvb xmm7,xmm2,xmm6,xmm4
vpblendvb xmm7,xmm2,XMMWORD PTR [ecx],xmm4
vpblendvb xmm7,xmm2,[ecx],xmm4
# Tests for op mem64, ymm
vbroadcastsd ymm4,QWORD PTR [ecx]
vbroadcastsd ymm4,[ecx]
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [ecx]
vcomisd xmm4,[ecx]
vcvtdq2pd xmm6,xmm4
vcvtdq2pd xmm4,QWORD PTR [ecx]
vcvtdq2pd xmm4,[ecx]
vcvtps2pd xmm6,xmm4
vcvtps2pd xmm4,QWORD PTR [ecx]
vcvtps2pd xmm4,[ecx]
vmovddup xmm6,xmm4
vmovddup xmm4,QWORD PTR [ecx]
vmovddup xmm4,[ecx]
vpmovsxbw xmm6,xmm4
vpmovsxbw xmm4,QWORD PTR [ecx]
vpmovsxbw xmm4,[ecx]
vpmovsxwd xmm6,xmm4
vpmovsxwd xmm4,QWORD PTR [ecx]
vpmovsxwd xmm4,[ecx]
vpmovsxdq xmm6,xmm4
vpmovsxdq xmm4,QWORD PTR [ecx]
vpmovsxdq xmm4,[ecx]
vpmovzxbw xmm6,xmm4
vpmovzxbw xmm4,QWORD PTR [ecx]
vpmovzxbw xmm4,[ecx]
vpmovzxwd xmm6,xmm4
vpmovzxwd xmm4,QWORD PTR [ecx]
vpmovzxwd xmm4,[ecx]
vpmovzxdq xmm6,xmm4
vpmovzxdq xmm4,QWORD PTR [ecx]
vpmovzxdq xmm4,[ecx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [ecx]
vucomisd xmm4,[ecx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [ecx]
vmovsd xmm4,[ecx]
# Tests for op xmm, mem64
vmovlpd QWORD PTR [ecx],xmm4
vmovlpd [ecx],xmm4
vmovlps QWORD PTR [ecx],xmm4
vmovlps [ecx],xmm4
vmovhpd QWORD PTR [ecx],xmm4
vmovhpd [ecx],xmm4
vmovhps QWORD PTR [ecx],xmm4
vmovhps [ecx],xmm4
vmovsd QWORD PTR [ecx],xmm4
vmovsd [ecx],xmm4
# Tests for op xmm, regq/mem64
# Tests for op regq/mem64, xmm
vmovq QWORD PTR [ecx],xmm4
vmovq xmm4,QWORD PTR [ecx]
vmovq [ecx],xmm4
vmovq xmm4,[ecx]
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [ecx]
vcvtsd2si ecx,[ecx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [ecx]
vcvttsd2si ecx,[ecx]
# Tests for op mem64, xmm, xmm
vmovlpd xmm6,xmm4,QWORD PTR [ecx]
vmovlpd xmm6,xmm4,[ecx]
vmovlps xmm6,xmm4,QWORD PTR [ecx]
vmovlps xmm6,xmm4,[ecx]
vmovhpd xmm6,xmm4,QWORD PTR [ecx]
vmovhpd xmm6,xmm4,[ecx]
vmovhps xmm6,xmm4,QWORD PTR [ecx]
vmovhps xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [ecx],7
vcmpsd xmm2,xmm6,[ecx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [ecx],7
vroundsd xmm2,xmm6,[ecx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [ecx]
vaddsd xmm2,xmm6,[ecx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [ecx]
vcvtsd2ss xmm2,xmm6,[ecx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [ecx]
vdivsd xmm2,xmm6,[ecx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [ecx]
vmaxsd xmm2,xmm6,[ecx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [ecx]
vminsd xmm2,xmm6,[ecx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [ecx]
vmulsd xmm2,xmm6,[ecx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [ecx]
vsqrtsd xmm2,xmm6,[ecx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [ecx]
vsubsd xmm2,xmm6,[ecx]
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeqsd xmm2,xmm6,[ecx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpltsd xmm2,xmm6,[ecx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [ecx]
vcmplesd xmm2,xmm6,[ecx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpunordsd xmm2,xmm6,[ecx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneqsd xmm2,xmm6,[ecx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnltsd xmm2,xmm6,[ecx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlesd xmm2,xmm6,[ecx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpordsd xmm2,xmm6,[ecx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_uqsd xmm2,xmm6,[ecx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [ecx]
vcmpngesd xmm2,xmm6,[ecx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngtsd xmm2,xmm6,[ecx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalsesd xmm2,xmm6,[ecx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_oqsd xmm2,xmm6,[ecx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [ecx]
vcmpgesd xmm2,xmm6,[ecx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgtsd xmm2,xmm6,[ecx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [ecx]
vcmptruesd xmm2,xmm6,[ecx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ossd xmm2,xmm6,[ecx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmplt_oqsd xmm2,xmm6,[ecx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmple_oqsd xmm2,xmm6,[ecx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpunord_ssd xmm2,xmm6,[ecx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ussd xmm2,xmm6,[ecx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlt_uqsd xmm2,xmm6,[ecx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnle_uqsd xmm2,xmm6,[ecx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpord_ssd xmm2,xmm6,[ecx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ussd xmm2,xmm6,[ecx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnge_uqsd xmm2,xmm6,[ecx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngt_uqsd xmm2,xmm6,[ecx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalse_ossd xmm2,xmm6,[ecx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ossd xmm2,xmm6,[ecx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpge_oqsd xmm2,xmm6,[ecx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgt_oqsd xmm2,xmm6,[ecx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmptrue_ussd xmm2,xmm6,[ecx]
# Tests for op mem64
vldmxcsr DWORD PTR [ecx]
vldmxcsr [ecx]
vstmxcsr DWORD PTR [ecx]
vstmxcsr [ecx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [ecx]
vaddss xmm2,xmm6,[ecx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [ecx]
vcvtss2sd xmm2,xmm6,[ecx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [ecx]
vdivss xmm2,xmm6,[ecx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [ecx]
vmaxss xmm2,xmm6,[ecx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [ecx]
vminss xmm2,xmm6,[ecx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [ecx]
vmulss xmm2,xmm6,[ecx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [ecx]
vrcpss xmm2,xmm6,[ecx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [ecx]
vrsqrtss xmm2,xmm6,[ecx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [ecx]
vsqrtss xmm2,xmm6,[ecx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [ecx]
vsubss xmm2,xmm6,[ecx]
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeqss xmm2,xmm6,[ecx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [ecx]
vcmpltss xmm2,xmm6,[ecx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [ecx]
vcmpless xmm2,xmm6,[ecx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [ecx]
vcmpunordss xmm2,xmm6,[ecx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneqss xmm2,xmm6,[ecx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [ecx]
vcmpnltss xmm2,xmm6,[ecx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [ecx]
vcmpnless xmm2,xmm6,[ecx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [ecx]
vcmpordss xmm2,xmm6,[ecx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_uqss xmm2,xmm6,[ecx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [ecx]
vcmpngess xmm2,xmm6,[ecx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [ecx]
vcmpngtss xmm2,xmm6,[ecx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [ecx]
vcmpfalsess xmm2,xmm6,[ecx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_oqss xmm2,xmm6,[ecx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [ecx]
vcmpgess xmm2,xmm6,[ecx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [ecx]
vcmpgtss xmm2,xmm6,[ecx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [ecx]
vcmptruess xmm2,xmm6,[ecx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_osss xmm2,xmm6,[ecx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmplt_oqss xmm2,xmm6,[ecx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmple_oqss xmm2,xmm6,[ecx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpunord_sss xmm2,xmm6,[ecx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_usss xmm2,xmm6,[ecx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnlt_uqss xmm2,xmm6,[ecx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnle_uqss xmm2,xmm6,[ecx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpord_sss xmm2,xmm6,[ecx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_usss xmm2,xmm6,[ecx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnge_uqss xmm2,xmm6,[ecx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpngt_uqss xmm2,xmm6,[ecx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpfalse_osss xmm2,xmm6,[ecx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_osss xmm2,xmm6,[ecx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpge_oqss xmm2,xmm6,[ecx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpgt_oqss xmm2,xmm6,[ecx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [ecx]
vcmptrue_usss xmm2,xmm6,[ecx]
# Tests for op mem32, ymm
vbroadcastss ymm4,DWORD PTR [ecx]
vbroadcastss ymm4,[ecx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [ecx]
vcomiss xmm4,[ecx]
vpmovsxbd xmm6,xmm4
vpmovsxbd xmm4,DWORD PTR [ecx]
vpmovsxbd xmm4,[ecx]
vpmovsxwq xmm6,xmm4
vpmovsxwq xmm4,DWORD PTR [ecx]
vpmovsxwq xmm4,[ecx]
vpmovzxbd xmm6,xmm4
vpmovzxbd xmm4,DWORD PTR [ecx]
vpmovzxbd xmm4,[ecx]
vpmovzxwq xmm6,xmm4
vpmovzxwq xmm4,DWORD PTR [ecx]
vpmovzxwq xmm4,[ecx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [ecx]
vucomiss xmm4,[ecx]
# Tests for op mem32, xmm
vbroadcastss xmm4,DWORD PTR [ecx]
vbroadcastss xmm4,[ecx]
vmovss xmm4,DWORD PTR [ecx]
vmovss xmm4,[ecx]
# Tests for op xmm, mem32
vmovss DWORD PTR [ecx],xmm4
vmovss [ecx],xmm4
# Tests for op xmm, regl/mem32
# Tests for op regl/mem32, xmm
vmovd ecx,xmm4
vmovd DWORD PTR [ecx],xmm4
vmovd xmm4,ecx
vmovd xmm4,DWORD PTR [ecx]
vmovd [ecx],xmm4
vmovd xmm4,[ecx]
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [ecx]
vcvtss2si ecx,[ecx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [ecx]
vcvttss2si ecx,[ecx]
# Tests for op imm8, xmm, regq/mem32
vextractps DWORD PTR [ecx],xmm4,7
vextractps [ecx],xmm4,7
# Tests for op imm8, xmm, regl/mem32
vpextrd ecx,xmm4,7
vpextrd DWORD PTR [ecx],xmm4,7
vpextrd [ecx],xmm4,7
vextractps ecx,xmm4,7
vextractps DWORD PTR [ecx],xmm4,7
vextractps [ecx],xmm4,7
# Tests for op imm8, regl/mem32, xmm, xmm
vpinsrd xmm6,xmm4,ecx,7
vpinsrd xmm6,xmm4,DWORD PTR [ecx],7
vpinsrd xmm6,xmm4,[ecx],7
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2sd xmm6,xmm4,[ecx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2ss xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [ecx],7
vcmpss xmm2,xmm6,[ecx],7
vinsertps xmm2,xmm6,xmm4,7
vinsertps xmm2,xmm6,DWORD PTR [ecx],7
vinsertps xmm2,xmm6,[ecx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [ecx],7
vroundss xmm2,xmm6,[ecx],7
# Tests for op xmm/m16, xmm
vpmovsxbq xmm6,xmm4
vpmovsxbq xmm4,WORD PTR [ecx]
vpmovsxbq xmm4,[ecx]
vpmovzxbq xmm6,xmm4
vpmovzxbq xmm4,WORD PTR [ecx]
vpmovzxbq xmm4,[ecx]
# Tests for op imm8, xmm, regl/mem16
vpextrw ecx,xmm4,7
vpextrw WORD PTR [ecx],xmm4,7
vpextrw [ecx],xmm4,7
# Tests for op imm8, xmm, regq/mem16
vpextrw WORD PTR [ecx],xmm4,7
vpextrw [ecx],xmm4,7
# Tests for op imm8, regl/mem16, xmm, xmm
vpinsrw xmm6,xmm4,ecx,7
vpinsrw xmm6,xmm4,WORD PTR [ecx],7
vpinsrw xmm6,xmm4,[ecx],7
# Tests for op imm8, xmm, regl/mem8
vpextrb ecx,xmm4,7
vpextrb BYTE PTR [ecx],xmm4,7
vpextrb [ecx],xmm4,7
# Tests for op imm8, regl/mem8, xmm, xmm
vpinsrb xmm6,xmm4,ecx,7
vpinsrb xmm6,xmm4,BYTE PTR [ecx],7
vpinsrb xmm6,xmm4,[ecx],7
# Tests for op imm8, xmm, regq/mem8
vpextrb BYTE PTR [ecx],xmm4,7
vpextrb [ecx],xmm4,7
# Tests for op xmm, xmm
vmaskmovdqu xmm6,xmm4
vmovq xmm6,xmm4
# Tests for op xmm, regl
vmovmskpd ecx,xmm4
vmovmskps ecx,xmm4
vpmovmskb ecx,xmm4
# Tests for op xmm, xmm, xmm
vmovhlps xmm2,xmm6,xmm4
vmovlhps xmm2,xmm6,xmm4
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
# Tests for op imm8, xmm, xmm
vpslld xmm6,xmm4,7
vpslldq xmm6,xmm4,7
vpsllq xmm6,xmm4,7
vpsllw xmm6,xmm4,7
vpsrad xmm6,xmm4,7
vpsraw xmm6,xmm4,7
vpsrld xmm6,xmm4,7
vpsrldq xmm6,xmm4,7
vpsrlq xmm6,xmm4,7
vpsrlw xmm6,xmm4,7
# Tests for op imm8, xmm, regl
vpextrw ecx,xmm4,7
# Tests for op ymm, regl
vmovmskpd ecx,ymm4
vmovmskps ecx,ymm4
# Default instructions without suffixes.
vcvtpd2dq xmm6,xmm4
vcvtpd2dq xmm6,ymm4
vcvtpd2ps xmm6,xmm4
vcvtpd2ps xmm6,ymm4
vcvttpd2dq xmm6,xmm4
vcvttpd2dq xmm6,ymm4
#Tests with different memory and register operands.
vldmxcsr DWORD PTR ds:0x1234
vmovdqa xmm0,XMMWORD PTR ds:0x1234
vmovdqa XMMWORD PTR ds:0x1234,xmm0
vmovd DWORD PTR ds:0x1234,xmm0
vcvtsd2si eax,QWORD PTR ds:0x1234
vcvtdq2pd ymm0,XMMWORD PTR ds:0x1234
vcvtpd2ps xmm0,YMMWORD PTR ds:0x1234
vpavgb xmm7,xmm0,XMMWORD PTR ds:0x1234
vaeskeygenassist xmm0,XMMWORD PTR ds:0x1234,7
vpextrb ds:0x1234,xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR ds:0x1234
vpclmulqdq xmm7,xmm0,XMMWORD PTR ds:0x1234,7
vblendvps xmm6,xmm4,XMMWORD PTR ds:0x1234,xmm0
vpinsrb xmm7,xmm0,ds:0x1234,7
vmovdqa ymm0,YMMWORD PTR ds:0x1234
vmovdqa YMMWORD PTR ds:0x1234,ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR ds:0x1234
vroundpd ymm0,YMMWORD PTR ds:0x1234,7
vextractf128 XMMWORD PTR ds:0x1234,ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR ds:0x1234,7
vblendvpd ymm6,ymm4,YMMWORD PTR ds:0x1234,ymm0
vldmxcsr DWORD PTR [ebp]
vmovdqa xmm0,XMMWORD PTR [ebp]
vmovdqa XMMWORD PTR [ebp],xmm0
vmovd DWORD PTR [ebp],xmm0
vcvtsd2si eax,QWORD PTR [ebp]
vcvtdq2pd ymm0,XMMWORD PTR [ebp]
vcvtpd2ps xmm0,YMMWORD PTR [ebp]
vpavgb xmm7,xmm0,XMMWORD PTR [ebp]
vaeskeygenassist xmm0,XMMWORD PTR [ebp],7
vpextrb [ebp],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [ebp],7
vblendvps xmm6,xmm4,XMMWORD PTR [ebp],xmm0
vpinsrb xmm7,xmm0,[ebp],7
vmovdqa ymm0,YMMWORD PTR [ebp]
vmovdqa YMMWORD PTR [ebp],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [ebp]
vroundpd ymm0,YMMWORD PTR [ebp],7
vextractf128 XMMWORD PTR [ebp],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [ebp],7
vblendvpd ymm6,ymm4,YMMWORD PTR [ebp],ymm0
vldmxcsr DWORD PTR [ebp+0x99]
vmovdqa xmm0,XMMWORD PTR [ebp+0x99]
vmovdqa XMMWORD PTR [ebp+0x99],xmm0
vmovd DWORD PTR [ebp+0x99],xmm0
vcvtsd2si eax,QWORD PTR [ebp+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [ebp+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [ebp+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [ebp+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [ebp+0x99],7
vpextrb [ebp+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [ebp+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [ebp+0x99],xmm0
vpinsrb xmm7,xmm0,[ebp+0x99],7
vmovdqa ymm0,YMMWORD PTR [ebp+0x99]
vmovdqa YMMWORD PTR [ebp+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [ebp+0x99]
vroundpd ymm0,YMMWORD PTR [ebp+0x99],7
vextractf128 XMMWORD PTR [ebp+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [ebp+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [ebp+0x99],ymm0
vldmxcsr DWORD PTR [eiz*1+0x99]
vmovdqa xmm0,XMMWORD PTR [eiz*1+0x99]
vmovdqa XMMWORD PTR [eiz*1+0x99],xmm0
vmovd DWORD PTR [eiz*1+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eiz*1+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eiz*1+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eiz*1+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eiz*1+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eiz*1+0x99],7
vpextrb [eiz*1+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*1+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eiz*1+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eiz*1+0x99],xmm0
vpinsrb xmm7,xmm0,[eiz*1+0x99],7
vmovdqa ymm0,YMMWORD PTR [eiz*1+0x99]
vmovdqa YMMWORD PTR [eiz*1+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eiz*1+0x99]
vroundpd ymm0,YMMWORD PTR [eiz*1+0x99],7
vextractf128 XMMWORD PTR [eiz*1+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eiz*1+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eiz*1+0x99],ymm0
vldmxcsr DWORD PTR [eiz*2+0x99]
vmovdqa xmm0,XMMWORD PTR [eiz*2+0x99]
vmovdqa XMMWORD PTR [eiz*2+0x99],xmm0
vmovd DWORD PTR [eiz*2+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eiz*2+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eiz*2+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eiz*2+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eiz*2+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eiz*2+0x99],7
vpextrb [eiz*2+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*2+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eiz*2+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eiz*2+0x99],xmm0
vpinsrb xmm7,xmm0,[eiz*2+0x99],7
vmovdqa ymm0,YMMWORD PTR [eiz*2+0x99]
vmovdqa YMMWORD PTR [eiz*2+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eiz*2+0x99]
vroundpd ymm0,YMMWORD PTR [eiz*2+0x99],7
vextractf128 XMMWORD PTR [eiz*2+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eiz*2+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eiz*2+0x99],ymm0
vldmxcsr DWORD PTR [eax+eiz*1+0x99]
vmovdqa xmm0,XMMWORD PTR [eax+eiz*1+0x99]
vmovdqa XMMWORD PTR [eax+eiz*1+0x99],xmm0
vmovd DWORD PTR [eax+eiz*1+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eax+eiz*1+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eax+eiz*1+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eax+eiz*1+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eax+eiz*1+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eax+eiz*1+0x99],7
vpextrb [eax+eiz*1+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*1+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eax+eiz*1+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eax+eiz*1+0x99],xmm0
vpinsrb xmm7,xmm0,[eax+eiz*1+0x99],7
vmovdqa ymm0,YMMWORD PTR [eax+eiz*1+0x99]
vmovdqa YMMWORD PTR [eax+eiz*1+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eax+eiz*1+0x99]
vroundpd ymm0,YMMWORD PTR [eax+eiz*1+0x99],7
vextractf128 XMMWORD PTR [eax+eiz*1+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eax+eiz*1+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eax+eiz*1+0x99],ymm0
vldmxcsr DWORD PTR [eax+eiz*2+0x99]
vmovdqa xmm0,XMMWORD PTR [eax+eiz*2+0x99]
vmovdqa XMMWORD PTR [eax+eiz*2+0x99],xmm0
vmovd DWORD PTR [eax+eiz*2+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eax+eiz*2+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eax+eiz*2+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eax+eiz*2+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eax+eiz*2+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eax+eiz*2+0x99],7
vpextrb [eax+eiz*2+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*2+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eax+eiz*2+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eax+eiz*2+0x99],xmm0
vpinsrb xmm7,xmm0,[eax+eiz*2+0x99],7
vmovdqa ymm0,YMMWORD PTR [eax+eiz*2+0x99]
vmovdqa YMMWORD PTR [eax+eiz*2+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eax+eiz*2+0x99]
vroundpd ymm0,YMMWORD PTR [eax+eiz*2+0x99],7
vextractf128 XMMWORD PTR [eax+eiz*2+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eax+eiz*2+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eax+eiz*2+0x99],ymm0
vldmxcsr DWORD PTR [eax+ebx*4+0x99]
vmovdqa xmm0,XMMWORD PTR [eax+ebx*4+0x99]
vmovdqa XMMWORD PTR [eax+ebx*4+0x99],xmm0
vmovd DWORD PTR [eax+ebx*4+0x99],xmm0
vcvtsd2si eax,QWORD PTR [eax+ebx*4+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [eax+ebx*4+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [eax+ebx*4+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [eax+ebx*4+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [eax+ebx*4+0x99],7
vpextrb [eax+ebx*4+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+ebx*4+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [eax+ebx*4+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [eax+ebx*4+0x99],xmm0
vpinsrb xmm7,xmm0,[eax+ebx*4+0x99],7
vmovdqa ymm0,YMMWORD PTR [eax+ebx*4+0x99]
vmovdqa YMMWORD PTR [eax+ebx*4+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [eax+ebx*4+0x99]
vroundpd ymm0,YMMWORD PTR [eax+ebx*4+0x99],7
vextractf128 XMMWORD PTR [eax+ebx*4+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [eax+ebx*4+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [eax+ebx*4+0x99],ymm0
vldmxcsr DWORD PTR [esp+ecx*8+0x99]
vmovdqa xmm0,XMMWORD PTR [esp+ecx*8+0x99]
vmovdqa XMMWORD PTR [esp+ecx*8+0x99],xmm0
vmovd DWORD PTR [esp+ecx*8+0x99],xmm0
vcvtsd2si eax,QWORD PTR [esp+ecx*8+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [esp+ecx*8+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [esp+ecx*8+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [esp+ecx*8+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [esp+ecx*8+0x99],7
vpextrb [esp+ecx*8+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [esp+ecx*8+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [esp+ecx*8+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [esp+ecx*8+0x99],xmm0
vpinsrb xmm7,xmm0,[esp+ecx*8+0x99],7
vmovdqa ymm0,YMMWORD PTR [esp+ecx*8+0x99]
vmovdqa YMMWORD PTR [esp+ecx*8+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [esp+ecx*8+0x99]
vroundpd ymm0,YMMWORD PTR [esp+ecx*8+0x99],7
vextractf128 XMMWORD PTR [esp+ecx*8+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [esp+ecx*8+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [esp+ecx*8+0x99],ymm0
vldmxcsr DWORD PTR [ebp+edx*1+0x99]
vmovdqa xmm0,XMMWORD PTR [ebp+edx*1+0x99]
vmovdqa XMMWORD PTR [ebp+edx*1+0x99],xmm0
vmovd DWORD PTR [ebp+edx*1+0x99],xmm0
vcvtsd2si eax,QWORD PTR [ebp+edx*1+0x99]
vcvtdq2pd ymm0,XMMWORD PTR [ebp+edx*1+0x99]
vcvtpd2ps xmm0,YMMWORD PTR [ebp+edx*1+0x99]
vpavgb xmm7,xmm0,XMMWORD PTR [ebp+edx*1+0x99]
vaeskeygenassist xmm0,XMMWORD PTR [ebp+edx*1+0x99],7
vpextrb [ebp+edx*1+0x99],xmm0,7
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+edx*1+0x99]
vpclmulqdq xmm7,xmm0,XMMWORD PTR [ebp+edx*1+0x99],7
vblendvps xmm6,xmm4,XMMWORD PTR [ebp+edx*1+0x99],xmm0
vpinsrb xmm7,xmm0,[ebp+edx*1+0x99],7
vmovdqa ymm0,YMMWORD PTR [ebp+edx*1+0x99]
vmovdqa YMMWORD PTR [ebp+edx*1+0x99],ymm0
vpermilpd ymm7,ymm0,YMMWORD PTR [ebp+edx*1+0x99]
vroundpd ymm0,YMMWORD PTR [ebp+edx*1+0x99],7
vextractf128 XMMWORD PTR [ebp+edx*1+0x99],ymm0,7
vperm2f128 ymm7,ymm0,YMMWORD PTR [ebp+edx*1+0x99],7
vblendvpd ymm6,ymm4,YMMWORD PTR [ebp+edx*1+0x99],ymm0
# Tests for all register operands.
vmovmskpd eax,xmm0
vpslld xmm7,xmm0,7
vmovmskps eax,ymm0
# ---------------------------------------------------------------------------
# NOTE(review): dataset concatenation artifact repaired here.  The lines that
# follow are a distinct test file that was appended to this one:
#   gas/testsuite/gas/i386/vex-lig-2.s  (repo stsp/binutils-ia16, 1,906 bytes)
# The raw separator tokens ("|", repo id, size) were not valid assembly and
# have been turned into comments so the text remains assemblable.
# ---------------------------------------------------------------------------
# Check VEX non-LIG instructions with -mavxscalar=256
# (every instruction below has a fixed VEX.L requirement, so assembling with
#  -mavxscalar=256 must NOT change the emitted encodings — the paired .d file
#  checks exactly that; do not reorder or edit the instruction lines)
.allow_index_reg
.text
_start:
# vmovd: 32-bit GPR/mem <-> xmm moves.  {vex3} forces the 3-byte VEX prefix
# where the assembler would otherwise pick the 2-byte form.
vmovd %eax, %xmm0
vmovd (%eax), %xmm0
{vex3} vmovd %eax, %xmm0
{vex3} vmovd (%eax), %xmm0
vmovd %xmm0, %eax
vmovd %xmm0, (%eax)
{vex3} vmovd %xmm0, %eax
{vex3} vmovd %xmm0, (%eax)
# vmovq: xmm/mem64 forms.  {store} selects the store-form opcode even for
# the register-to-register encoding (both directions are encodable).
vmovq %xmm0, %xmm0
vmovq (%eax), %xmm0
{vex3} vmovq %xmm0, %xmm0
{vex3} vmovq (%eax), %xmm0
{store} vmovq %xmm0, %xmm0
vmovq %xmm0, (%eax)
{vex3} {store} vmovq %xmm0, %xmm0
{vex3} vmovq %xmm0, (%eax)
# Element-extract instructions, register and memory destinations.
vextractps $0, %xmm0, %eax
vextractps $0, %xmm0, (%eax)
vpextrb $0, %xmm0, %eax
vpextrb $0, %xmm0, (%eax)
vpextrw $0, %xmm0, %eax
{vex3} vpextrw $0, %xmm0, %eax
{store} vpextrw $0, %xmm0, %eax
vpextrw $0, %xmm0, (%eax)
vpextrd $0, %xmm0, %eax
vpextrd $0, %xmm0, (%eax)
# Element-insert instructions.
vinsertps $0, %xmm0, %xmm0, %xmm0
vinsertps $0, (%eax), %xmm0, %xmm0
vpinsrb $0, %eax, %xmm0, %xmm0
vpinsrb $0, (%eax), %xmm0, %xmm0
vpinsrw $0, %eax, %xmm0, %xmm0
vpinsrw $0, (%eax), %xmm0, %xmm0
{vex3} vpinsrw $0, %eax, %xmm0, %xmm0
{vex3} vpinsrw $0, (%eax), %xmm0, %xmm0
vpinsrd $0, %eax, %xmm0, %xmm0
vpinsrd $0, (%eax), %xmm0, %xmm0
# MXCSR load/store (memory-only operands).
vldmxcsr (%eax)
vstmxcsr (%eax)
{vex3} vldmxcsr (%eax)
{vex3} vstmxcsr (%eax)
# VEX-encoded general-purpose instructions: BMI1/BMI2 group.
andn (%eax), %eax, %eax
bextr %eax, (%eax), %eax
blsi (%eax), %eax
blsmsk (%eax), %eax
blsr (%eax), %eax
bzhi %eax, (%eax), %eax
mulx (%eax), %eax, %eax
pdep (%eax), %eax, %eax
pext (%eax), %eax, %eax
rorx $0, (%eax), %eax
sarx %eax, (%eax), %eax
shlx %eax, (%eax), %eax
shrx %eax, (%eax), %eax
# AMD TBM group (bextr with an immediate is the XOP/TBM form, distinct
# from the BMI1 register-count form above).
bextr $0, (%eax), %eax
blcfill (%eax), %eax
blci (%eax), %eax
blcic (%eax), %eax
blcmsk (%eax), %eax
blcs (%eax), %eax
blsfill (%eax), %eax
blsic (%eax), %eax
t1mskc (%eax), %eax
tzmsk (%eax), %eax
# ===========================================================================
# gas/testsuite/gas/i386/x86-64-avx512f_gfni.s  (repo: stsp/binutils-ia16, 2,404 bytes)
# ===========================================================================
# Check 64bit AVX512F,GFNI instructions
# Exercises EVEX-encoded GFNI forms: plain, masked {%k7}, zero-masked
# {%k7}{z}, indexed memory, compressed-disp8 memory (8128 = 127*64,
# 1016 = 127*8), and embedded broadcast {1to8}.
	.allow_index_reg
	.text
_start:
	vgf2p8affineqb $0xab, %zmm28, %zmm29, %zmm30	 # AVX512F,GFNI
	vgf2p8affineqb $0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F,GFNI
	vgf2p8affineqb $0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F,GFNI
	vgf2p8affineqb $123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F,GFNI
	vgf2p8affineqb $123, 8128(%rdx), %zmm29, %zmm30	 # AVX512F,GFNI Disp8
	vgf2p8affineqb $123, 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F,GFNI Disp8
	vgf2p8affineinvqb $0xab, %zmm28, %zmm29, %zmm30	 # AVX512F,GFNI
	vgf2p8affineinvqb $0xab, %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F,GFNI
	vgf2p8affineinvqb $0xab, %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F,GFNI
	vgf2p8affineinvqb $123, 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F,GFNI
	vgf2p8affineinvqb $123, 8128(%rdx), %zmm29, %zmm30	 # AVX512F,GFNI Disp8
	vgf2p8affineinvqb $123, 1016(%rdx){1to8}, %zmm29, %zmm30	 # AVX512F,GFNI Disp8
	vgf2p8mulb %zmm28, %zmm29, %zmm30	 # AVX512F,GFNI
	vgf2p8mulb %zmm28, %zmm29, %zmm30{%k7}	 # AVX512F,GFNI
	vgf2p8mulb %zmm28, %zmm29, %zmm30{%k7}{z}	 # AVX512F,GFNI
	vgf2p8mulb 0x123(%rax,%r14,8), %zmm29, %zmm30	 # AVX512F,GFNI
	vgf2p8mulb 8128(%rdx), %zmm29, %zmm30	 # AVX512F,GFNI Disp8
	# Intel-syntax mirror of the AT&T forms above.
	.intel_syntax noprefix
	vgf2p8affineqb zmm30, zmm29, zmm28, 0xab	 # AVX512F,GFNI
	vgf2p8affineqb zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F,GFNI
	vgf2p8affineqb zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F,GFNI
	vgf2p8affineqb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F,GFNI
	vgf2p8affineqb zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F,GFNI Disp8
	vgf2p8affineqb zmm30, zmm29, [rdx+1016]{1to8}, 123	 # AVX512F,GFNI Disp8
	vgf2p8affineinvqb zmm30, zmm29, zmm28, 0xab	 # AVX512F,GFNI
	vgf2p8affineinvqb zmm30{k7}, zmm29, zmm28, 0xab	 # AVX512F,GFNI
	vgf2p8affineinvqb zmm30{k7}{z}, zmm29, zmm28, 0xab	 # AVX512F,GFNI
	vgf2p8affineinvqb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234], 123	 # AVX512F,GFNI
	vgf2p8affineinvqb zmm30, zmm29, ZMMWORD PTR [rdx+8128], 123	 # AVX512F,GFNI Disp8
	# 1024 = 128*8 is just outside the signed-byte disp8*N range, so this
	# one takes a full disp32 — hence no "Disp8" tag on the comment.
	vgf2p8affineinvqb zmm30, zmm29, [rdx+1024]{1to8}, 123	 # AVX512F,GFNI
	vgf2p8mulb zmm30, zmm29, zmm28	 # AVX512F,GFNI
	vgf2p8mulb zmm30{k7}, zmm29, zmm28	 # AVX512F,GFNI
	vgf2p8mulb zmm30{k7}{z}, zmm29, zmm28	 # AVX512F,GFNI
	vgf2p8mulb zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234]	 # AVX512F,GFNI
	vgf2p8mulb zmm30, zmm29, ZMMWORD PTR [rdx+8128]	 # AVX512F,GFNI Disp8
# ===========================================================================
# gas/testsuite/gas/i386/lock-1.s  (repo: stsp/binutils-ia16, 1,604 bytes)
# ===========================================================================
# Lockable Instructions
# Verifies that gas accepts the LOCK prefix on every instruction form that
# architecturally permits it (read-modify-write with a memory destination),
# first in AT&T syntax, then the same set mirrored in Intel syntax.
	.text
foo:
	lock add %eax, (%ebx)
	lock addl $0x64, (%ebx)
	lock adc %eax, (%ebx)
	lock adcl $0x64, (%ebx)
	lock and %eax, (%ebx)
	lock andl $0x64, (%ebx)
	lock btc %eax, (%ebx)
	lock btcl $0x64, (%ebx)
	lock btr %eax, (%ebx)
	lock btrl $0x64, (%ebx)
	lock bts %eax, (%ebx)
	lock btsl $0x64, (%ebx)
	lock cmpxchg %eax,(%ebx)
	lock cmpxchg8b (%ebx)
	lock decl (%ebx)
	lock incl (%ebx)
	lock negl (%ebx)
	lock notl (%ebx)
	lock or %eax, (%ebx)
	lock orl $0x64, (%ebx)
	lock sbb %eax, (%ebx)
	lock sbbl $0x64, (%ebx)
	lock sub %eax, (%ebx)
	lock subl $0x64, (%ebx)
	lock xadd %eax, (%ebx)
	# xchg is tested with both operand orders (both encode the same RMW).
	lock xchg (%ebx), %eax
	lock xchg %eax, (%ebx)
	lock xor %eax, (%ebx)
	lock xorl $0x64, (%ebx)
	.intel_syntax noprefix
	lock add DWORD PTR [ebx],eax
	lock add DWORD PTR [ebx],0x64
	lock adc DWORD PTR [ebx],eax
	lock adc DWORD PTR [ebx],0x64
	lock and DWORD PTR [ebx],eax
	lock and DWORD PTR [ebx],0x64
	lock btc DWORD PTR [ebx],eax
	lock btc DWORD PTR [ebx],0x64
	lock btr DWORD PTR [ebx],eax
	lock btr DWORD PTR [ebx],0x64
	lock bts DWORD PTR [ebx],eax
	lock bts DWORD PTR [ebx],0x64
	lock cmpxchg DWORD PTR [ebx],eax
	lock cmpxchg8b QWORD PTR [ebx]
	lock dec DWORD PTR [ebx]
	lock inc DWORD PTR [ebx]
	lock neg DWORD PTR [ebx]
	lock not DWORD PTR [ebx]
	lock or DWORD PTR [ebx],eax
	lock or DWORD PTR [ebx],0x64
	lock sbb DWORD PTR [ebx],eax
	lock sbb DWORD PTR [ebx],0x64
	lock sub DWORD PTR [ebx],eax
	lock sub DWORD PTR [ebx],0x64
	lock xadd DWORD PTR [ebx],eax
	# The two AT&T xchg operand orders collapse to the same Intel spelling,
	# hence the intentionally repeated line.
	lock xchg DWORD PTR [ebx],eax
	lock xchg DWORD PTR [ebx],eax
	lock xor DWORD PTR [ebx],eax
	lock xor DWORD PTR [ebx],0x64
# ===========================================================================
# gas/testsuite/gas/i386/avx512vnni_vl.s  (repo: stsp/binutils-ia16, 5,228 bytes)
# ===========================================================================
# Check 32bit AVX512{VNNI,VL} instructions
# Covers the four VNNI dot-product ops at 128/256-bit vector lengths with
# masking, zero-masking, plain/indexed memory, compressed-disp8 memory
# (2032 = 127*16, 4064 = 127*32, 508 = 127*4) and {1to4}/{1to8} broadcast.
	.allow_index_reg
	.text
_start:
	vpdpwssd	%xmm2, %xmm4, %xmm2{%k3}	 # AVX512{VNNI,VL}
	vpdpwssd	%xmm2, %xmm4, %xmm2{%k3}{z}	 # AVX512{VNNI,VL}
	vpdpwssd	-123456(%esp,%esi,8), %xmm4, %xmm2{%k1}	 # AVX512{VNNI,VL}
	vpdpwssd	2032(%edx), %xmm4, %xmm2{%k1}	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	508(%edx){1to4}, %xmm4, %xmm2{%k1}	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	%ymm1, %ymm3, %ymm3{%k1}	 # AVX512{VNNI,VL}
	vpdpwssd	%ymm1, %ymm3, %ymm3{%k1}{z}	 # AVX512{VNNI,VL}
	vpdpwssd	-123456(%esp,%esi,8), %ymm3, %ymm3{%k4}	 # AVX512{VNNI,VL}
	vpdpwssd	4064(%edx), %ymm3, %ymm3{%k4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	508(%edx){1to8}, %ymm3, %ymm3{%k4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	%xmm1, %xmm4, %xmm2{%k1}	 # AVX512{VNNI,VL}
	vpdpwssds	%xmm1, %xmm4, %xmm2{%k1}{z}	 # AVX512{VNNI,VL}
	vpdpwssds	-123456(%esp,%esi,8), %xmm4, %xmm2{%k4}	 # AVX512{VNNI,VL}
	vpdpwssds	2032(%edx), %xmm4, %xmm2{%k4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	508(%edx){1to4}, %xmm4, %xmm2{%k4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	%ymm4, %ymm1, %ymm4{%k7}	 # AVX512{VNNI,VL}
	vpdpwssds	%ymm4, %ymm1, %ymm4{%k7}{z}	 # AVX512{VNNI,VL}
	vpdpwssds	-123456(%esp,%esi,8), %ymm1, %ymm4{%k3}	 # AVX512{VNNI,VL}
	vpdpwssds	4064(%edx), %ymm1, %ymm4{%k3}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	508(%edx){1to8}, %ymm1, %ymm4{%k3}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	%xmm1, %xmm3, %xmm2{%k4}	 # AVX512{VNNI,VL}
	vpdpbusd	%xmm1, %xmm3, %xmm2{%k4}{z}	 # AVX512{VNNI,VL}
	vpdpbusd	-123456(%esp,%esi,8), %xmm3, %xmm2{%k2}	 # AVX512{VNNI,VL}
	vpdpbusd	2032(%edx), %xmm3, %xmm2{%k2}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	508(%edx){1to4}, %xmm3, %xmm2{%k2}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	%ymm2, %ymm2, %ymm2{%k5}	 # AVX512{VNNI,VL}
	vpdpbusd	%ymm2, %ymm2, %ymm2{%k5}{z}	 # AVX512{VNNI,VL}
	vpdpbusd	-123456(%esp,%esi,8), %ymm2, %ymm2{%k7}	 # AVX512{VNNI,VL}
	vpdpbusd	4064(%edx), %ymm2, %ymm2{%k7}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	508(%edx){1to8}, %ymm2, %ymm2{%k7}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	%xmm4, %xmm2, %xmm6{%k6}	 # AVX512{VNNI,VL}
	vpdpbusds	%xmm4, %xmm2, %xmm6{%k6}{z}	 # AVX512{VNNI,VL}
	vpdpbusds	-123456(%esp,%esi,8), %xmm2, %xmm6{%k4}	 # AVX512{VNNI,VL}
	vpdpbusds	2032(%edx), %xmm2, %xmm6{%k4}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	508(%edx){1to4}, %xmm2, %xmm6{%k4}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	%ymm1, %ymm3, %ymm4{%k7}	 # AVX512{VNNI,VL}
	vpdpbusds	%ymm1, %ymm3, %ymm4{%k7}{z}	 # AVX512{VNNI,VL}
	vpdpbusds	-123456(%esp,%esi,8), %ymm3, %ymm4{%k1}	 # AVX512{VNNI,VL}
	vpdpbusds	4064(%edx), %ymm3, %ymm4{%k1}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	508(%edx){1to8}, %ymm3, %ymm4{%k1}	 # AVX512{VNNI,VL} Disp8
	# Intel-syntax mirror of the AT&T forms above.
	.intel_syntax noprefix
	vpdpwssd	xmm5{k1}, xmm2, xmm2	 # AVX512{VNNI,VL}
	vpdpwssd	xmm5{k1}{z}, xmm2, xmm2	 # AVX512{VNNI,VL}
	vpdpwssd	xmm5{k6}, xmm2, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpwssd	xmm5{k6}, xmm2, XMMWORD PTR [edx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	xmm5{k6}, xmm2, [edx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	ymm1{k7}, ymm2, ymm4	 # AVX512{VNNI,VL}
	vpdpwssd	ymm1{k7}{z}, ymm2, ymm4	 # AVX512{VNNI,VL}
	vpdpwssd	ymm1{k6}, ymm2, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpwssd	ymm1{k6}, ymm2, YMMWORD PTR [edx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpwssd	ymm1{k6}, ymm2, [edx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	xmm1{k2}, xmm4, xmm1	 # AVX512{VNNI,VL}
	vpdpwssds	xmm1{k2}{z}, xmm4, xmm1	 # AVX512{VNNI,VL}
	vpdpwssds	xmm1{k6}, xmm4, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpwssds	xmm1{k6}, xmm4, XMMWORD PTR [edx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	xmm1{k6}, xmm4, [edx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	ymm3{k4}, ymm2, ymm4	 # AVX512{VNNI,VL}
	vpdpwssds	ymm3{k4}{z}, ymm2, ymm4	 # AVX512{VNNI,VL}
	vpdpwssds	ymm3{k5}, ymm2, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpwssds	ymm3{k5}, ymm2, YMMWORD PTR [edx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpwssds	ymm3{k5}, ymm2, [edx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	xmm3{k7}, xmm4, xmm4	 # AVX512{VNNI,VL}
	vpdpbusd	xmm3{k7}{z}, xmm4, xmm4	 # AVX512{VNNI,VL}
	vpdpbusd	xmm3{k1}, xmm4, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpbusd	xmm3{k1}, xmm4, XMMWORD PTR [edx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	xmm3{k1}, xmm4, [edx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	ymm6{k5}, ymm2, ymm4	 # AVX512{VNNI,VL}
	vpdpbusd	ymm6{k5}{z}, ymm2, ymm4	 # AVX512{VNNI,VL}
	vpdpbusd	ymm6{k5}, ymm2, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpbusd	ymm6{k5}, ymm2, YMMWORD PTR [edx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpbusd	ymm6{k5}, ymm2, [edx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	xmm3{k5}, xmm3, xmm4	 # AVX512{VNNI,VL}
	vpdpbusds	xmm3{k5}{z}, xmm3, xmm4	 # AVX512{VNNI,VL}
	vpdpbusds	xmm3{k4}, xmm3, XMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpbusds	xmm3{k4}, xmm3, XMMWORD PTR [edx+2032]	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	xmm3{k4}, xmm3, [edx+508]{1to4}	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	ymm2{k4}, ymm3, ymm4	 # AVX512{VNNI,VL}
	vpdpbusds	ymm2{k4}{z}, ymm3, ymm4	 # AVX512{VNNI,VL}
	vpdpbusds	ymm2{k1}, ymm3, YMMWORD PTR [esp+esi*8-123456]	 # AVX512{VNNI,VL}
	vpdpbusds	ymm2{k1}, ymm3, YMMWORD PTR [edx+4064]	 # AVX512{VNNI,VL} Disp8
	vpdpbusds	ymm2{k1}, ymm3, [edx+508]{1to8}	 # AVX512{VNNI,VL} Disp8
# ===========================================================================
# gas/testsuite/gas/i386/avx512vl_vaes.s  (repo: stsp/binutils-ia16, 6,130 bytes)
# ===========================================================================
# Check 32bit AVX512VL,VAES instructions
# Exercises the four VAES instructions at 128/256-bit lengths with register,
# indexed-memory and compressed-disp8 operands (2032 = 127*16, 4064 = 127*32),
# in the default encoding and with {evex} forcing the EVEX prefix,
# then the same matrix again in Intel syntax.
	.allow_index_reg
	.text
_start:
	vaesdec	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	vaesdec	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	vaesdec	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	vaesdec	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	vaesdec	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	vaesdec	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	vaesdeclast	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	vaesdeclast	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	vaesdeclast	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	vaesdeclast	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	vaesdeclast	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	vaesdeclast	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	vaesenc	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	vaesenc	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	vaesenc	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	vaesenc	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	vaesenc	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	vaesenc	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	vaesenclast	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	vaesenclast	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	vaesenclast	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	vaesenclast	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	vaesenclast	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	vaesenclast	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	# Same instructions with the EVEX encoding explicitly requested.
	{evex} vaesdec	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesdec	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesdec	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	{evex} vaesdec	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesdec	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesdec	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	{evex} vaesdeclast	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesdeclast	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesdeclast	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	{evex} vaesdeclast	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesdeclast	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesdeclast	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	{evex} vaesenc	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesenc	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesenc	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	{evex} vaesenc	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesenc	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesenc	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	{evex} vaesenclast	%xmm4, %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesenclast	-123456(%esp,%esi,8), %xmm5, %xmm6	 # AVX512VL,VAES
	{evex} vaesenclast	2032(%edx), %xmm5, %xmm6	 # AVX512VL,VAES Disp8
	{evex} vaesenclast	%ymm4, %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesenclast	-123456(%esp,%esi,8), %ymm5, %ymm6	 # AVX512VL,VAES
	{evex} vaesenclast	4064(%edx), %ymm5, %ymm6	 # AVX512VL,VAES Disp8
	# Intel-syntax mirror of the AT&T forms above.
	.intel_syntax noprefix
	vaesdec	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	vaesdec	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesdec	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	vaesdec	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	vaesdec	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesdec	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	vaesdeclast	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	vaesdeclast	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesdeclast	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	vaesdeclast	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	vaesdeclast	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesdeclast	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	vaesenc	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	vaesenc	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesenc	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	vaesenc	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	vaesenc	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesenc	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	vaesenclast	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	vaesenclast	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesenclast	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	vaesenclast	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	vaesenclast	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	vaesenclast	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	{evex} vaesdec	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	{evex} vaesdec	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesdec	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	{evex} vaesdec	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	{evex} vaesdec	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesdec	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	{evex} vaesdeclast	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	{evex} vaesdeclast	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesdeclast	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	{evex} vaesdeclast	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	{evex} vaesdeclast	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesdeclast	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	{evex} vaesenc	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	{evex} vaesenc	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesenc	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	{evex} vaesenc	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	{evex} vaesenc	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesenc	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
	{evex} vaesenclast	xmm6, xmm5, xmm4	 # AVX512VL,VAES
	{evex} vaesenclast	xmm6, xmm5, XMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesenclast	xmm6, xmm5, XMMWORD PTR [edx+2032]	 # AVX512VL,VAES Disp8
	{evex} vaesenclast	ymm6, ymm5, ymm4	 # AVX512VL,VAES
	{evex} vaesenclast	ymm6, ymm5, YMMWORD PTR [esp+esi*8-123456]	 # AVX512VL,VAES
	{evex} vaesenclast	ymm6, ymm5, YMMWORD PTR [edx+4064]	 # AVX512VL,VAES Disp8
# ===========================================================================
# gas/testsuite/gas/i386/x86-64-vp2intersect.s  (repo: stsp/binutils-ia16, 1,386 bytes)
# ===========================================================================
# Check AVX512_VP2INTERSECT new instructions.
# Covers dword/qword forms at 512/256/128-bit lengths with register, memory
# and embedded-broadcast sources, in both AT&T and Intel syntax.
# NOTE(review): per the ISA documentation the destination mask operand
# denotes an even/odd k-register pair (here k3 implies k3/k4) — confirm
# against the AVX512_VP2INTERSECT spec; only k3 is spelled in the source.
	.text
	vp2intersectd	%zmm1, %zmm2, %k3
	vp2intersectd	64(%rax), %zmm2, %k3
	vp2intersectd	8(%rax){1to16}, %zmm2, %k3
	vp2intersectd	%ymm1, %ymm2, %k3
	vp2intersectd	32(%rax), %ymm2, %k3
	vp2intersectd	8(%rax){1to8}, %ymm2, %k3
	vp2intersectd	%xmm1, %xmm2, %k3
	vp2intersectd	16(%rax), %xmm2, %k3
	vp2intersectd	8(%rax){1to4}, %xmm2, %k3
	vp2intersectq	%zmm1, %zmm2, %k3
	vp2intersectq	64(%rax), %zmm2, %k3
	vp2intersectq	8(%rax){1to8}, %zmm2, %k3
	vp2intersectq	%ymm1, %ymm2, %k3
	vp2intersectq	32(%rax), %ymm2, %k3
	vp2intersectq	8(%rax){1to4}, %ymm2, %k3
	vp2intersectq	%xmm1, %xmm2, %k3
	vp2intersectq	16(%rax), %xmm2, %k3
	vp2intersectq	8(%rax){1to2}, %xmm2, %k3
	# Intel-syntax mirror; broadcasts use the "dword/qword bcst" spelling.
	.intel_syntax noprefix
	vp2intersectd	k3, zmm2, zmm1
	vp2intersectd	k3, zmm2, 64[rax]
	vp2intersectd	k3, zmm2, dword bcst 8[rax]
	vp2intersectd	k3, ymm2, ymm1
	vp2intersectd	k3, ymm2, 32[rax]
	vp2intersectd	k3, ymm2, dword bcst 8[rax]
	vp2intersectd	k3, xmm2, xmm1
	vp2intersectd	k3, xmm2, 16[rax]
	vp2intersectd	k3, xmm2, dword bcst 8[rax]
	vp2intersectq	k3, zmm2, zmm1
	vp2intersectq	k3, zmm2, 64[rax]
	vp2intersectq	k3, zmm2, qword bcst 8[rax]
	vp2intersectq	k3, ymm2, ymm1
	vp2intersectq	k3, ymm2, 32[rax]
	vp2intersectq	k3, ymm2, qword bcst 8[rax]
	vp2intersectq	k3, xmm2, xmm1
	vp2intersectq	k3, xmm2, 16[rax]
	vp2intersectq	k3, xmm2, qword bcst 8[rax]
# ===========================================================================
# gas/testsuite/gas/i386/x86-64-sse-noavx.s  (repo: stsp/binutils-ia16, 1,281 bytes)
# ===========================================================================
# Check 64bit SSE instructions without AVX equivalent
# These legacy SSE/MMX/system instructions have no VEX-encoded counterpart,
# so they must still assemble in their legacy forms.
	.text
_start:
	cmpxchg16b (%rax)
	crc32	%cl,%ebx
	# MMX <-> SSE conversion instructions (operate on %mmN, hence no v-form).
	cvtpd2pi %xmm3,%mm2
	cvtpi2pd %mm3,%xmm2
	cvtpi2ps %mm3,%xmm2
	cvtps2pi %xmm7,%mm6
	cvttpd2pi %xmm4,%mm3
	cvttps2pi %xmm4,%mm3
	# x87 store-integer-with-truncation (SSE3).
	fisttps	(%rax)
	fisttpl	(%rax)
	fisttpll (%rax)
	lfence
	maskmovq %mm7,%mm0
	mfence
	monitor
	movdq2q	%xmm0, %mm1
	movnti	%eax, (%rax)
	movntq	%mm2,(%rax)
	movq2dq	%mm0, %xmm1
	mwait
	# MMX-register forms of SSSE3/SSE integer ops.
	pabsb	%mm1,%mm0
	pabsd	%mm1,%mm0
	pabsw	%mm1,%mm0
	paddq	%mm1,%mm0
	palignr	$0x2,%mm1,%mm0
	pavgb	%mm1,%mm0
	pavgw	%mm3,%mm2
	pextrw	$0x0,%mm1,%eax
	phaddd	%mm1,%mm0
	phaddsw	%mm1,%mm0
	phaddw	%mm1,%mm0
	phsubd	%mm1,%mm0
	phsubsw	%mm1,%mm0
	phsubw	%mm1,%mm0
	pinsrw	$0x2,%edx,%mm2
	pmaddubsw %mm1,%mm0
	pmaxsw	%mm1,%mm0
	pmaxub	%mm2,%mm2
	pminsw	%mm5,%mm4
	pminub	%mm7,%mm6
	pmovmskb %mm5,%eax
	pmulhrsw %mm1,%mm0
	pmulhuw	%mm5,%mm4
	pmuludq	%mm0, %mm1
	popcnt	%ebx,%ecx
	prefetchnta (%rax)
	prefetcht0 (%rax)
	prefetcht1 (%rax)
	prefetcht2 (%rax)
	psadbw	%mm7,%mm6
	pshufb	%mm1,%mm0
	pshufw	$0x1,%mm2,%mm3
	psignb	%mm1,%mm0
	psignd	%mm1,%mm0
	psignw	%mm1,%mm0
	psubq	%mm1,%mm0
	sfence
# ===========================================================================
# gas/testsuite/gas/i386/x86-64-avx256int.s  (repo: stsp/binutils-ia16, 15,990 bytes)
# ===========================================================================
# Check x86-64 256-bit integer AVX instructions
.allow_index_reg
.text
_start:
# Tests for op ymm, regl
vpmovmskb %ymm4,%ecx
# Tests for op ymm, regq
vpmovmskb %ymm4,%rcx
# Tests for op imm8, ymm, ymm
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
# Tests for op imm8, ymm/mem256, ymm
vpshufd $7,%ymm6,%ymm2
vpshufd $7,(%rcx),%ymm6
vpshufhw $7,%ymm6,%ymm2
vpshufhw $7,(%rcx),%ymm6
vpshuflw $7,%ymm6,%ymm2
vpshuflw $7,(%rcx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vpackssdw %ymm4,%ymm6,%ymm2
vpackssdw (%rcx),%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpacksswb (%rcx),%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackusdw (%rcx),%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
vpackuswb (%rcx),%ymm6,%ymm2
vpaddb %ymm4,%ymm6,%ymm2
vpaddb (%rcx),%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
vpaddw (%rcx),%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddd (%rcx),%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddq (%rcx),%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsb (%rcx),%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddsw (%rcx),%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusb (%rcx),%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddusw (%rcx),%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpand (%rcx),%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpandn (%rcx),%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgb (%rcx),%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpavgw (%rcx),%ymm6,%ymm2
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqb (%rcx),%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpeqw (%rcx),%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqd (%rcx),%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqq (%rcx),%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtb (%rcx),%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
vpcmpgtw (%rcx),%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtd (%rcx),%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtq (%rcx),%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphaddw (%rcx),%ymm6,%ymm2
vphaddd %ymm4,%ymm6,%ymm2
vphaddd (%rcx),%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddsw (%rcx),%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
vphsubw (%rcx),%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubd (%rcx),%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubsw (%rcx),%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
vpmaddwd (%rcx),%ymm6,%ymm2
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddubsw (%rcx),%ymm6,%ymm2
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsb (%rcx),%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxsw (%rcx),%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsd (%rcx),%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxub (%rcx),%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpmaxuw (%rcx),%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxud (%rcx),%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsb (%rcx),%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminsw (%rcx),%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsd (%rcx),%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminub (%rcx),%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
vpminuw (%rcx),%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminud (%rcx),%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhuw (%rcx),%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhrsw (%rcx),%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulhw (%rcx),%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmullw (%rcx),%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmulld (%rcx),%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
vpmuludq (%rcx),%ymm6,%ymm2
vpmuldq %ymm4,%ymm6,%ymm2
vpmuldq (%rcx),%ymm6,%ymm2
vpor %ymm4,%ymm6,%ymm2
vpor (%rcx),%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpsadbw (%rcx),%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufb (%rcx),%ymm6,%ymm2
vpsignb %ymm4,%ymm6,%ymm2
vpsignb (%rcx),%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
vpsignw (%rcx),%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignd (%rcx),%ymm6,%ymm2
vpsubb %ymm4,%ymm6,%ymm2
vpsubb (%rcx),%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
vpsubw (%rcx),%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubd (%rcx),%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubq (%rcx),%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsb (%rcx),%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubsw (%rcx),%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusb (%rcx),%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubusw (%rcx),%ymm6,%ymm2
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhbw (%rcx),%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpckhwd (%rcx),%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhdq (%rcx),%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhqdq (%rcx),%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpcklbw (%rcx),%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpunpcklwd (%rcx),%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpckldq (%rcx),%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklqdq (%rcx),%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
vpxor (%rcx),%ymm6,%ymm2
# Tests for op ymm/mem256, ymm
vpabsb %ymm4,%ymm6
vpabsb (%rcx),%ymm4
vpabsw %ymm4,%ymm6
vpabsw (%rcx),%ymm4
vpabsd %ymm4,%ymm6
vpabsd (%rcx),%ymm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vmpsadbw $7,%ymm4,%ymm6,%ymm2
vmpsadbw $7,(%rcx),%ymm6,%ymm2
vpalignr $7,%ymm4,%ymm6,%ymm2
vpalignr $7,(%rcx),%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
vpblendw $7,(%rcx),%ymm6,%ymm2
# Tests for op ymm, ymm/mem256, ymm, ymm
vpblendvb %ymm4,%ymm6,%ymm2,%ymm7
vpblendvb %ymm4,(%rcx),%ymm2,%ymm7
# Tests for op xmm/mem128, ymm, ymm
vpsllw %xmm4,%ymm6,%ymm2
vpsllw (%rcx),%ymm6,%ymm2
vpslld %xmm4,%ymm6,%ymm2
vpslld (%rcx),%ymm6,%ymm2
vpsllq %xmm4,%ymm6,%ymm2
vpsllq (%rcx),%ymm6,%ymm2
vpsraw %xmm4,%ymm6,%ymm2
vpsraw (%rcx),%ymm6,%ymm2
vpsrad %xmm4,%ymm6,%ymm2
vpsrad (%rcx),%ymm6,%ymm2
vpsrlw %xmm4,%ymm6,%ymm2
vpsrlw (%rcx),%ymm6,%ymm2
vpsrld %xmm4,%ymm6,%ymm2
vpsrld (%rcx),%ymm6,%ymm2
vpsrlq %xmm4,%ymm6,%ymm2
vpsrlq (%rcx),%ymm6,%ymm2
# Tests for op xmm/mem128, ymm
vpmovsxbw %xmm4,%ymm4
vpmovsxbw (%rcx),%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwd (%rcx),%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxdq (%rcx),%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxbw (%rcx),%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwd (%rcx),%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxdq (%rcx),%ymm4
# Tests for op xmm/mem64, ymm
vpmovsxbd %xmm4,%ymm6
vpmovsxbd (%rcx),%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovsxwq (%rcx),%ymm4
vpmovzxbd %xmm4,%ymm6
vpmovzxbd (%rcx),%ymm4
vpmovzxwq %xmm4,%ymm6
vpmovzxwq (%rcx),%ymm4
# Tests for op xmm/mem32, ymm
vpmovsxbq %xmm4,%ymm4
vpmovsxbq (%rcx),%ymm4
vpmovzxbq %xmm4,%ymm4
vpmovzxbq (%rcx),%ymm4
.intel_syntax noprefix
# Tests for op ymm, regl
vpmovmskb ecx,ymm4
# Tests for op ymm, regq
vpmovmskb rcx,ymm4
# Tests for op imm8, ymm, ymm
vpslld ymm2,ymm6,7
vpslldq ymm2,ymm6,7
vpsllq ymm2,ymm6,7
vpsllw ymm2,ymm6,7
vpsrad ymm2,ymm6,7
vpsraw ymm2,ymm6,7
vpsrld ymm2,ymm6,7
vpsrldq ymm2,ymm6,7
vpsrlq ymm2,ymm6,7
vpsrlw ymm2,ymm6,7
# Tests for op imm8, ymm/mem256, ymm
vpshufd ymm2,ymm6,7
vpshufd ymm6,YMMWORD PTR [rcx],7
vpshufd ymm6,[rcx],7
vpshufhw ymm2,ymm6,7
vpshufhw ymm6,YMMWORD PTR [rcx],7
vpshufhw ymm6,[rcx],7
vpshuflw ymm2,ymm6,7
vpshuflw ymm6,YMMWORD PTR [rcx],7
vpshuflw ymm6,[rcx],7
# Tests for op ymm/mem256, ymm, ymm
vpackssdw ymm2,ymm6,ymm4
vpackssdw ymm2,ymm6,YMMWORD PTR [rcx]
vpackssdw ymm2,ymm6,[rcx]
vpacksswb ymm2,ymm6,ymm4
vpacksswb ymm2,ymm6,YMMWORD PTR [rcx]
vpacksswb ymm2,ymm6,[rcx]
vpackusdw ymm2,ymm6,ymm4
vpackusdw ymm2,ymm6,YMMWORD PTR [rcx]
vpackusdw ymm2,ymm6,[rcx]
vpackuswb ymm2,ymm6,ymm4
vpackuswb ymm2,ymm6,YMMWORD PTR [rcx]
vpackuswb ymm2,ymm6,[rcx]
vpaddb ymm2,ymm6,ymm4
vpaddb ymm2,ymm6,YMMWORD PTR [rcx]
vpaddb ymm2,ymm6,[rcx]
vpaddw ymm2,ymm6,ymm4
vpaddw ymm2,ymm6,YMMWORD PTR [rcx]
vpaddw ymm2,ymm6,[rcx]
vpaddd ymm2,ymm6,ymm4
vpaddd ymm2,ymm6,YMMWORD PTR [rcx]
vpaddd ymm2,ymm6,[rcx]
vpaddq ymm2,ymm6,ymm4
vpaddq ymm2,ymm6,YMMWORD PTR [rcx]
vpaddq ymm2,ymm6,[rcx]
vpaddsb ymm2,ymm6,ymm4
vpaddsb ymm2,ymm6,YMMWORD PTR [rcx]
vpaddsb ymm2,ymm6,[rcx]
vpaddsw ymm2,ymm6,ymm4
vpaddsw ymm2,ymm6,YMMWORD PTR [rcx]
vpaddsw ymm2,ymm6,[rcx]
vpaddusb ymm2,ymm6,ymm4
vpaddusb ymm2,ymm6,YMMWORD PTR [rcx]
vpaddusb ymm2,ymm6,[rcx]
vpaddusw ymm2,ymm6,ymm4
vpaddusw ymm2,ymm6,YMMWORD PTR [rcx]
vpaddusw ymm2,ymm6,[rcx]
vpand ymm2,ymm6,ymm4
vpand ymm2,ymm6,YMMWORD PTR [rcx]
vpand ymm2,ymm6,[rcx]
vpandn ymm2,ymm6,ymm4
vpandn ymm2,ymm6,YMMWORD PTR [rcx]
vpandn ymm2,ymm6,[rcx]
vpavgb ymm2,ymm6,ymm4
vpavgb ymm2,ymm6,YMMWORD PTR [rcx]
vpavgb ymm2,ymm6,[rcx]
vpavgw ymm2,ymm6,ymm4
vpavgw ymm2,ymm6,YMMWORD PTR [rcx]
vpavgw ymm2,ymm6,[rcx]
vpcmpeqb ymm2,ymm6,ymm4
vpcmpeqb ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpeqb ymm2,ymm6,[rcx]
vpcmpeqw ymm2,ymm6,ymm4
vpcmpeqw ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpeqw ymm2,ymm6,[rcx]
vpcmpeqd ymm2,ymm6,ymm4
vpcmpeqd ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpeqd ymm2,ymm6,[rcx]
vpcmpeqq ymm2,ymm6,ymm4
vpcmpeqq ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpeqq ymm2,ymm6,[rcx]
vpcmpgtb ymm2,ymm6,ymm4
vpcmpgtb ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpgtb ymm2,ymm6,[rcx]
vpcmpgtw ymm2,ymm6,ymm4
vpcmpgtw ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpgtw ymm2,ymm6,[rcx]
vpcmpgtd ymm2,ymm6,ymm4
vpcmpgtd ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpgtd ymm2,ymm6,[rcx]
vpcmpgtq ymm2,ymm6,ymm4
vpcmpgtq ymm2,ymm6,YMMWORD PTR [rcx]
vpcmpgtq ymm2,ymm6,[rcx]
vphaddw ymm2,ymm6,ymm4
vphaddw ymm2,ymm6,YMMWORD PTR [rcx]
vphaddw ymm2,ymm6,[rcx]
vphaddd ymm2,ymm6,ymm4
vphaddd ymm2,ymm6,YMMWORD PTR [rcx]
vphaddd ymm2,ymm6,[rcx]
vphaddsw ymm2,ymm6,ymm4
vphaddsw ymm2,ymm6,YMMWORD PTR [rcx]
vphaddsw ymm2,ymm6,[rcx]
vphsubw ymm2,ymm6,ymm4
vphsubw ymm2,ymm6,YMMWORD PTR [rcx]
vphsubw ymm2,ymm6,[rcx]
vphsubd ymm2,ymm6,ymm4
vphsubd ymm2,ymm6,YMMWORD PTR [rcx]
vphsubd ymm2,ymm6,[rcx]
vphsubsw ymm2,ymm6,ymm4
vphsubsw ymm2,ymm6,YMMWORD PTR [rcx]
vphsubsw ymm2,ymm6,[rcx]
vpmaddwd ymm2,ymm6,ymm4
vpmaddwd ymm2,ymm6,YMMWORD PTR [rcx]
vpmaddwd ymm2,ymm6,[rcx]
vpmaddubsw ymm2,ymm6,ymm4
vpmaddubsw ymm2,ymm6,YMMWORD PTR [rcx]
vpmaddubsw ymm2,ymm6,[rcx]
vpmaxsb ymm2,ymm6,ymm4
vpmaxsb ymm2,ymm6,YMMWORD PTR [rcx]
vpmaxsb ymm2,ymm6,[rcx]
vpmaxsw ymm2,ymm6,ymm4
vpmaxsw ymm2,ymm6,YMMWORD PTR [rcx]
vpmaxsw ymm2,ymm6,[rcx]
vpmaxsd ymm2,ymm6,ymm4
vpmaxsd ymm2,ymm6,YMMWORD PTR [rcx]
vpmaxsd ymm2,ymm6,[rcx]
vpmaxub ymm2,ymm6,ymm4
vpmaxub ymm2,ymm6,YMMWORD PTR [rcx]
vpmaxub ymm2,ymm6,[rcx]
vpmaxuw ymm2,ymm6,ymm4
vpmaxuw ymm2,ymm6,YMMWORD PTR [rcx]
vpmaxuw ymm2,ymm6,[rcx]
vpmaxud ymm2,ymm6,ymm4
vpmaxud ymm2,ymm6,YMMWORD PTR [rcx]
vpmaxud ymm2,ymm6,[rcx]
vpminsb ymm2,ymm6,ymm4
vpminsb ymm2,ymm6,YMMWORD PTR [rcx]
vpminsb ymm2,ymm6,[rcx]
vpminsw ymm2,ymm6,ymm4
vpminsw ymm2,ymm6,YMMWORD PTR [rcx]
vpminsw ymm2,ymm6,[rcx]
vpminsd ymm2,ymm6,ymm4
vpminsd ymm2,ymm6,YMMWORD PTR [rcx]
vpminsd ymm2,ymm6,[rcx]
vpminub ymm2,ymm6,ymm4
vpminub ymm2,ymm6,YMMWORD PTR [rcx]
vpminub ymm2,ymm6,[rcx]
vpminuw ymm2,ymm6,ymm4
vpminuw ymm2,ymm6,YMMWORD PTR [rcx]
vpminuw ymm2,ymm6,[rcx]
vpminud ymm2,ymm6,ymm4
vpminud ymm2,ymm6,YMMWORD PTR [rcx]
vpminud ymm2,ymm6,[rcx]
vpmulhuw ymm2,ymm6,ymm4
vpmulhuw ymm2,ymm6,YMMWORD PTR [rcx]
vpmulhuw ymm2,ymm6,[rcx]
vpmulhrsw ymm2,ymm6,ymm4
vpmulhrsw ymm2,ymm6,YMMWORD PTR [rcx]
vpmulhrsw ymm2,ymm6,[rcx]
vpmulhw ymm2,ymm6,ymm4
vpmulhw ymm2,ymm6,YMMWORD PTR [rcx]
vpmulhw ymm2,ymm6,[rcx]
vpmullw ymm2,ymm6,ymm4
vpmullw ymm2,ymm6,YMMWORD PTR [rcx]
vpmullw ymm2,ymm6,[rcx]
vpmulld ymm2,ymm6,ymm4
vpmulld ymm2,ymm6,YMMWORD PTR [rcx]
vpmulld ymm2,ymm6,[rcx]
vpmuludq ymm2,ymm6,ymm4
vpmuludq ymm2,ymm6,YMMWORD PTR [rcx]
vpmuludq ymm2,ymm6,[rcx]
vpmuldq ymm2,ymm6,ymm4
vpmuldq ymm2,ymm6,YMMWORD PTR [rcx]
vpmuldq ymm2,ymm6,[rcx]
vpor ymm2,ymm6,ymm4
vpor ymm2,ymm6,YMMWORD PTR [rcx]
vpor ymm2,ymm6,[rcx]
vpsadbw ymm2,ymm6,ymm4
vpsadbw ymm2,ymm6,YMMWORD PTR [rcx]
vpsadbw ymm2,ymm6,[rcx]
vpshufb ymm2,ymm6,ymm4
vpshufb ymm2,ymm6,YMMWORD PTR [rcx]
vpshufb ymm2,ymm6,[rcx]
vpsignb ymm2,ymm6,ymm4
vpsignb ymm2,ymm6,YMMWORD PTR [rcx]
vpsignb ymm2,ymm6,[rcx]
vpsignw ymm2,ymm6,ymm4
vpsignw ymm2,ymm6,YMMWORD PTR [rcx]
vpsignw ymm2,ymm6,[rcx]
vpsignd ymm2,ymm6,ymm4
vpsignd ymm2,ymm6,YMMWORD PTR [rcx]
vpsignd ymm2,ymm6,[rcx]
vpsubb ymm2,ymm6,ymm4
vpsubb ymm2,ymm6,YMMWORD PTR [rcx]
vpsubb ymm2,ymm6,[rcx]
vpsubw ymm2,ymm6,ymm4
vpsubw ymm2,ymm6,YMMWORD PTR [rcx]
vpsubw ymm2,ymm6,[rcx]
vpsubd ymm2,ymm6,ymm4
vpsubd ymm2,ymm6,YMMWORD PTR [rcx]
vpsubd ymm2,ymm6,[rcx]
vpsubq ymm2,ymm6,ymm4
vpsubq ymm2,ymm6,YMMWORD PTR [rcx]
vpsubq ymm2,ymm6,[rcx]
vpsubsb ymm2,ymm6,ymm4
vpsubsb ymm2,ymm6,YMMWORD PTR [rcx]
vpsubsb ymm2,ymm6,[rcx]
vpsubsw ymm2,ymm6,ymm4
vpsubsw ymm2,ymm6,YMMWORD PTR [rcx]
vpsubsw ymm2,ymm6,[rcx]
vpsubusb ymm2,ymm6,ymm4
vpsubusb ymm2,ymm6,YMMWORD PTR [rcx]
vpsubusb ymm2,ymm6,[rcx]
vpsubusw ymm2,ymm6,ymm4
vpsubusw ymm2,ymm6,YMMWORD PTR [rcx]
vpsubusw ymm2,ymm6,[rcx]
vpunpckhbw ymm2,ymm6,ymm4
vpunpckhbw ymm2,ymm6,YMMWORD PTR [rcx]
vpunpckhbw ymm2,ymm6,[rcx]
vpunpckhwd ymm2,ymm6,ymm4
vpunpckhwd ymm2,ymm6,YMMWORD PTR [rcx]
vpunpckhwd ymm2,ymm6,[rcx]
vpunpckhdq ymm2,ymm6,ymm4
vpunpckhdq ymm2,ymm6,YMMWORD PTR [rcx]
vpunpckhdq ymm2,ymm6,[rcx]
vpunpckhqdq ymm2,ymm6,ymm4
vpunpckhqdq ymm2,ymm6,YMMWORD PTR [rcx]
vpunpckhqdq ymm2,ymm6,[rcx]
vpunpcklbw ymm2,ymm6,ymm4
vpunpcklbw ymm2,ymm6,YMMWORD PTR [rcx]
vpunpcklbw ymm2,ymm6,[rcx]
vpunpcklwd ymm2,ymm6,ymm4
vpunpcklwd ymm2,ymm6,YMMWORD PTR [rcx]
vpunpcklwd ymm2,ymm6,[rcx]
vpunpckldq ymm2,ymm6,ymm4
vpunpckldq ymm2,ymm6,YMMWORD PTR [rcx]
vpunpckldq ymm2,ymm6,[rcx]
vpunpcklqdq ymm2,ymm6,ymm4
vpunpcklqdq ymm2,ymm6,YMMWORD PTR [rcx]
vpunpcklqdq ymm2,ymm6,[rcx]
vpxor ymm2,ymm6,ymm4
vpxor ymm2,ymm6,YMMWORD PTR [rcx]
vpxor ymm2,ymm6,[rcx]
# Tests for op ymm/mem256, ymm
vpabsb ymm6,ymm4
vpabsb ymm4,YMMWORD PTR [rcx]
vpabsb ymm4,[rcx]
vpabsw ymm6,ymm4
vpabsw ymm4,YMMWORD PTR [rcx]
vpabsw ymm4,[rcx]
vpabsd ymm6,ymm4
vpabsd ymm4,YMMWORD PTR [rcx]
vpabsd ymm4,[rcx]
# Tests for op imm8, ymm/mem256, ymm, ymm
vmpsadbw ymm2,ymm6,ymm4,7
vmpsadbw ymm2,ymm6,YMMWORD PTR [rcx],7
vmpsadbw ymm2,ymm6,[rcx],7
vpalignr ymm2,ymm6,ymm4,7
vpalignr ymm2,ymm6,YMMWORD PTR [rcx],7
vpalignr ymm2,ymm6,[rcx],7
vpblendw ymm2,ymm6,ymm4,7
vpblendw ymm2,ymm6,YMMWORD PTR [rcx],7
vpblendw ymm2,ymm6,[rcx],7
# Tests for op ymm, ymm/mem256, ymm, ymm
vpblendvb ymm7,ymm2,ymm6,ymm4
vpblendvb ymm7,ymm2,YMMWORD PTR [rcx],ymm4
vpblendvb ymm7,ymm2,[rcx],ymm4
# Tests for op xmm/mem128, ymm, ymm
vpsllw ymm2,ymm6,xmm4
vpsllw ymm2,ymm6,XMMWORD PTR [rcx]
vpsllw ymm2,ymm6,[rcx]
vpslld ymm2,ymm6,xmm4
vpslld ymm2,ymm6,XMMWORD PTR [rcx]
vpslld ymm2,ymm6,[rcx]
vpsllq ymm2,ymm6,xmm4
vpsllq ymm2,ymm6,XMMWORD PTR [rcx]
vpsllq ymm2,ymm6,[rcx]
vpsraw ymm2,ymm6,xmm4
vpsraw ymm2,ymm6,XMMWORD PTR [rcx]
vpsraw ymm2,ymm6,[rcx]
vpsrad ymm2,ymm6,xmm4
vpsrad ymm2,ymm6,XMMWORD PTR [rcx]
vpsrad ymm2,ymm6,[rcx]
vpsrlw ymm2,ymm6,xmm4
vpsrlw ymm2,ymm6,XMMWORD PTR [rcx]
vpsrlw ymm2,ymm6,[rcx]
vpsrld ymm2,ymm6,xmm4
vpsrld ymm2,ymm6,XMMWORD PTR [rcx]
vpsrld ymm2,ymm6,[rcx]
vpsrlq ymm2,ymm6,xmm4
vpsrlq ymm2,ymm6,XMMWORD PTR [rcx]
vpsrlq ymm2,ymm6,[rcx]
# Tests for op xmm/mem128, ymm
vpmovsxbw ymm4,xmm4
vpmovsxbw ymm4,XMMWORD PTR [rcx]
vpmovsxbw ymm4,[rcx]
vpmovsxwd ymm4,xmm4
vpmovsxwd ymm4,XMMWORD PTR [rcx]
vpmovsxwd ymm4,[rcx]
vpmovsxdq ymm4,xmm4
vpmovsxdq ymm4,XMMWORD PTR [rcx]
vpmovsxdq ymm4,[rcx]
vpmovzxbw ymm4,xmm4
vpmovzxbw ymm4,XMMWORD PTR [rcx]
vpmovzxbw ymm4,[rcx]
vpmovzxwd ymm4,xmm4
vpmovzxwd ymm4,XMMWORD PTR [rcx]
vpmovzxwd ymm4,[rcx]
vpmovzxdq ymm4,xmm4
vpmovzxdq ymm4,XMMWORD PTR [rcx]
vpmovzxdq ymm4,[rcx]
# Tests for op xmm/mem64, ymm
vpmovsxbd ymm6,xmm4
vpmovsxbd ymm4,QWORD PTR [rcx]
vpmovsxbd ymm4,[rcx]
vpmovsxwq ymm6,xmm4
vpmovsxwq ymm4,QWORD PTR [rcx]
vpmovsxwq ymm4,[rcx]
vpmovzxbd ymm6,xmm4
vpmovzxbd ymm4,QWORD PTR [rcx]
vpmovzxbd ymm4,[rcx]
vpmovzxwq ymm6,xmm4
vpmovzxwq ymm4,QWORD PTR [rcx]
vpmovzxwq ymm4,[rcx]
# Tests for op xmm/mem32, ymm
vpmovsxbq ymm4,xmm4
vpmovsxbq ymm4,DWORD PTR [rcx]
vpmovsxbq ymm4,[rcx]
vpmovzxbq ymm4,xmm4
vpmovzxbq ymm4,DWORD PTR [rcx]
vpmovzxbq ymm4,[rcx]
|
stsp/binutils-ia16
| 1,216
|
gas/testsuite/gas/i386/x86-64-avx-swap.s
|
# Check 64bit instructions with encoding options
#
# Each mov-family instruction below has two legal encodings for its
# register-to-register form (the "load" opcode and the "store" opcode).
# Using %xmm8/%ymm8 as one operand makes the direction the assembler
# picked visible in the emitted REX prefix, so the expected-output file
# can verify which encoding was chosen.  Instruction text must not be
# changed: it is matched against the corresponding .d disassembly file.
.allow_index_reg
.text
_start:
# Tests for op ymm, ymm
vmovapd %ymm8,%ymm6
vmovaps %ymm8,%ymm6
vmovdqa %ymm8,%ymm6
vmovdqu %ymm8,%ymm6
vmovupd %ymm8,%ymm6
vmovups %ymm8,%ymm6
# Tests for op xmm, xmm
movapd %xmm8,%xmm6
movaps %xmm8,%xmm6
movdqa %xmm8,%xmm6
movdqu %xmm8,%xmm6
movq %xmm8,%xmm6
movsd %xmm8,%xmm6
movss %xmm8,%xmm6
movupd %xmm8,%xmm6
movups %xmm8,%xmm6
vmovapd %xmm8,%xmm6
vmovaps %xmm8,%xmm6
vmovdqa %xmm8,%xmm6
vmovdqu %xmm8,%xmm6
vmovq %xmm8,%xmm6
vmovupd %xmm8,%xmm6
vmovups %xmm8,%xmm6
# Tests for op xmm, xmm, xmm
vmovsd %xmm8,%xmm6,%xmm2
vmovss %xmm8,%xmm6,%xmm2
# Same instruction set repeated in Intel syntax (operand order is
# destination-first there, so these mirror the AT&T lines above).
.intel_syntax noprefix
# Tests for op ymm, ymm
vmovapd ymm6,ymm8
vmovaps ymm6,ymm8
vmovdqa ymm6,ymm8
vmovdqu ymm6,ymm8
vmovupd ymm6,ymm8
vmovups ymm6,ymm8
# Tests for op xmm, xmm
movapd xmm6,xmm8
movaps xmm6,xmm8
movdqa xmm6,xmm8
movdqu xmm6,xmm8
movq xmm6,xmm8
movsd xmm6,xmm8
movss xmm6,xmm8
movupd xmm6,xmm8
movups xmm6,xmm8
vmovapd xmm6,xmm8
vmovaps xmm6,xmm8
vmovdqa xmm6,xmm8
vmovdqu xmm6,xmm8
vmovq xmm6,xmm8
vmovupd xmm6,xmm8
vmovups xmm6,xmm8
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm8
vmovss xmm2,xmm6,xmm8
|
stsp/binutils-ia16
| 5,796
|
gas/testsuite/gas/i386/x86_64.s
|
# Assorted x86-64 assembler coverage: REX prefixes, every addressing-mode
# shape (SIB, RIP-relative, absolute), 64-bit immediates/MOVABS, sign- and
# zero-extending moves in both AT&T and Intel syntax, and symbol-reference
# forms.  Instruction text is matched against an expected disassembly, so
# only comments may be edited here.
.text
.intel_syntax noprefix
# REX prefix and addressing modes.
add edx,ecx
add edx,r9d
add r10d,ecx
add rdx,rcx
add r10,r9
add r8d,eax
add r8w,ax
add r8,rax
add eax,0x44332211
add rax,0xfffffffff4332211
add ax,0x4433
add rax,0x44332211
add dl,cl
add bh,dh
add dil,sil
add r15b,sil
add dil,r14b
add r15b,r14b
PUSH RAX
PUSH R8
POP R9
ADD AL,0x11
ADD AH,0x11
ADD SPL,0x11
ADD R8B,0x11
ADD R12B,0x11
MOV RAX,CR0
MOV R8,CR0
MOV RAX,CR8
MOV CR8,RAX
REP MOVSQ #[RSI],[RDI]
REP MOVSW #[RSI],[RDI]
REP MOVSQ #[RSI],[RDI]
MOV AL, 0x11
MOV AH, 0x11
MOV SPL, 0x11
MOV R12B, 0x11
MOV EAX,0x11223344
MOV R8D,0x11223344
MOV RAX,0x1122334455667788
MOV R8,0x1122334455667788
add eax,[rax]
ADD EAX,[R8]
ADD R8D,[R8]
ADD RAX,[R8]
ADD EAX,[0x22222222+RIP]
ADD EAX,[RBP+0x00]
ADD EAX,FLAT:[0x22222222]
ADD EAX,[R13+0]
ADD EAX,[RAX+RAX*4]
ADD EAX,[R8+RAX*4]
ADD R8D,[R8+RAX*4]
ADD EAX,[R8+R8*4]
ADD [RCX+R8*4],R8D
# Walk every possible index register in a SIB byte (note RSP cannot be
# an index, hence the plain [RAX] line in the RSP slot below).
ADD EDX,[RAX+RAX*8]
ADD EDX,[RAX+RCX*8]
ADD EDX,[RAX+RDX*8]
ADD EDX,[RAX+RBX*8]
ADD EDX,[RAX]
ADD EDX,[RAX+RBP*8]
ADD EDX,[RAX+RSI*8]
ADD EDX,[RAX+RDI*8]
ADD EDX,[RAX+R8*8]
ADD EDX,[RAX+R9*8]
ADD EDX,[RAX+R10*8]
ADD EDX,[RAX+R11*8]
ADD EDX,[RAX+R12*8]
ADD EDX,[RAX+R13*8]
ADD EDX,[RAX+R14*8]
ADD EDX,[RAX+R15*8]
ADD ECX,0x11
ADD DWORD PTR [RAX],0x11
ADD QWORD PTR [RAX],0x11
ADD DWORD PTR [R8],0x11
ADD DWORD PTR [RCX+RAX*4],0x11
ADD DWORD PTR [R9+RAX*4],0x11
ADD DWORD PTR [RCX+R8*4],0x11
ADD DWORD PTR [0x22222222+RIP],0x33
ADD QWORD PTR [RIP+0x22222222],0x33
ADD DWORD PTR [RIP+0x22222222],0x33333333
ADD QWORD PTR [RIP+0x22222222],0x33333333
ADD DWORD PTR [RAX*8+0x22222222],0x33
ADD DWORD PTR [RAX+0x22222222],0x33
ADD DWORD PTR [RAX+0x22222222],0x33
ADD DWORD PTR [R8+RBP*8],0x33
ADD DWORD PTR FLAT:[0x22222222],0x33
#new instructions
MOVABS AL,FLAT:[0x8877665544332211]
MOVABS EAX,FLAT:[0x8877665544332211]
MOVABS FLAT:[0x8877665544332211],AL
MOVABS FLAT:[0x8877665544332211],EAX
MOVABS RAX,FLAT:[0x8877665544332211]
MOVABS FLAT:[0x8877665544332211],RAX
cqo
cdqe
movsx rax, eax
movsx rax, ax
movsx rax, al
# Far returns in each operand size (plain, word, dword, qword suffixed),
# with and without an immediate stack-pop count.
retf
retf 16
retfw
retfw 2
retfd
retfd 4
retfq
retfq 8
bar:
.att_syntax
#testcase for symbol references.
#immediates - various sizes:
mov $symbol, %al
mov $symbol, %ax
mov $symbol, %eax
mov $symbol, %rax
#addressing modes:
#absolute 64bit addressing
movabs symbol, %eax
#absolute 32bit addressing
mov symbol, %eax
#arithmetic
mov symbol(%rax), %eax
#RIP relative
mov symbol(%rip), %eax
.intel_syntax noprefix
#immediates - various sizes:
mov al, offset flat:symbol
mov ax, offset flat:symbol
mov eax, offset flat:symbol
mov rax, offset flat:symbol
#parts aren't supported by the parser, yet (and not at all for symbol refs)
#mov eax, high part symbol
#mov eax, low part symbol
#addressing modes
#absolute 64bit addressing
movabs eax, [symbol]
#absolute 32bit addressing
mov eax, [symbol]
#arithmetic
mov eax, [rax+symbol]
#RIP relative
mov eax, [rip+symbol]
foo:
.att_syntax
#absolute 64bit addressing
mov 0x8877665544332211,%al
mov 0x8877665544332211,%ax
mov 0x8877665544332211,%eax
mov 0x8877665544332211,%rax
mov %al,0x8877665544332211
mov %ax,0x8877665544332211
mov %eax,0x8877665544332211
mov %rax,0x8877665544332211
movb 0x8877665544332211,%al
movw 0x8877665544332211,%ax
movl 0x8877665544332211,%eax
movq 0x8877665544332211,%rax
movb %al,0x8877665544332211
movw %ax,0x8877665544332211
movl %eax,0x8877665544332211
movq %rax,0x8877665544332211
#absolute signed 32bit addressing
mov 0xffffffffff332211,%al
mov 0xffffffffff332211,%ax
mov 0xffffffffff332211,%eax
mov 0xffffffffff332211,%rax
mov %al,0xffffffffff332211
mov %ax,0xffffffffff332211
mov %eax,0xffffffffff332211
mov %rax,0xffffffffff332211
movb 0xffffffffff332211,%al
movw 0xffffffffff332211,%ax
movl 0xffffffffff332211,%eax
movq 0xffffffffff332211,%rax
movb %al,0xffffffffff332211
movw %ax,0xffffffffff332211
movl %eax,0xffffffffff332211
movq %rax,0xffffffffff332211
cmpxchg16b (%rax)
.intel_syntax noprefix
cmpxchg16b oword ptr [rax]
.att_syntax
# movsx/movzx with explicit destination-size mnemonics and with size
# inferred from register operands, in both syntaxes.
movsx %al, %si
movsx %al, %esi
movsx %al, %rsi
movsx %ax, %esi
movsx %ax, %rsi
movsx %eax, %rsi
movsx (%rax), %dx
movsbl (%rax), %edx
movsbq (%rax), %rdx
movsbw (%rax), %dx
movswl (%rax), %edx
movswq (%rax), %rdx
movzx %al, %si
movzx %al, %esi
movzx %al, %rsi
movzx %ax, %esi
movzx %ax, %rsi
movzx (%rax), %dx
movzb (%rax), %edx
movzb (%rax), %rdx
movzb (%rax), %dx
movzbl (%rax), %edx
movzbq (%rax), %rdx
movzbw (%rax), %dx
movzwl (%rax), %edx
movzwq (%rax), %rdx
.intel_syntax noprefix
movsx si,al
movsx esi,al
movsx rsi,al
movsx esi,ax
movsx rsi,ax
movsx rsi,eax
movsx edx,BYTE PTR [rax]
movsx rdx,BYTE PTR [rax]
movsx dx,BYTE PTR [rax]
movsx edx,WORD PTR [rax]
movsx rdx,WORD PTR [rax]
movzx si,al
movzx esi,al
movzx rsi,al
movzx esi,ax
movzx rsi,ax
movzx edx,BYTE PTR [rax]
movzx rdx,BYTE PTR [rax]
movzx dx,BYTE PTR [rax]
movzx edx,WORD PTR [rax]
movzx rdx,WORD PTR [rax]
movq xmm1,QWORD PTR [rsp]
movq xmm1,[rsp]
movq QWORD PTR [rsp],xmm1
movq [rsp],xmm1
.att_syntax
fnstsw
fnstsw %ax
fstsw
fstsw %ax
.intel_syntax noprefix
fnstsw
fnstsw ax
fstsw
fstsw ax
.att_syntax
movsx (%rax),%ax
movsxb (%rax), %dx
movsxb (%rax), %edx
movsxb (%rax), %rdx
movsxw (%rax), %edx
movsxw (%rax), %rdx
movsxl (%rax), %rdx
movsxd (%rax),%rax
movzx (%rax),%ax
movzxb (%rax), %dx
movzxb (%rax), %edx
movzxb (%rax), %rdx
movzxw (%rax), %edx
movzxw (%rax), %rdx
movnti %eax, (%rax)
movntil %eax, (%rax)
movnti %rax, (%rax)
movntiq %rax, (%rax)
.intel_syntax noprefix
movsx ax, BYTE PTR [rax]
movsx eax, BYTE PTR [rax]
movsx eax, WORD PTR [rax]
movsx rax, WORD PTR [rax]
movsx rax, DWORD PTR [rax]
movsxd rax, [rax]
movzx ax, BYTE PTR [rax]
movzx eax, BYTE PTR [rax]
movzx eax, WORD PTR [rax]
movzx rax, WORD PTR [rax]
movnti dword ptr [rax], eax
movnti qword ptr [rax], rax
mov eax, tr1
mov tr0, rcx
|
stsp/binutils-ia16
| 1,413
|
gas/testsuite/gas/i386/ssse3.s
|
# SSSE3 New Instructions
#
# Each SSSE3 mnemonic is exercised in four forms: MMX register with a
# memory source, MMX reg-reg, XMM register with a memory source, and
# XMM reg-reg.  Do not alter the instruction text — it is checked
# against the expected disassembly.
.text
foo:
phaddw (%ecx),%mm0
phaddw %mm1,%mm0
phaddw (%ecx),%xmm0
phaddw %xmm1,%xmm0
phaddd (%ecx),%mm0
phaddd %mm1,%mm0
phaddd (%ecx),%xmm0
phaddd %xmm1,%xmm0
phaddsw (%ecx),%mm0
phaddsw %mm1,%mm0
phaddsw (%ecx),%xmm0
phaddsw %xmm1,%xmm0
phsubw (%ecx),%mm0
phsubw %mm1,%mm0
phsubw (%ecx),%xmm0
phsubw %xmm1,%xmm0
phsubd (%ecx),%mm0
phsubd %mm1,%mm0
phsubd (%ecx),%xmm0
phsubd %xmm1,%xmm0
phsubsw (%ecx),%mm0
phsubsw %mm1,%mm0
phsubsw (%ecx),%xmm0
phsubsw %xmm1,%xmm0
pmaddubsw (%ecx),%mm0
pmaddubsw %mm1,%mm0
pmaddubsw (%ecx),%xmm0
pmaddubsw %xmm1,%xmm0
pmulhrsw (%ecx),%mm0
pmulhrsw %mm1,%mm0
pmulhrsw (%ecx),%xmm0
pmulhrsw %xmm1,%xmm0
pshufb (%ecx),%mm0
pshufb %mm1,%mm0
pshufb (%ecx),%xmm0
pshufb %xmm1,%xmm0
psignb (%ecx),%mm0
psignb %mm1,%mm0
psignb (%ecx),%xmm0
psignb %xmm1,%xmm0
psignw (%ecx),%mm0
psignw %mm1,%mm0
psignw (%ecx),%xmm0
psignw %xmm1,%xmm0
psignd (%ecx),%mm0
psignd %mm1,%mm0
psignd (%ecx),%xmm0
psignd %xmm1,%xmm0
palignr $0x2,(%ecx),%mm0
palignr $0x2,%mm1,%mm0
palignr $0x2,(%ecx),%xmm0
palignr $0x2,%xmm1,%xmm0
pabsb (%ecx),%mm0
pabsb %mm1,%mm0
pabsb (%ecx),%xmm0
pabsb %xmm1,%xmm0
pabsw (%ecx),%mm0
pabsw %mm1,%mm0
pabsw (%ecx),%xmm0
pabsw %xmm1,%xmm0
pabsd (%ecx),%mm0
pabsd %mm1,%mm0
pabsd (%ecx),%xmm0
pabsd %xmm1,%xmm0
.p2align 4,0
|
stsp/binutils-ia16
| 3,493
|
gas/testsuite/gas/i386/katmai.s
|
#PIII SIMD instructions
#
# Coverage of the Pentium III (Katmai) SSE and MMX-extension instruction
# set: packed/scalar arithmetic, the cmp* pseudo-op comparison family,
# conversions, moves, prefetch hints, and the non-temporal stores.
# Operands cycle through registers and simple memory forms so each
# encoding variant is emitted once.  Instruction text must stay as-is;
# it is compared against the expected disassembly.
.text
foo:
addps (%ecx),%xmm0
addps %xmm2,%xmm1
addss (%ebx),%xmm2
addss %xmm4,%xmm3
andnps 0x0(%ebp),%xmm4
andnps %xmm6,%xmm5
andps (%edi),%xmm6
andps %xmm0,%xmm7
# Explicit-immediate cmpps/cmpss forms, then the named pseudo-ops
# (cmpeq*, cmplt*, ...) that encode the same immediates.
cmpps $0x2,%xmm1,%xmm0
cmpps $0x3,(%edx),%xmm1
cmpss $0x4,%xmm2,%xmm2
cmpss $0x5,(%esp,1),%xmm3
cmpps $0x6,%xmm5,%xmm4
cmpps $0x7,(%esi),%xmm5
cmpss $0x0,%xmm7,%xmm6
cmpss $0x1,(%eax),%xmm7
cmpeqps %xmm1,%xmm0
cmpeqps (%edx),%xmm1
cmpeqss %xmm2,%xmm2
cmpeqss (%esp,1),%xmm3
cmpltps %xmm5,%xmm4
cmpltps (%esi),%xmm5
cmpltss %xmm7,%xmm6
cmpltss (%eax),%xmm7
cmpleps (%ecx),%xmm0
cmpleps %xmm2,%xmm1
cmpless (%ebx),%xmm2
cmpless %xmm4,%xmm3
cmpunordps 0x0(%ebp),%xmm4
cmpunordps %xmm6,%xmm5
cmpunordss (%edi),%xmm6
cmpunordss %xmm0,%xmm7
cmpneqps %xmm1,%xmm0
cmpneqps (%edx),%xmm1
cmpneqss %xmm2,%xmm2
cmpneqss (%esp,1),%xmm3
cmpnltps %xmm5,%xmm4
cmpnltps (%esi),%xmm5
cmpnltss %xmm7,%xmm6
cmpnltss (%eax),%xmm7
cmpnleps (%ecx),%xmm0
cmpnleps %xmm2,%xmm1
cmpnless (%ebx),%xmm2
cmpnless %xmm4,%xmm3
cmpordps 0x0(%ebp),%xmm4
cmpordps %xmm6,%xmm5
cmpordss (%edi),%xmm6
cmpordss %xmm0,%xmm7
comiss %xmm1,%xmm0
comiss (%edx),%xmm1
cvtpi2ps %mm3,%xmm2
cvtpi2ps (%esp,1),%xmm3
cvtsi2ss %ebp,%xmm4
cvtsi2ss (%esi),%xmm5
cvtps2pi %xmm7,%mm6
cvtps2pi (%eax),%mm7
cvtss2si (%ecx),%eax
cvtss2si %xmm2,%ecx
cvttps2pi (%ebx),%mm2
cvttps2pi %xmm4,%mm3
cvttss2si 0x0(%ebp),%esp
cvttss2si %xmm6,%ebp
divps %xmm1,%xmm0
divps (%edx),%xmm1
divss %xmm3,%xmm2
divss (%esp,1),%xmm3
ldmxcsr 0x0(%ebp)
stmxcsr (%esi)
sfence
maxps %xmm1,%xmm0
maxps (%edx),%xmm1
maxss %xmm3,%xmm2
maxss (%esp,1),%xmm3
minps %xmm5,%xmm4
minps (%esi),%xmm5
minss %xmm7,%xmm6
minss (%eax),%xmm7
movaps %xmm1,%xmm0
movaps %xmm2,(%ecx)
movaps (%edx),%xmm2
movlhps %xmm4,%xmm3
movhps %xmm5,(%esp,1)
movhps (%esi),%xmm5
movhlps %xmm7,%xmm6
movlps %xmm0,(%edi)
movlps (%eax),%xmm0
movmskps %xmm2,%ecx
movups %xmm3,%xmm2
movups %xmm4,(%edx)
movups 0x0(%ebp),%xmm4
movss %xmm6,%xmm5
movss %xmm7,(%esi)
movss (%eax),%xmm7
mulps %xmm1,%xmm0
mulps (%edx),%xmm1
mulss %xmm2,%xmm2
mulss (%esp,1),%xmm3
orps %xmm5,%xmm4
orps (%esi),%xmm5
rcpps %xmm7,%xmm6
rcpps (%eax),%xmm7
rcpss (%ecx),%xmm0
rcpss %xmm2,%xmm1
rsqrtps (%ebx),%xmm2
rsqrtps %xmm4,%xmm3
rsqrtss 0x0(%ebp),%xmm4
rsqrtss %xmm6,%xmm5
shufps $0x2,(%edi),%xmm6
shufps $0x3,%xmm0,%xmm7
sqrtps %xmm1,%xmm0
sqrtps (%edx),%xmm1
sqrtss %xmm2,%xmm2
sqrtss (%esp,1),%xmm3
subps %xmm5,%xmm4
subps (%esi),%xmm5
subss %xmm7,%xmm6
subss (%eax),%xmm7
ucomiss (%ecx),%xmm0
ucomiss %xmm2,%xmm1
unpckhps (%ebx),%xmm2
unpckhps %xmm4,%xmm3
unpcklps 0x0(%ebp),%xmm4
unpcklps %xmm6,%xmm5
xorps (%edi),%xmm6
xorps %xmm0,%xmm7
# MMX extensions introduced alongside SSE (operate on %mm registers).
pavgb %mm1,%mm0
pavgb (%edx),%mm1
pavgw %mm3,%mm2
pavgw (%esp,1),%mm3
pextrw $0x0,%mm1,%eax
pinsrw $0x1,(%ecx),%mm1
pinsrw $0x2,%edx,%mm2
pmaxsw %mm1,%mm0
pmaxsw (%edx),%mm1
pmaxub %mm2,%mm2
pmaxub (%esp,1),%mm3
pminsw %mm5,%mm4
pminsw (%esi),%mm5
pminub %mm7,%mm6
pminub (%eax),%mm7
pmovmskb %mm5,%eax
pmulhuw %mm5,%mm4
pmulhuw (%esi),%mm5
psadbw %mm7,%mm6
psadbw (%eax),%mm7
pshufw $0x1,%mm2,%mm3
pshufw $0x4,0x0(%ebp),%mm6
maskmovq %mm7,%mm0
movntps %xmm6,(%ebx)
movntq %mm2,(%eax)
prefetchnta (%esi)
prefetcht0 (%eax,%ebx,4)
prefetcht1 (%edx)
prefetcht2 (%ecx)
# A bad sfence modrm byte
.byte 0x65,0x0F,0xAE,0xff
# Pad out to good alignment
.p2align 4,0
|
stsp/binutils-ia16
| 16,324
|
gas/testsuite/gas/i386/fma.s
|
# Check FMA instructions
#
# Exercises every VEX-encoded FMA3 mnemonic (vfmadd/vfmsub/vfnmadd/
# vfnmsub and the add-sub hybrids, each in 132/213/231 operand orders)
# across its four operand-size classes: packed 256-bit (ymm/mem256),
# packed 128-bit (xmm/mem128), scalar double (xmm/mem64) and scalar
# single (xmm/mem32).  The AT&T-syntax block is then repeated in Intel
# syntax.  Instruction text is matched against the expected disassembly
# and must not be altered.
.allow_index_reg
.text
_start:
# Tests for op ymm/mem256, ymm, ymm
vfmadd132pd %ymm4,%ymm6,%ymm2
vfmadd132pd (%ecx),%ymm6,%ymm2
vfmadd132ps %ymm4,%ymm6,%ymm2
vfmadd132ps (%ecx),%ymm6,%ymm2
vfmadd213pd %ymm4,%ymm6,%ymm2
vfmadd213pd (%ecx),%ymm6,%ymm2
vfmadd213ps %ymm4,%ymm6,%ymm2
vfmadd213ps (%ecx),%ymm6,%ymm2
vfmadd231pd %ymm4,%ymm6,%ymm2
vfmadd231pd (%ecx),%ymm6,%ymm2
vfmadd231ps %ymm4,%ymm6,%ymm2
vfmadd231ps (%ecx),%ymm6,%ymm2
vfmaddsub132pd %ymm4,%ymm6,%ymm2
vfmaddsub132pd (%ecx),%ymm6,%ymm2
vfmaddsub132ps %ymm4,%ymm6,%ymm2
vfmaddsub132ps (%ecx),%ymm6,%ymm2
vfmaddsub213pd %ymm4,%ymm6,%ymm2
vfmaddsub213pd (%ecx),%ymm6,%ymm2
vfmaddsub213ps %ymm4,%ymm6,%ymm2
vfmaddsub213ps (%ecx),%ymm6,%ymm2
vfmaddsub231pd %ymm4,%ymm6,%ymm2
vfmaddsub231pd (%ecx),%ymm6,%ymm2
vfmaddsub231ps %ymm4,%ymm6,%ymm2
vfmaddsub231ps (%ecx),%ymm6,%ymm2
vfmsubadd132pd %ymm4,%ymm6,%ymm2
vfmsubadd132pd (%ecx),%ymm6,%ymm2
vfmsubadd132ps %ymm4,%ymm6,%ymm2
vfmsubadd132ps (%ecx),%ymm6,%ymm2
vfmsubadd213pd %ymm4,%ymm6,%ymm2
vfmsubadd213pd (%ecx),%ymm6,%ymm2
vfmsubadd213ps %ymm4,%ymm6,%ymm2
vfmsubadd213ps (%ecx),%ymm6,%ymm2
vfmsubadd231pd %ymm4,%ymm6,%ymm2
vfmsubadd231pd (%ecx),%ymm6,%ymm2
vfmsubadd231ps %ymm4,%ymm6,%ymm2
vfmsubadd231ps (%ecx),%ymm6,%ymm2
vfmsub132pd %ymm4,%ymm6,%ymm2
vfmsub132pd (%ecx),%ymm6,%ymm2
vfmsub132ps %ymm4,%ymm6,%ymm2
vfmsub132ps (%ecx),%ymm6,%ymm2
vfmsub213pd %ymm4,%ymm6,%ymm2
vfmsub213pd (%ecx),%ymm6,%ymm2
vfmsub213ps %ymm4,%ymm6,%ymm2
vfmsub213ps (%ecx),%ymm6,%ymm2
vfmsub231pd %ymm4,%ymm6,%ymm2
vfmsub231pd (%ecx),%ymm6,%ymm2
vfmsub231ps %ymm4,%ymm6,%ymm2
vfmsub231ps (%ecx),%ymm6,%ymm2
vfnmadd132pd %ymm4,%ymm6,%ymm2
vfnmadd132pd (%ecx),%ymm6,%ymm2
vfnmadd132ps %ymm4,%ymm6,%ymm2
vfnmadd132ps (%ecx),%ymm6,%ymm2
vfnmadd213pd %ymm4,%ymm6,%ymm2
vfnmadd213pd (%ecx),%ymm6,%ymm2
vfnmadd213ps %ymm4,%ymm6,%ymm2
vfnmadd213ps (%ecx),%ymm6,%ymm2
vfnmadd231pd %ymm4,%ymm6,%ymm2
vfnmadd231pd (%ecx),%ymm6,%ymm2
vfnmadd231ps %ymm4,%ymm6,%ymm2
vfnmadd231ps (%ecx),%ymm6,%ymm2
vfnmsub132pd %ymm4,%ymm6,%ymm2
vfnmsub132pd (%ecx),%ymm6,%ymm2
vfnmsub132ps %ymm4,%ymm6,%ymm2
vfnmsub132ps (%ecx),%ymm6,%ymm2
vfnmsub213pd %ymm4,%ymm6,%ymm2
vfnmsub213pd (%ecx),%ymm6,%ymm2
vfnmsub213ps %ymm4,%ymm6,%ymm2
vfnmsub213ps (%ecx),%ymm6,%ymm2
vfnmsub231pd %ymm4,%ymm6,%ymm2
vfnmsub231pd (%ecx),%ymm6,%ymm2
vfnmsub231ps %ymm4,%ymm6,%ymm2
vfnmsub231ps (%ecx),%ymm6,%ymm2
# Tests for op xmm/mem128, xmm, xmm
vfmadd132pd %xmm4,%xmm6,%xmm2
vfmadd132pd (%ecx),%xmm6,%xmm7
vfmadd132ps %xmm4,%xmm6,%xmm2
vfmadd132ps (%ecx),%xmm6,%xmm7
vfmadd213pd %xmm4,%xmm6,%xmm2
vfmadd213pd (%ecx),%xmm6,%xmm7
vfmadd213ps %xmm4,%xmm6,%xmm2
vfmadd213ps (%ecx),%xmm6,%xmm7
vfmadd231pd %xmm4,%xmm6,%xmm2
vfmadd231pd (%ecx),%xmm6,%xmm7
vfmadd231ps %xmm4,%xmm6,%xmm2
vfmadd231ps (%ecx),%xmm6,%xmm7
vfmaddsub132pd %xmm4,%xmm6,%xmm2
vfmaddsub132pd (%ecx),%xmm6,%xmm7
vfmaddsub132ps %xmm4,%xmm6,%xmm2
vfmaddsub132ps (%ecx),%xmm6,%xmm7
vfmaddsub213pd %xmm4,%xmm6,%xmm2
vfmaddsub213pd (%ecx),%xmm6,%xmm7
vfmaddsub213ps %xmm4,%xmm6,%xmm2
vfmaddsub213ps (%ecx),%xmm6,%xmm7
vfmaddsub231pd %xmm4,%xmm6,%xmm2
vfmaddsub231pd (%ecx),%xmm6,%xmm7
vfmaddsub231ps %xmm4,%xmm6,%xmm2
vfmaddsub231ps (%ecx),%xmm6,%xmm7
vfmsubadd132pd %xmm4,%xmm6,%xmm2
vfmsubadd132pd (%ecx),%xmm6,%xmm7
vfmsubadd132ps %xmm4,%xmm6,%xmm2
vfmsubadd132ps (%ecx),%xmm6,%xmm7
vfmsubadd213pd %xmm4,%xmm6,%xmm2
vfmsubadd213pd (%ecx),%xmm6,%xmm7
vfmsubadd213ps %xmm4,%xmm6,%xmm2
vfmsubadd213ps (%ecx),%xmm6,%xmm7
vfmsubadd231pd %xmm4,%xmm6,%xmm2
vfmsubadd231pd (%ecx),%xmm6,%xmm7
vfmsubadd231ps %xmm4,%xmm6,%xmm2
vfmsubadd231ps (%ecx),%xmm6,%xmm7
vfmsub132pd %xmm4,%xmm6,%xmm2
vfmsub132pd (%ecx),%xmm6,%xmm7
vfmsub132ps %xmm4,%xmm6,%xmm2
vfmsub132ps (%ecx),%xmm6,%xmm7
vfmsub213pd %xmm4,%xmm6,%xmm2
vfmsub213pd (%ecx),%xmm6,%xmm7
vfmsub213ps %xmm4,%xmm6,%xmm2
vfmsub213ps (%ecx),%xmm6,%xmm7
vfmsub231pd %xmm4,%xmm6,%xmm2
vfmsub231pd (%ecx),%xmm6,%xmm7
vfmsub231ps %xmm4,%xmm6,%xmm2
vfmsub231ps (%ecx),%xmm6,%xmm7
vfnmadd132pd %xmm4,%xmm6,%xmm2
vfnmadd132pd (%ecx),%xmm6,%xmm7
vfnmadd132ps %xmm4,%xmm6,%xmm2
vfnmadd132ps (%ecx),%xmm6,%xmm7
vfnmadd213pd %xmm4,%xmm6,%xmm2
vfnmadd213pd (%ecx),%xmm6,%xmm7
vfnmadd213ps %xmm4,%xmm6,%xmm2
vfnmadd213ps (%ecx),%xmm6,%xmm7
vfnmadd231pd %xmm4,%xmm6,%xmm2
vfnmadd231pd (%ecx),%xmm6,%xmm7
vfnmadd231ps %xmm4,%xmm6,%xmm2
vfnmadd231ps (%ecx),%xmm6,%xmm7
vfnmsub132pd %xmm4,%xmm6,%xmm2
vfnmsub132pd (%ecx),%xmm6,%xmm7
vfnmsub132ps %xmm4,%xmm6,%xmm2
vfnmsub132ps (%ecx),%xmm6,%xmm7
vfnmsub213pd %xmm4,%xmm6,%xmm2
vfnmsub213pd (%ecx),%xmm6,%xmm7
vfnmsub213ps %xmm4,%xmm6,%xmm2
vfnmsub213ps (%ecx),%xmm6,%xmm7
vfnmsub231pd %xmm4,%xmm6,%xmm2
vfnmsub231pd (%ecx),%xmm6,%xmm7
vfnmsub231ps %xmm4,%xmm6,%xmm2
vfnmsub231ps (%ecx),%xmm6,%xmm7
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd %xmm4,%xmm6,%xmm2
vfmadd132sd (%ecx),%xmm6,%xmm2
vfmadd213sd %xmm4,%xmm6,%xmm2
vfmadd213sd (%ecx),%xmm6,%xmm2
vfmadd231sd %xmm4,%xmm6,%xmm2
vfmadd231sd (%ecx),%xmm6,%xmm2
vfmsub132sd %xmm4,%xmm6,%xmm2
vfmsub132sd (%ecx),%xmm6,%xmm2
vfmsub213sd %xmm4,%xmm6,%xmm2
vfmsub213sd (%ecx),%xmm6,%xmm2
vfmsub231sd %xmm4,%xmm6,%xmm2
vfmsub231sd (%ecx),%xmm6,%xmm2
vfnmadd132sd %xmm4,%xmm6,%xmm2
vfnmadd132sd (%ecx),%xmm6,%xmm2
vfnmadd213sd %xmm4,%xmm6,%xmm2
vfnmadd213sd (%ecx),%xmm6,%xmm2
vfnmadd231sd %xmm4,%xmm6,%xmm2
vfnmadd231sd (%ecx),%xmm6,%xmm2
vfnmsub132sd %xmm4,%xmm6,%xmm2
vfnmsub132sd (%ecx),%xmm6,%xmm2
vfnmsub213sd %xmm4,%xmm6,%xmm2
vfnmsub213sd (%ecx),%xmm6,%xmm2
vfnmsub231sd %xmm4,%xmm6,%xmm2
vfnmsub231sd (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss %xmm4,%xmm6,%xmm2
vfmadd132ss (%ecx),%xmm6,%xmm2
vfmadd213ss %xmm4,%xmm6,%xmm2
vfmadd213ss (%ecx),%xmm6,%xmm2
vfmadd231ss %xmm4,%xmm6,%xmm2
vfmadd231ss (%ecx),%xmm6,%xmm2
vfmsub132ss %xmm4,%xmm6,%xmm2
vfmsub132ss (%ecx),%xmm6,%xmm2
vfmsub213ss %xmm4,%xmm6,%xmm2
vfmsub213ss (%ecx),%xmm6,%xmm2
vfmsub231ss %xmm4,%xmm6,%xmm2
vfmsub231ss (%ecx),%xmm6,%xmm2
vfnmadd132ss %xmm4,%xmm6,%xmm2
vfnmadd132ss (%ecx),%xmm6,%xmm2
vfnmadd213ss %xmm4,%xmm6,%xmm2
vfnmadd213ss (%ecx),%xmm6,%xmm2
vfnmadd231ss %xmm4,%xmm6,%xmm2
vfnmadd231ss (%ecx),%xmm6,%xmm2
vfnmsub132ss %xmm4,%xmm6,%xmm2
vfnmsub132ss (%ecx),%xmm6,%xmm2
vfnmsub213ss %xmm4,%xmm6,%xmm2
vfnmsub213ss (%ecx),%xmm6,%xmm2
vfnmsub231ss %xmm4,%xmm6,%xmm2
vfnmsub231ss (%ecx),%xmm6,%xmm2
# Intel-syntax repeat of the same coverage; memory operands also appear
# both with an explicit size qualifier and bare ([ecx]).
.intel_syntax noprefix
# Tests for op ymm/mem256, ymm, ymm
vfmadd132pd ymm2,ymm6,ymm4
vfmadd132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd132pd ymm2,ymm6,[ecx]
vfmadd132ps ymm2,ymm6,ymm4
vfmadd132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd132ps ymm2,ymm6,[ecx]
vfmadd213pd ymm2,ymm6,ymm4
vfmadd213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd213pd ymm2,ymm6,[ecx]
vfmadd213ps ymm2,ymm6,ymm4
vfmadd213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd213ps ymm2,ymm6,[ecx]
vfmadd231pd ymm2,ymm6,ymm4
vfmadd231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd231pd ymm2,ymm6,[ecx]
vfmadd231ps ymm2,ymm6,ymm4
vfmadd231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmadd231ps ymm2,ymm6,[ecx]
vfmaddsub132pd ymm2,ymm6,ymm4
vfmaddsub132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub132pd ymm2,ymm6,[ecx]
vfmaddsub132ps ymm2,ymm6,ymm4
vfmaddsub132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub132ps ymm2,ymm6,[ecx]
vfmaddsub213pd ymm2,ymm6,ymm4
vfmaddsub213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub213pd ymm2,ymm6,[ecx]
vfmaddsub213ps ymm2,ymm6,ymm4
vfmaddsub213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub213ps ymm2,ymm6,[ecx]
vfmaddsub231pd ymm2,ymm6,ymm4
vfmaddsub231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub231pd ymm2,ymm6,[ecx]
vfmaddsub231ps ymm2,ymm6,ymm4
vfmaddsub231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmaddsub231ps ymm2,ymm6,[ecx]
vfmsubadd132pd ymm2,ymm6,ymm4
vfmsubadd132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd132pd ymm2,ymm6,[ecx]
vfmsubadd132ps ymm2,ymm6,ymm4
vfmsubadd132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd132ps ymm2,ymm6,[ecx]
vfmsubadd213pd ymm2,ymm6,ymm4
vfmsubadd213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd213pd ymm2,ymm6,[ecx]
vfmsubadd213ps ymm2,ymm6,ymm4
vfmsubadd213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd213ps ymm2,ymm6,[ecx]
vfmsubadd231pd ymm2,ymm6,ymm4
vfmsubadd231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd231pd ymm2,ymm6,[ecx]
vfmsubadd231ps ymm2,ymm6,ymm4
vfmsubadd231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsubadd231ps ymm2,ymm6,[ecx]
vfmsub132pd ymm2,ymm6,ymm4
vfmsub132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub132pd ymm2,ymm6,[ecx]
vfmsub132ps ymm2,ymm6,ymm4
vfmsub132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub132ps ymm2,ymm6,[ecx]
vfmsub213pd ymm2,ymm6,ymm4
vfmsub213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub213pd ymm2,ymm6,[ecx]
vfmsub213ps ymm2,ymm6,ymm4
vfmsub213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub213ps ymm2,ymm6,[ecx]
vfmsub231pd ymm2,ymm6,ymm4
vfmsub231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub231pd ymm2,ymm6,[ecx]
vfmsub231ps ymm2,ymm6,ymm4
vfmsub231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfmsub231ps ymm2,ymm6,[ecx]
vfnmadd132pd ymm2,ymm6,ymm4
vfnmadd132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd132pd ymm2,ymm6,[ecx]
vfnmadd132ps ymm2,ymm6,ymm4
vfnmadd132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd132ps ymm2,ymm6,[ecx]
vfnmadd213pd ymm2,ymm6,ymm4
vfnmadd213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd213pd ymm2,ymm6,[ecx]
vfnmadd213ps ymm2,ymm6,ymm4
vfnmadd213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd213ps ymm2,ymm6,[ecx]
vfnmadd231pd ymm2,ymm6,ymm4
vfnmadd231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd231pd ymm2,ymm6,[ecx]
vfnmadd231ps ymm2,ymm6,ymm4
vfnmadd231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmadd231ps ymm2,ymm6,[ecx]
vfnmsub132pd ymm2,ymm6,ymm4
vfnmsub132pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub132pd ymm2,ymm6,[ecx]
vfnmsub132ps ymm2,ymm6,ymm4
vfnmsub132ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub132ps ymm2,ymm6,[ecx]
vfnmsub213pd ymm2,ymm6,ymm4
vfnmsub213pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub213pd ymm2,ymm6,[ecx]
vfnmsub213ps ymm2,ymm6,ymm4
vfnmsub213ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub213ps ymm2,ymm6,[ecx]
vfnmsub231pd ymm2,ymm6,ymm4
vfnmsub231pd ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub231pd ymm2,ymm6,[ecx]
vfnmsub231ps ymm2,ymm6,ymm4
vfnmsub231ps ymm2,ymm6,YMMWORD PTR [ecx]
vfnmsub231ps ymm2,ymm6,[ecx]
# Tests for op xmm/mem128, xmm, xmm
vfmadd132pd xmm2,xmm6,xmm4
vfmadd132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd132pd xmm7,xmm6,[ecx]
vfmadd132ps xmm2,xmm6,xmm4
vfmadd132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd132ps xmm7,xmm6,[ecx]
vfmadd213pd xmm2,xmm6,xmm4
vfmadd213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd213pd xmm7,xmm6,[ecx]
vfmadd213ps xmm2,xmm6,xmm4
vfmadd213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd213ps xmm7,xmm6,[ecx]
vfmadd231pd xmm2,xmm6,xmm4
vfmadd231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd231pd xmm7,xmm6,[ecx]
vfmadd231ps xmm2,xmm6,xmm4
vfmadd231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmadd231ps xmm7,xmm6,[ecx]
vfmaddsub132pd xmm2,xmm6,xmm4
vfmaddsub132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub132pd xmm7,xmm6,[ecx]
vfmaddsub132ps xmm2,xmm6,xmm4
vfmaddsub132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub132ps xmm7,xmm6,[ecx]
vfmaddsub213pd xmm2,xmm6,xmm4
vfmaddsub213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub213pd xmm7,xmm6,[ecx]
vfmaddsub213ps xmm2,xmm6,xmm4
vfmaddsub213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub213ps xmm7,xmm6,[ecx]
vfmaddsub231pd xmm2,xmm6,xmm4
vfmaddsub231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub231pd xmm7,xmm6,[ecx]
vfmaddsub231ps xmm2,xmm6,xmm4
vfmaddsub231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmaddsub231ps xmm7,xmm6,[ecx]
vfmsubadd132pd xmm2,xmm6,xmm4
vfmsubadd132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd132pd xmm7,xmm6,[ecx]
vfmsubadd132ps xmm2,xmm6,xmm4
vfmsubadd132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd132ps xmm7,xmm6,[ecx]
vfmsubadd213pd xmm2,xmm6,xmm4
vfmsubadd213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd213pd xmm7,xmm6,[ecx]
vfmsubadd213ps xmm2,xmm6,xmm4
vfmsubadd213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd213ps xmm7,xmm6,[ecx]
vfmsubadd231pd xmm2,xmm6,xmm4
vfmsubadd231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd231pd xmm7,xmm6,[ecx]
vfmsubadd231ps xmm2,xmm6,xmm4
vfmsubadd231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsubadd231ps xmm7,xmm6,[ecx]
vfmsub132pd xmm2,xmm6,xmm4
vfmsub132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub132pd xmm7,xmm6,[ecx]
vfmsub132ps xmm2,xmm6,xmm4
vfmsub132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub132ps xmm7,xmm6,[ecx]
vfmsub213pd xmm2,xmm6,xmm4
vfmsub213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub213pd xmm7,xmm6,[ecx]
vfmsub213ps xmm2,xmm6,xmm4
vfmsub213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub213ps xmm7,xmm6,[ecx]
vfmsub231pd xmm2,xmm6,xmm4
vfmsub231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub231pd xmm7,xmm6,[ecx]
vfmsub231ps xmm2,xmm6,xmm4
vfmsub231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfmsub231ps xmm7,xmm6,[ecx]
vfnmadd132pd xmm2,xmm6,xmm4
vfnmadd132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd132pd xmm7,xmm6,[ecx]
vfnmadd132ps xmm2,xmm6,xmm4
vfnmadd132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd132ps xmm7,xmm6,[ecx]
vfnmadd213pd xmm2,xmm6,xmm4
vfnmadd213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd213pd xmm7,xmm6,[ecx]
vfnmadd213ps xmm2,xmm6,xmm4
vfnmadd213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd213ps xmm7,xmm6,[ecx]
vfnmadd231pd xmm2,xmm6,xmm4
vfnmadd231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd231pd xmm7,xmm6,[ecx]
vfnmadd231ps xmm2,xmm6,xmm4
vfnmadd231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmadd231ps xmm7,xmm6,[ecx]
vfnmsub132pd xmm2,xmm6,xmm4
vfnmsub132pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub132pd xmm7,xmm6,[ecx]
vfnmsub132ps xmm2,xmm6,xmm4
vfnmsub132ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub132ps xmm7,xmm6,[ecx]
vfnmsub213pd xmm2,xmm6,xmm4
vfnmsub213pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub213pd xmm7,xmm6,[ecx]
vfnmsub213ps xmm2,xmm6,xmm4
vfnmsub213ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub213ps xmm7,xmm6,[ecx]
vfnmsub231pd xmm2,xmm6,xmm4
vfnmsub231pd xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub231pd xmm7,xmm6,[ecx]
vfnmsub231ps xmm2,xmm6,xmm4
vfnmsub231ps xmm7,xmm6,XMMWORD PTR [ecx]
vfnmsub231ps xmm7,xmm6,[ecx]
# Tests for op xmm/mem64, xmm, xmm
vfmadd132sd xmm2,xmm6,xmm4
vfmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd132sd xmm2,xmm6,[ecx]
vfmadd213sd xmm2,xmm6,xmm4
vfmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd213sd xmm2,xmm6,[ecx]
vfmadd231sd xmm2,xmm6,xmm4
vfmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfmadd231sd xmm2,xmm6,[ecx]
vfmsub132sd xmm2,xmm6,xmm4
vfmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub132sd xmm2,xmm6,[ecx]
vfmsub213sd xmm2,xmm6,xmm4
vfmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub213sd xmm2,xmm6,[ecx]
vfmsub231sd xmm2,xmm6,xmm4
vfmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfmsub231sd xmm2,xmm6,[ecx]
vfnmadd132sd xmm2,xmm6,xmm4
vfnmadd132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd132sd xmm2,xmm6,[ecx]
vfnmadd213sd xmm2,xmm6,xmm4
vfnmadd213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd213sd xmm2,xmm6,[ecx]
vfnmadd231sd xmm2,xmm6,xmm4
vfnmadd231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmadd231sd xmm2,xmm6,[ecx]
vfnmsub132sd xmm2,xmm6,xmm4
vfnmsub132sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub132sd xmm2,xmm6,[ecx]
vfnmsub213sd xmm2,xmm6,xmm4
vfnmsub213sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub213sd xmm2,xmm6,[ecx]
vfnmsub231sd xmm2,xmm6,xmm4
vfnmsub231sd xmm2,xmm6,QWORD PTR [ecx]
vfnmsub231sd xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm, xmm
vfmadd132ss xmm2,xmm6,xmm4
vfmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd132ss xmm2,xmm6,[ecx]
vfmadd213ss xmm2,xmm6,xmm4
vfmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd213ss xmm2,xmm6,[ecx]
vfmadd231ss xmm2,xmm6,xmm4
vfmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfmadd231ss xmm2,xmm6,[ecx]
vfmsub132ss xmm2,xmm6,xmm4
vfmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub132ss xmm2,xmm6,[ecx]
vfmsub213ss xmm2,xmm6,xmm4
vfmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub213ss xmm2,xmm6,[ecx]
vfmsub231ss xmm2,xmm6,xmm4
vfmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfmsub231ss xmm2,xmm6,[ecx]
vfnmadd132ss xmm2,xmm6,xmm4
vfnmadd132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd132ss xmm2,xmm6,[ecx]
vfnmadd213ss xmm2,xmm6,xmm4
vfnmadd213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd213ss xmm2,xmm6,[ecx]
vfnmadd231ss xmm2,xmm6,xmm4
vfnmadd231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmadd231ss xmm2,xmm6,[ecx]
vfnmsub132ss xmm2,xmm6,xmm4
vfnmsub132ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub132ss xmm2,xmm6,[ecx]
vfnmsub213ss xmm2,xmm6,xmm4
vfnmsub213ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub213ss xmm2,xmm6,[ecx]
vfnmsub231ss xmm2,xmm6,xmm4
vfnmsub231ss xmm2,xmm6,DWORD PTR [ecx]
vfnmsub231ss xmm2,xmm6,[ecx]
|
stsp/binutils-ia16
| 1,212
|
gas/testsuite/gas/i386/notrack.s
|
# Check 32bit NOTRACK prefix
#
# Indirect call/jmp forms with the CET NOTRACK prefix, alone and
# combined with the MPX BND prefix in both orders, in AT&T and Intel
# syntax.  The trailing .byte sequences hand-encode prefix orderings
# the assembler itself would not emit (see the comments above them).
.allow_index_reg
.text
_start:
	notrack call *%eax
	notrack call *%ax
	notrack jmp *%eax
	notrack jmp *%ax
	notrack call *(%eax)
	notrack callw *(%eax)
	notrack jmp *(%eax)
	notrack jmpw *(%eax)
	notrack bnd call *%eax
	notrack bnd call *%ax
	notrack bnd jmp *%eax
	notrack bnd jmp *%ax
	notrack bnd call *(%eax)
	notrack bnd callw *(%eax)
	notrack bnd jmp *(%eax)
	notrack bnd jmpw *(%eax)
	bnd notrack call *%eax
	bnd notrack call *%ax
	bnd notrack call *(%eax)
	bnd notrack callw *(%eax)
	.intel_syntax noprefix
	notrack call eax
	notrack call ax
	notrack jmp eax
	notrack jmp ax
	notrack call DWORD PTR [eax]
	notrack call WORD PTR [eax]
	notrack jmp DWORD PTR [eax]
	notrack jmp WORD PTR [eax]
	notrack bnd call eax
	notrack bnd call ax
	notrack bnd jmp eax
	notrack bnd jmp ax
	notrack bnd call DWORD PTR [eax]
	notrack bnd call WORD PTR [eax]
	notrack bnd jmp DWORD PTR [eax]
	notrack bnd jmp WORD PTR [eax]
	bnd notrack call eax
	bnd notrack call ax
	bnd notrack call DWORD PTR [eax]
	bnd notrack call WORD PTR [eax]
# bnd notrack call *%eax
	.byte 0xf2
	.byte 0x3e
	.byte 0xff
	.byte 0xd0
# notrack callw *%ax
	.byte 0x66
	.byte 0x3e
	.byte 0xff
	.byte 0xd0
|
stsp/binutils-ia16
| 1,175
|
gas/testsuite/gas/i386/x86-64-relax-1.s
|
# Branch-relaxation test: conditional jumps interleaved with fixed-size
# NOP padding (.zero N, 0x90 emits N NOP bytes) so that individual
# branches land just inside or just outside the 8-bit displacement
# range, forcing the assembler to pick short vs. near jump encodings.
# The padding sizes are deliberate — do not change them.
.text
	je .LBB0_46
	.zero 6, 0x90
	je .LBB0_46
.LBB0_8:
	.zero 134, 0x90
	je .LBB0_8
	.zero 4, 0x90
	je .LBB0_8
	.zero 8, 0x90
	je .LBB0_46
	.zero 10, 0x90
	je .LBB0_8
	.zero 4, 0x90
	je .LBB0_8
	movq 304(%rsp), %r14
	.zero 2, 0x90
	je .LBB0_8
	je .LBB0_8
	movq 256(%rsp), %r14
	.zero 3, 0x90
	je .LBB0_46
	.zero 10, 0x90
	je .LBB0_8
	.zero 13, 0x90
	je .LBB0_8
	leaq 432(%rsp), %rsi
	je .LBB0_8
	movq 176(%rsp), %r14
	je .LBB0_46
	je .LBB0_8
	je .LBB0_8
	leaq 424(%rsp), %rsi
	je .LBB0_8
	.zero 22, 0x90
	je .LBB0_8
	.zero 11, 0x90
	je .LBB0_8
	leaq 416(%rsp), %rsi
	je .LBB0_8
	.zero 21, 0x90
	je .LBB0_46
	.zero 8, 0x90
	je .LBB0_8
	.zero 11, 0x90
	je .LBB0_8
	.zero 7, 0x90
	je .LBB0_8
	.zero 22, 0x90
	je .LBB0_46
	.zero 131, 0x90
.LBB0_46:
	.balign 16, 0x90
	movq 168(%rsp), %rax
	.zero 3, 0x90
	je .LBB1_35
	.balign 16, 0x90
	.zero 2, 0x90
	je .LBB1_35
	.zero 37, 0x90
	je .LBB1_35
	.zero 59, 0x90
	je .LBB1_35
	.zero 68, 0x90
	je .LBB1_17
	.balign 16, 0x90
.LBB1_17:
	.zero 85, 0x90
.LBB1_35:
	nop
|
stsp/binutils-ia16
| 10,181
|
gas/testsuite/gas/i386/inval-avx512f.s
|
# Check illegal AVX512F instructions -- every instruction below is intentionally
# invalid and must be REJECTED by gas; expected diagnostics match these line
# numbers, so comments here may only be added at end of line, never as new lines.
.text
.allow_index_reg
_start:
	mov {sae}, %eax{%k1}
	mov {sae}, %eax
	mov %ebx, %eax{%k2}
	vaddps %zmm3, %zmm1, %zmm2{z}{%k1}{z}
	vaddps %zmm3, %zmm1{%k3}, %zmm2{z}
	vaddps %zmm3, %zmm1{%k1}, %zmm2{%k2}
	vcvtps2pd (%eax), %zmm1{1to8}
	vcvtps2pd (%eax){1to16}, %zmm1
	vcvtps2pd (%eax){%k1}, %zmm1
	vcvtps2pd (%eax){z}, %zmm1
	vgatherqpd (%rdi,%zmm2,8),%zmm6
	vgatherqpd (%edi),%zmm6{%k1}
	vgatherqpd (%zmm2),%zmm6{%k1}
	vpscatterdd %zmm6,(%edi){%k1}
	vpscatterdd %zmm6,(%zmm2){%k1}
	.intel_syntax noprefix          # same rejects, Intel syntax
	mov eax{k1}, {sae}
	mov eax, {sae}
	mov eax{k2}, ebx
	vaddps zmm2{z}{k1}{z}, zmm1, zmm3
	vaddps zmm2{z}, zmm1{k3}, zmm3
	vaddps zmm2{k2}, zmm1{k1}, zmm3
	vcvtps2pd zmm1{1to8}, [eax]
	vcvtps2pd zmm1, [eax]{1to16}
	vcvtps2pd zmm1, [eax]{k1}
	vcvtps2pd zmm1, [eax]{z}
	vgatherqpd zmm6, ZMMWORD PTR [rdi+zmm2*8]
	vgatherqpd zmm6{k1}, ZMMWORD PTR [edi]
	vgatherqpd zmm6{k1}, ZMMWORD PTR [zmm2+eiz]
	vpscatterdd ZMMWORD PTR [edi]{k1}, zmm6
	vpscatterdd ZMMWORD PTR [zmm2+eiz]{k1}, zmm6
	vaddps zmm2, zmm1, QWORD PTR [eax]{1to8}
	vaddps zmm2, zmm1, QWORD PTR [eax]{1to16}
	vaddpd zmm2, zmm1, DWORD PTR [eax]{1to8}
	vaddpd zmm2, zmm1, DWORD PTR [eax]{1to16}
	vaddps zmm2, zmm1, ZMMWORD PTR [eax]{1to16}
	vaddps zmm2, zmm1, DWORD PTR [eax]
	vaddpd zmm2, zmm1, QWORD PTR [eax]
	.att_syntax prefix              # back to AT&T for masking-operand errors
	vaddps %zmm0, %zmm1, %zmm2{%ecx}
	vaddps %zmm0, %zmm1, %zmm2{z}
	.intel_syntax noprefix
	vaddps zmm2{ecx}, zmm1, zmm0
	vaddps zmm2{z}, zmm1, zmm0
	.att_syntax prefix              # bad broadcast factors for each vector length
	vmovaps (%eax){1to2}, %zmm1
	vmovaps (%eax){1to4}, %zmm1
	vmovaps (%eax){1to8}, %zmm1
	vmovaps (%eax){1to16}, %zmm1
	vcvtps2pd (%eax){1to2}, %zmm1
	vcvtps2pd (%eax){1to4}, %zmm1
	vcvtps2pd (%eax){1to8}, %zmm1
	vcvtps2pd (%eax){1to16}, %zmm1
	vcvtps2pd (%eax){1to2}, %ymm1
	vcvtps2pd (%eax){1to4}, %ymm1
	vcvtps2pd (%eax){1to8}, %ymm1
	vcvtps2pd (%eax){1to16}, %ymm1
	vcvtps2pd (%eax){1to2}, %xmm1
	vcvtps2pd (%eax){1to4}, %xmm1
	vcvtps2pd (%eax){1to8}, %xmm1
	vcvtps2pd (%eax){1to16}, %xmm1
	vaddps (%eax){1to2}, %zmm1, %zmm2
	vaddps (%eax){1to4}, %zmm1, %zmm2
	vaddps (%eax){1to8}, %zmm1, %zmm2
	vaddps (%eax){1to16}, %zmm1, %zmm2
	vaddps (%eax){1to2}, %ymm1, %ymm2
	vaddps (%eax){1to4}, %ymm1, %ymm2
	vaddps (%eax){1to8}, %ymm1, %ymm2
	vaddps (%eax){1to16}, %ymm1, %ymm2
	vaddps (%eax){1to2}, %xmm1, %xmm2
	vaddps (%eax){1to4}, %xmm1, %xmm2
	vaddps (%eax){1to8}, %xmm1, %xmm2
	vaddps (%eax){1to16}, %xmm1, %xmm2
	vaddpd (%eax){1to2}, %zmm1, %zmm2
	vaddpd (%eax){1to4}, %zmm1, %zmm2
	vaddpd (%eax){1to8}, %zmm1, %zmm2
	vaddpd (%eax){1to16}, %zmm1, %zmm2
	vaddpd (%eax){1to2}, %ymm1, %ymm2
	vaddpd (%eax){1to4}, %ymm1, %ymm2
	vaddpd (%eax){1to8}, %ymm1, %ymm2
	vaddpd (%eax){1to16}, %ymm1, %ymm2
	vaddpd (%eax){1to2}, %xmm1, %xmm2
	vaddpd (%eax){1to4}, %xmm1, %xmm2
	vaddpd (%eax){1to8}, %xmm1, %xmm2
	vaddpd (%eax){1to16}, %xmm1, %xmm2
	.intel_syntax noprefix          # mismatched PTR sizes / broadcast factors
	vcvtps2pd zmm1, QWORD PTR [eax]
	vcvtps2pd ymm1, QWORD PTR [eax]
	vcvtps2pd xmm1, QWORD PTR [eax]
	vcvtps2pd xmm1, DWORD PTR [eax]{1to2}
	vcvtps2pd xmm1, DWORD PTR [eax]{1to4}
	vcvtps2pd xmm1, DWORD PTR [eax]{1to8}
	vcvtps2pd xmm1, DWORD PTR [eax]{1to16}
	vaddps zmm2, zmm1, QWORD PTR [eax]
	vaddps ymm2, ymm1, QWORD PTR [eax]
	vaddps xmm2, xmm1, QWORD PTR [eax]
	vaddps zmm2, zmm1, DWORD PTR [eax]{1to2}
	vaddps zmm2, zmm1, DWORD PTR [eax]{1to4}
	vaddps zmm2, zmm1, DWORD PTR [eax]{1to8}
	vaddps zmm2, zmm1, DWORD PTR [eax]{1to16}
	vaddps ymm2, ymm1, DWORD PTR [eax]{1to2}
	vaddps ymm2, ymm1, DWORD PTR [eax]{1to4}
	vaddps ymm2, ymm1, DWORD PTR [eax]{1to8}
	vaddps ymm2, ymm1, DWORD PTR [eax]{1to16}
	vaddps xmm2, xmm1, DWORD PTR [eax]{1to2}
	vaddps xmm2, xmm1, DWORD PTR [eax]{1to4}
	vaddps xmm2, xmm1, DWORD PTR [eax]{1to8}
	vaddps xmm2, xmm1, DWORD PTR [eax]{1to16}
	vaddpd zmm2, zmm1, DWORD PTR [eax]
	vaddpd ymm2, ymm1, DWORD PTR [eax]
	vaddpd xmm2, xmm1, DWORD PTR [eax]
	vaddpd zmm2, zmm1, QWORD PTR [eax]{1to2}
	vaddpd zmm2, zmm1, QWORD PTR [eax]{1to4}
	vaddpd zmm2, zmm1, QWORD PTR [eax]{1to8}
	vaddpd zmm2, zmm1, QWORD PTR [eax]{1to16}
	vaddpd ymm2, ymm1, QWORD PTR [eax]{1to2}
	vaddpd ymm2, ymm1, QWORD PTR [eax]{1to4}
	vaddpd ymm2, ymm1, QWORD PTR [eax]{1to8}
	vaddpd ymm2, ymm1, QWORD PTR [eax]{1to16}
	vaddpd xmm2, xmm1, QWORD PTR [eax]{1to2}
	vaddpd xmm2, xmm1, QWORD PTR [eax]{1to4}
	vaddpd xmm2, xmm1, QWORD PTR [eax]{1to8}
	vaddpd xmm2, xmm1, QWORD PTR [eax]{1to16}
	vcvtps2qq xmm0, DWORD PTR [eax]
	.att_syntax prefix              # {z} on mask-register / memory destinations is illegal
	vcmppd $0, %zmm0, %zmm0, %k0{%k1}{z}
	vcmpps $0, %zmm0, %zmm0, %k0{%k1}{z}
	vcmpsd $0, %xmm0, %xmm0, %k0{%k1}{z}
	vcmpss $0, %xmm0, %xmm0, %k0{%k1}{z}
	vcompresspd %zmm0, (%eax){%k1}{z}
	vcompressps %zmm0, (%eax){%k1}{z}
	vcvtps2ph $0, %zmm0, (%eax){%k1}{z}
	vextractf32x4 $0, %zmm0, (%eax){%k1}{z}
	vextractf32x8 $0, %zmm0, (%eax){%k1}{z}
	vextractf64x2 $0, %zmm0, (%eax){%k1}{z}
	vextractf64x4 $0, %zmm0, (%eax){%k1}{z}
	vextracti32x4 $0, %zmm0, (%eax){%k1}{z}
	vextracti32x8 $0, %zmm0, (%eax){%k1}{z}
	vextracti64x2 $0, %zmm0, (%eax){%k1}{z}
	vextracti64x4 $0, %zmm0, (%eax){%k1}{z}
	vfpclasspd $0, %zmm0, %k0{%k1}{z}
	vfpclassps $0, %zmm0, %k0{%k1}{z}
	vfpclasssd $0, %xmm0, %k0{%k1}{z}
	vfpclassss $0, %xmm0, %k0{%k1}{z}
	vgatherdpd (%eax,%ymm1), %zmm0{%k1}{z}
	vgatherdps (%eax,%zmm1), %zmm0{%k1}{z}
	vgatherqpd (%eax,%zmm1), %zmm0{%k1}{z}
	vgatherqps (%eax,%zmm1), %ymm0{%k1}{z}
	vgatherpf0dpd (%eax,%ymm1){%k1}{z}
	vgatherpf0dps (%eax,%zmm1){%k1}{z}
	vgatherpf0qpd (%eax,%zmm1){%k1}{z}
	vgatherpf0qps (%eax,%zmm1){%k1}{z}
	vgatherpf1dpd (%eax,%ymm1){%k1}{z}
	vgatherpf1dps (%eax,%zmm1){%k1}{z}
	vgatherpf1qpd (%eax,%zmm1){%k1}{z}
	vgatherpf1qps (%eax,%zmm1){%k1}{z}
	vmovapd %zmm0, (%eax){%k1}{z}
	vmovaps %zmm0, (%eax){%k1}{z}
	vmovdqa32 %zmm0, (%eax){%k1}{z}
	vmovdqa64 %zmm0, (%eax){%k1}{z}
	vmovdqu8 %zmm0, (%eax){%k1}{z}
	vmovdqu16 %zmm0, (%eax){%k1}{z}
	vmovdqu32 %zmm0, (%eax){%k1}{z}
	vmovdqu64 %zmm0, (%eax){%k1}{z}
	vmovsd %xmm0, (%eax){%k1}{z}
	vmovss %xmm0, (%eax){%k1}{z}
	vmovupd %zmm0, (%eax){%k1}{z}
	vmovups %zmm0, (%eax){%k1}{z}
	vpcmpb $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpd $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpq $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpw $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpeqb %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpeqd %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpeqq %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpeqw %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpgtb %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpgtd %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpgtq %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpgtw %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpub $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpud $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpuq $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcmpuw $0, %zmm0, %zmm0, %k0{%k1}{z}
	vpcompressb %zmm0, (%eax){%k1}{z}
	vpcompressd %zmm0, (%eax){%k1}{z}
	vpcompressq %zmm0, (%eax){%k1}{z}
	vpcompressw %zmm0, (%eax){%k1}{z}
	vpgatherdd (%eax,%zmm1), %zmm0{%k1}{z}
	vpgatherdq (%eax,%ymm1), %zmm0{%k1}{z}
	vpgatherqd (%eax,%zmm1), %ymm0{%k1}{z}
	vpgatherqq (%eax,%zmm1), %zmm0{%k1}{z}
	vpmovdb %zmm0, (%eax){%k1}{z}
	vpmovdw %zmm0, (%eax){%k1}{z}
	vpmovqb %zmm0, (%eax){%k1}{z}
	vpmovqd %zmm0, (%eax){%k1}{z}
	vpmovqw %zmm0, (%eax){%k1}{z}
	vpmovsdb %zmm0, (%eax){%k1}{z}
	vpmovsdw %zmm0, (%eax){%k1}{z}
	vpmovsqb %zmm0, (%eax){%k1}{z}
	vpmovsqd %zmm0, (%eax){%k1}{z}
	vpmovsqw %zmm0, (%eax){%k1}{z}
	vpmovswb %zmm0, (%eax){%k1}{z}
	vpmovusdb %zmm0, (%eax){%k1}{z}
	vpmovusdw %zmm0, (%eax){%k1}{z}
	vpmovusqb %zmm0, (%eax){%k1}{z}
	vpmovusqd %zmm0, (%eax){%k1}{z}
	vpmovusqw %zmm0, (%eax){%k1}{z}
	vpmovuswb %zmm0, (%eax){%k1}{z}
	vpmovwb %zmm0, (%eax){%k1}{z}
	vpscatterdd %zmm0, (%eax,%zmm1){%k1}{z}
	vpscatterdq %zmm0, (%eax,%ymm1){%k1}{z}
	vpscatterqd %ymm0, (%eax,%zmm1){%k1}{z}
	vpscatterqq %zmm0, (%eax,%zmm1){%k1}{z}
	vpshufbitqmb %zmm0, %zmm0, %k0{%k1}{z}
	vptestmb %zmm0, %zmm0, %k0{%k1}{z}
	vptestmd %zmm0, %zmm0, %k0{%k1}{z}
	vptestmq %zmm0, %zmm0, %k0{%k1}{z}
	vptestmw %zmm0, %zmm0, %k0{%k1}{z}
	vptestnmb %zmm0, %zmm0, %k0{%k1}{z}
	vptestnmd %zmm0, %zmm0, %k0{%k1}{z}
	vptestnmq %zmm0, %zmm0, %k0{%k1}{z}
	vptestnmw %zmm0, %zmm0, %k0{%k1}{z}
	vscatterdpd %zmm0, (%eax,%ymm1){%k1}{z}
	vscatterdps %zmm0, (%eax,%zmm1){%k1}{z}
	vscatterqpd %zmm0, (%eax,%zmm1){%k1}{z}
	vscatterqps %ymm0, (%eax,%zmm1){%k1}{z}
	vscatterpf0dpd (%eax,%ymm1){%k1}{z}
	vscatterpf0dps (%eax,%zmm1){%k1}{z}
	vscatterpf0qpd (%eax,%zmm1){%k1}{z}
	vscatterpf0qps (%eax,%zmm1){%k1}{z}
	vscatterpf1dpd (%eax,%ymm1){%k1}{z}
	vscatterpf1dps (%eax,%zmm1){%k1}{z}
	vscatterpf1qpd (%eax,%zmm1){%k1}{z}
	vscatterpf1qps (%eax,%zmm1){%k1}{z}
	vdpbf16ps 8(%eax){1to8}, %zmm2, %zmm2
	vcvtne2ps2bf16 8(%eax){1to8}, %zmm2, %zmm2
	vcvtneps2bf16 (%eax){1to2}, %ymm1
	vcvtneps2bf16 (%eax){1to4}, %ymm1
	vcvtneps2bf16 (%eax){1to8}, %ymm1
	vcvtneps2bf16 (%eax){1to16}, %ymm1
	vcvtneps2bf16 (%eax){1to2}, %xmm1
	vcvtneps2bf16 (%eax){1to4}, %xmm1
	vcvtneps2bf16 (%eax){1to8}, %xmm1
	vcvtneps2bf16 (%eax){1to16}, %xmm1
	vaddps $0xcc, %zmm0, %zmm0, %zmm0
	vcmpss $0, $0xcc, %xmm0, %xmm0, %k0
	vaddss {sae}, %xmm0, %xmm0, %xmm0
	vcmpps $0, {rn-sae}, %zmm0, %zmm0, %k0
	.intel_syntax noprefix          # BCST with wrong / disallowed element sizes
	vaddps zmm2, zmm1, WORD BCST [eax]
	vaddps zmm2, zmm1, DWORD BCST [eax]
	vaddps zmm2, zmm1, QWORD BCST [eax]
	vaddps zmm2, zmm1, ZMMWORD BCST [eax]
	vaddpd zmm2, zmm1, WORD BCST [eax]
	vaddpd zmm2, zmm1, DWORD BCST [eax]
	vaddpd zmm2, zmm1, QWORD BCST [eax]
	vaddpd zmm2, zmm1, ZMMWORD BCST [eax]
	.att_syntax prefix              # rounding / sae modifiers in every wrong position
	vaddps {rn-sae}, %zmm0, %zmm0, %zmm0
	vaddps %zmm0, {rn-sae}, %zmm0, %zmm0
	vaddps %zmm0, %zmm0, {rn-sae}, %zmm0
	vaddps %zmm0, %zmm0, %zmm0, {rn-sae}
	vcmpps {sae}, $0, %zmm0, %zmm0, %k0
	vcmpps $0, {sae}, %zmm0, %zmm0, %k0
	vcmpps $0, %zmm0, {sae}, %zmm0, %k0
	vcmpps $0, %zmm0, %zmm0, {sae}, %k0
	vcmpps $0, %zmm0, %zmm0, %k0, {sae}
	vcvtsi2ss {rn-sae}, %eax, %xmm0, %xmm0
	vcvtsi2ss %eax, {rn-sae}, %xmm0, %xmm0
	vcvtsi2ss %eax, %xmm0, {rn-sae}, %xmm0
	vcvtsi2ss %eax, %xmm0, %xmm0, {rn-sae}
	.intel_syntax noprefix
	vaddps zmm0{rn-sae}, zmm0, zmm0
	vaddps zmm0, zmm0{rn-sae}, zmm0
	vaddps zmm0, zmm0, zmm0{rn-sae}
	vcmpps k0{sae}, zmm0, zmm0, 0
	vcmpps k0, zmm0{sae}, zmm0, 0
	vcmpps k0, zmm0, zmm0{sae}, 0
	vcmpps k0, zmm0, zmm0, 0{sae}
	vcvtsi2ss xmm0{rn-sae}, xmm0, eax
	vcvtsi2ss xmm0, xmm0{rn-sae}, eax
	vcvtsi2ss xmm0, xmm0, eax{rn-sae}
	.p2align 4
# ---- source: stsp/binutils-ia16 (6,431 bytes) ----
# ---- file: gas/testsuite/gas/i386/avx512pf.s ----
# Check 32bit AVX512PF instructions -- gather/scatter prefetch forms; each of the
# four variants per mnemonic exercises a different SIB/displacement encoding, and
# the disassembly is matched against a .d dump file, so keep lines byte-stable.
.allow_index_reg
.text
_start:
	vgatherpf0dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vgatherpf0dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vgatherpf0dpd	256(%eax,%ymm7){%k1}	 # AVX512PF
	vgatherpf0dpd	1024(%ecx,%ymm7,4){%k1}	 # AVX512PF
	vgatherpf0dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf0dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf0dps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vgatherpf0dps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vgatherpf0qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf0qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf0qpd	256(%eax,%zmm7){%k1}	 # AVX512PF
	vgatherpf0qpd	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vgatherpf0qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf0qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf0qps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vgatherpf0qps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vgatherpf1dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vgatherpf1dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vgatherpf1dpd	256(%eax,%ymm7){%k1}	 # AVX512PF
	vgatherpf1dpd	1024(%ecx,%ymm7,4){%k1}	 # AVX512PF
	vgatherpf1dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf1dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf1dps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vgatherpf1dps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vgatherpf1qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf1qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf1qpd	256(%eax,%zmm7){%k1}	 # AVX512PF
	vgatherpf1qpd	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vgatherpf1qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf1qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vgatherpf1qps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vgatherpf1qps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vscatterpf0dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vscatterpf0dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vscatterpf0dpd	256(%eax,%ymm7){%k1}	 # AVX512PF
	vscatterpf0dpd	1024(%ecx,%ymm7,4){%k1}	 # AVX512PF
	vscatterpf0dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf0dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf0dps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vscatterpf0dps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vscatterpf0qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf0qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf0qpd	256(%eax,%zmm7){%k1}	 # AVX512PF
	vscatterpf0qpd	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vscatterpf0qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf0qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf0qps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vscatterpf0qps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vscatterpf1dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vscatterpf1dpd	123(%ebp,%ymm7,8){%k1}	 # AVX512PF
	vscatterpf1dpd	256(%eax,%ymm7){%k1}	 # AVX512PF
	vscatterpf1dpd	1024(%ecx,%ymm7,4){%k1}	 # AVX512PF
	vscatterpf1dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf1dps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf1dps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vscatterpf1dps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vscatterpf1qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf1qpd	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf1qpd	256(%eax,%zmm7){%k1}	 # AVX512PF
	vscatterpf1qpd	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	vscatterpf1qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf1qps	123(%ebp,%zmm7,8){%k1}	 # AVX512PF
	vscatterpf1qps	256(%eax,%zmm7){%k1}	 # AVX512PF
	vscatterpf1qps	1024(%ecx,%zmm7,4){%k1}	 # AVX512PF
	.intel_syntax noprefix          # same encodings via Intel syntax
	vgatherpf0dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vgatherpf0dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vgatherpf0dpd	[eax+ymm7+256]{k1}	 # AVX512PF
	vgatherpf0dpd	[ecx+ymm7*4+1024]{k1}	 # AVX512PF
	vgatherpf0dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf0dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf0dps	[eax+zmm7+256]{k1}	 # AVX512PF
	vgatherpf0dps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vgatherpf0qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf0qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf0qpd	[eax+zmm7+256]{k1}	 # AVX512PF
	vgatherpf0qpd	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vgatherpf0qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf0qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf0qps	[eax+zmm7+256]{k1}	 # AVX512PF
	vgatherpf0qps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vgatherpf1dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vgatherpf1dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vgatherpf1dpd	[eax+ymm7+256]{k1}	 # AVX512PF
	vgatherpf1dpd	[ecx+ymm7*4+1024]{k1}	 # AVX512PF
	vgatherpf1dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf1dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf1dps	[eax+zmm7+256]{k1}	 # AVX512PF
	vgatherpf1dps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vgatherpf1qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf1qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf1qpd	[eax+zmm7+256]{k1}	 # AVX512PF
	vgatherpf1qpd	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vgatherpf1qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf1qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vgatherpf1qps	[eax+zmm7+256]{k1}	 # AVX512PF
	vgatherpf1qps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vscatterpf0dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vscatterpf0dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vscatterpf0dpd	[eax+ymm7+256]{k1}	 # AVX512PF
	vscatterpf0dpd	[ecx+ymm7*4+1024]{k1}	 # AVX512PF
	vscatterpf0dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf0dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf0dps	[eax+zmm7+256]{k1}	 # AVX512PF
	vscatterpf0dps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vscatterpf0qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf0qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf0qpd	[eax+zmm7+256]{k1}	 # AVX512PF
	vscatterpf0qpd	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vscatterpf0qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf0qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf0qps	[eax+zmm7+256]{k1}	 # AVX512PF
	vscatterpf0qps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vscatterpf1dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vscatterpf1dpd	[ebp+ymm7*8-123]{k1}	 # AVX512PF
	vscatterpf1dpd	[eax+ymm7+256]{k1}	 # AVX512PF
	vscatterpf1dpd	[ecx+ymm7*4+1024]{k1}	 # AVX512PF
	vscatterpf1dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf1dps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf1dps	[eax+zmm7+256]{k1}	 # AVX512PF
	vscatterpf1dps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vscatterpf1qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf1qpd	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf1qpd	[eax+zmm7+256]{k1}	 # AVX512PF
	vscatterpf1qpd	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
	vscatterpf1qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf1qps	[ebp+zmm7*8-123]{k1}	 # AVX512PF
	vscatterpf1qps	[eax+zmm7+256]{k1}	 # AVX512PF
	vscatterpf1qps	[ecx+zmm7*4+1024]{k1}	 # AVX512PF
# ---- source: stsp/binutils-ia16 (4,802 bytes) ----
# ---- file: gas/testsuite/gas/i386/x86-64-mpx.s ----
# MPX instructions -- 64-bit Intel MPX (bndmk/bndmov/bndcl/bndcu/bndcn/
# bndstx/bndldx plus the bnd branch prefix), in both AT&T and Intel syntax,
# followed by hand-encoded invalid forms the disassembler must print as (bad).
.allow_index_reg
.text
start:
	### bndmk
	bndmk (%r11), %bnd1
	bndmk (%rax), %bnd1
	bndmk (0x399), %bnd1
	bndmk 0x3(%r9), %bnd1
	bndmk 0x3(%rax), %bnd1
	bndmk 0x3(,%r12,1), %bnd1
	bndmk (%rax,%rcx), %bnd1
	bndmk 0x3(%r11,%rax,2), %bnd1
	bndmk 0x3(%rbx,%r9,1), %bnd1
	### bndmov
	bndmov (%r11), %bnd1
	bndmov (%rax), %bnd1
	bndmov (0x399), %bnd1
	bndmov 0x3(%r9), %bnd2
	bndmov 0x3(%rax), %bnd2
	bndmov 0x3333(%rip), %bnd2
	bndmov 0x3(,%r12,1), %bnd0
	bndmov (%rax,%rdx), %bnd2
	bndmov 0x3(%r11,%rax,2), %bnd1
	bndmov 0x3(%rbx,%r9,1), %bnd1
	bndmov %bnd2, %bnd0
	bndmov %bnd1, (%r11)
	bndmov %bnd1, (%rax)
	bndmov %bnd1, (0x399)
	bndmov %bnd2, 0x3(%r9)
	bndmov %bnd2, 0x3(%rax)
	bndmov %bnd2, 0x3333(%rip)
	bndmov %bnd0, 0x3(,%r12,1)
	bndmov %bnd2, (%rax,%rdx)
	bndmov %bnd1, 0x3(%r11,%rax,2)
	bndmov %bnd1, 0x3(%rbx,%r9,1)
	bndmov %bnd0, %bnd2
	### bndcl
	bndcl (%r11), %bnd1
	bndcl (%rax), %bnd1
	bndcl %r11, %bnd1
	bndcl %rcx, %bnd1
	bndcl (0x399), %bnd1
	bndcl 0x3(%r9), %bnd2
	bndcl 0x3(%rax), %bnd2
	bndcl 0x3333(%rip), %bnd2
	bndcl 0x3(,%r12,1), %bnd0
	bndcl (%rax,%rdx), %bnd2
	bndcl 0x3(%r11,%rax,2), %bnd1
	bndcl 0x3(%rbx,%r9,1), %bnd1
	### bndcu
	bndcu (%r11), %bnd1
	bndcu (%rax), %bnd1
	bndcu %r11, %bnd1
	bndcu %rcx, %bnd1
	bndcu (0x399), %bnd1
	bndcu 0x3(%r9), %bnd2
	bndcu 0x3(%rax), %bnd2
	bndcu 0x3333(%rip), %bnd2
	bndcu 0x3(,%r12,1), %bnd0
	bndcu (%rax,%rdx), %bnd2
	bndcu 0x3(%r11,%rax,2), %bnd1
	bndcu 0x3(%rbx,%r9,1), %bnd1
	### bndcn
	bndcn (%r11), %bnd1
	bndcn (%rax), %bnd1
	bndcn %r11, %bnd1
	bndcn %rcx, %bnd1
	bndcn (0x399), %bnd1
	bndcn 0x3(%r9), %bnd2
	bndcn 0x3(%rax), %bnd2
	bndcn 0x3333(%rip), %bnd2
	bndcn 0x3(,%r12,1), %bnd0
	bndcn (%rax,%rdx), %bnd2
	bndcn 0x3(%r11,%rax,2), %bnd1
	bndcn 0x3(%rbx,%r9,1), %bnd1
	### bndstx
	bndstx %bnd0, 0x3(%rax,%rbx,1)
	bndstx %bnd2, 3(%rbx,%rdx)
	bndstx %bnd3, 0x399(%r12)
	bndstx %bnd1, 0x1234(%r11)
	bndstx %bnd2, 0x1234(%rbx)
	bndstx %bnd2, 3(,%rbx,1)
	bndstx %bnd2, 3(,%r12,1)
	bndstx %bnd1, (%rdx)
	### bndldx
	bndldx 0x3(%rax,%rbx,1), %bnd0
	bndldx 3(%rbx,%rdx), %bnd2
	bndldx 0x399(%r12), %bnd3
	bndldx 0x1234(%r11), %bnd1
	bndldx 0x1234(%rbx), %bnd2
	bndldx 3(,%rbx,1), %bnd2
	bndldx 3(,%r12,1), %bnd2
	bndldx (%rdx), %bnd1
	### bnd
	bnd call foo
	bnd call *(%rax)
	bnd call *(%r11)
	bnd je foo
	bnd jmp foo
	bnd jmp *(%rcx)
	bnd jmp *(%r12)
	bnd ret
	.intel_syntax noprefix          # same coverage in Intel syntax
	bndmk bnd1, [r11]
	bndmk bnd1, [rax]
	bndmk bnd1, [0x399]
	bndmk bnd1, [r9+0x3]
	bndmk bnd1, [rax+0x3]
	bndmk bnd1, [1*r12+0x3]
	bndmk bnd1, [rax+rcx]
	bndmk bnd1, [r11+1*rax+0x3]
	bndmk bnd1, [rbx+1*r9+0x3]
	### bndmov
	bndmov bnd1, [r11]
	bndmov bnd1, [rax]
	bndmov bnd1, [0x399]
	bndmov bnd2, [r9+0x3]
	bndmov bnd2, [rax+0x3]
	bndmov bnd0, [1*r12+0x3]
	bndmov bnd2, [rax+rdx]
	bndmov bnd1, [r11+1*rax+0x3]
	bndmov bnd1, [rbx+1*r9+0x3]
	bndmov bnd0, bnd2
	bndmov [r11], bnd1
	bndmov [rax], bnd1
	bndmov [0x399], bnd1
	bndmov [r9+0x3], bnd2
	bndmov [rax+0x3], bnd2
	bndmov [1*r12+0x3], bnd0
	bndmov [rax+rdx], bnd2
	bndmov [r11+1*rax+0x3], bnd1
	bndmov [rbx+1*r9+0x3], bnd1
	bndmov bnd2, bnd0
	### bndcl
	bndcl bnd1, [r11]
	bndcl bnd1, [rax]
	bndcl bnd1, r11
	bndcl bnd1, rcx
	bndcl bnd1, [0x399]
	bndcl bnd1, [r9+0x3]
	bndcl bnd1, [rax+0x3]
	bndcl bnd1, [1*r12+0x3]
	bndcl bnd1, [rax+rcx]
	bndcl bnd1, [r11+1*rax+0x3]
	bndcl bnd1, [rbx+1*r9+0x3]
	### bndcu
	bndcu bnd1, [r11]
	bndcu bnd1, [rax]
	bndcu bnd1, r11
	bndcu bnd1, rcx
	bndcu bnd1, [0x399]
	bndcu bnd1, [r9+0x3]
	bndcu bnd1, [rax+0x3]
	bndcu bnd1, [1*r12+0x3]
	bndcu bnd1, [rax+rcx]
	bndcu bnd1, [r11+1*rax+0x3]
	bndcu bnd1, [rbx+1*r9+0x3]
	### bndcn
	bndcn bnd1, [r11]
	bndcn bnd1, [rax]
	bndcn bnd1, r11
	bndcn bnd1, rcx
	bndcn bnd1, [0x399]
	bndcn bnd1, [r9+0x3]
	bndcn bnd1, [rax+0x3]
	bndcn bnd1, [1*r9+0x3]
	bndcn bnd1, [rax+rcx]
	bndcn bnd1, [r11+1*rax+0x3]
	bndcn bnd1, [rbx+1*r9+0x3]
	### bndstx
	bndstx [rax+rbx*1+0x3], bnd0
	bndstx [rbx+rdx+3], bnd2
	bndstx [r12+0x399], bnd3
	bndstx [r11+0x1234], bnd1
	bndstx [rbx+0x1234], bnd2
	bndstx [1*rbx+3], bnd2
	bndstx [1*r12+3], bnd2
	bndstx [rdx], bnd1
	### bndldx
	bndldx bnd0, [rax+rbx*1+0x3]
	bndldx bnd2, [rbx+rdx+3]
	bndldx bnd3, [r12+0x399]
	bndldx bnd1, [r11+0x1234]
	bndldx bnd2, [rbx+0x1234]
	bndldx bnd2, [1*rbx+3]
	bndldx bnd2, [1*r12+3]
	bndldx bnd1, [rdx]
	### bnd
	bnd call foo
	bnd call rax
	bnd call r11
	bnd je foo
	bnd jmp foo
	bnd jmp rcx
	bnd jmp r12
	bnd ret
foo:	bnd ret
bad:                                    # hand-encoded bytes the disassembler must flag as (bad)
	# bndldx (%eax),(bad)
	.byte 0x0f
	.byte 0x1a
	.byte 0x30
	# bndmov (bad),%bnd0
	.byte 0x66
	.byte 0x0f
	.byte 0x1a
	.byte 0xc4
	# bndmov with REX.B set
	.byte 0x66
	.byte 0x41
	.byte 0x0f
	.byte 0x1a
	.byte 0xc0
	# bndmov with REX.R set
	.byte 0x66
	.byte 0x44
	.byte 0x0f
	.byte 0x1a
	.byte 0xc0
	# bndmk (bad),%bnd0
	.byte 0xf3
	.byte 0x0f
	.byte 0x1b
	.byte 0x05
	.long 0x90909090
# ---- source: stsp/binutils-ia16 (46,714 bytes) ----
# ---- file: gas/testsuite/gas/i386/hle.s ----
# Check 32bit HLE instructions
.allow_index_reg
.text
_start:
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%ecx)
lock xacquire adcb $100,(%ecx)
xrelease lock adcb $100,(%ecx)
lock xrelease adcb $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcb $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcb $100,(%ecx)
xacquire lock addb $100,(%ecx)
lock xacquire addb $100,(%ecx)
xrelease lock addb $100,(%ecx)
lock xrelease addb $100,(%ecx)
.byte 0xf0; .byte 0xf2; addb $100,(%ecx)
.byte 0xf0; .byte 0xf3; addb $100,(%ecx)
xacquire lock andb $100,(%ecx)
lock xacquire andb $100,(%ecx)
xrelease lock andb $100,(%ecx)
lock xrelease andb $100,(%ecx)
.byte 0xf0; .byte 0xf2; andb $100,(%ecx)
.byte 0xf0; .byte 0xf3; andb $100,(%ecx)
xrelease movb $100,(%ecx)
xacquire lock orb $100,(%ecx)
lock xacquire orb $100,(%ecx)
xrelease lock orb $100,(%ecx)
lock xrelease orb $100,(%ecx)
.byte 0xf0; .byte 0xf2; orb $100,(%ecx)
.byte 0xf0; .byte 0xf3; orb $100,(%ecx)
xacquire lock sbbb $100,(%ecx)
lock xacquire sbbb $100,(%ecx)
xrelease lock sbbb $100,(%ecx)
lock xrelease sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%ecx)
xacquire lock subb $100,(%ecx)
lock xacquire subb $100,(%ecx)
xrelease lock subb $100,(%ecx)
lock xrelease subb $100,(%ecx)
.byte 0xf0; .byte 0xf2; subb $100,(%ecx)
.byte 0xf0; .byte 0xf3; subb $100,(%ecx)
xacquire lock xorb $100,(%ecx)
lock xacquire xorb $100,(%ecx)
xrelease lock xorb $100,(%ecx)
lock xrelease xorb $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorb $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorb $100,(%ecx)
# Tests for op imm16 regs/m16
xacquire lock adcw $1000,(%ecx)
lock xacquire adcw $1000,(%ecx)
xrelease lock adcw $1000,(%ecx)
lock xrelease adcw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; adcw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; adcw $1000,(%ecx)
xacquire lock addw $1000,(%ecx)
lock xacquire addw $1000,(%ecx)
xrelease lock addw $1000,(%ecx)
lock xrelease addw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; addw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; addw $1000,(%ecx)
xacquire lock andw $1000,(%ecx)
lock xacquire andw $1000,(%ecx)
xrelease lock andw $1000,(%ecx)
lock xrelease andw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; andw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; andw $1000,(%ecx)
xrelease movw $1000,(%ecx)
xacquire lock orw $1000,(%ecx)
lock xacquire orw $1000,(%ecx)
xrelease lock orw $1000,(%ecx)
lock xrelease orw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; orw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; orw $1000,(%ecx)
xacquire lock sbbw $1000,(%ecx)
lock xacquire sbbw $1000,(%ecx)
xrelease lock sbbw $1000,(%ecx)
lock xrelease sbbw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; sbbw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; sbbw $1000,(%ecx)
xacquire lock subw $1000,(%ecx)
lock xacquire subw $1000,(%ecx)
xrelease lock subw $1000,(%ecx)
lock xrelease subw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; subw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; subw $1000,(%ecx)
xacquire lock xorw $1000,(%ecx)
lock xacquire xorw $1000,(%ecx)
xrelease lock xorw $1000,(%ecx)
lock xrelease xorw $1000,(%ecx)
.byte 0xf0; .byte 0xf2; xorw $1000,(%ecx)
.byte 0xf0; .byte 0xf3; xorw $1000,(%ecx)
# Tests for op imm32 regl/m32
xacquire lock adcl $10000000,(%ecx)
lock xacquire adcl $10000000,(%ecx)
xrelease lock adcl $10000000,(%ecx)
lock xrelease adcl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; adcl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; adcl $10000000,(%ecx)
xacquire lock addl $10000000,(%ecx)
lock xacquire addl $10000000,(%ecx)
xrelease lock addl $10000000,(%ecx)
lock xrelease addl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; addl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; addl $10000000,(%ecx)
xacquire lock andl $10000000,(%ecx)
lock xacquire andl $10000000,(%ecx)
xrelease lock andl $10000000,(%ecx)
lock xrelease andl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; andl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; andl $10000000,(%ecx)
xrelease movl $10000000,(%ecx)
xacquire lock orl $10000000,(%ecx)
lock xacquire orl $10000000,(%ecx)
xrelease lock orl $10000000,(%ecx)
lock xrelease orl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; orl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; orl $10000000,(%ecx)
xacquire lock sbbl $10000000,(%ecx)
lock xacquire sbbl $10000000,(%ecx)
xrelease lock sbbl $10000000,(%ecx)
lock xrelease sbbl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; sbbl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; sbbl $10000000,(%ecx)
xacquire lock subl $10000000,(%ecx)
lock xacquire subl $10000000,(%ecx)
xrelease lock subl $10000000,(%ecx)
lock xrelease subl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; subl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; subl $10000000,(%ecx)
xacquire lock xorl $10000000,(%ecx)
lock xacquire xorl $10000000,(%ecx)
xrelease lock xorl $10000000,(%ecx)
lock xrelease xorl $10000000,(%ecx)
.byte 0xf0; .byte 0xf2; xorl $10000000,(%ecx)
.byte 0xf0; .byte 0xf3; xorl $10000000,(%ecx)
# Tests for op imm8 regs/m16
xacquire lock adcw $100,(%ecx)
lock xacquire adcw $100,(%ecx)
xrelease lock adcw $100,(%ecx)
lock xrelease adcw $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcw $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcw $100,(%ecx)
xacquire lock addw $100,(%ecx)
lock xacquire addw $100,(%ecx)
xrelease lock addw $100,(%ecx)
lock xrelease addw $100,(%ecx)
.byte 0xf0; .byte 0xf2; addw $100,(%ecx)
.byte 0xf0; .byte 0xf3; addw $100,(%ecx)
xacquire lock andw $100,(%ecx)
lock xacquire andw $100,(%ecx)
xrelease lock andw $100,(%ecx)
lock xrelease andw $100,(%ecx)
.byte 0xf0; .byte 0xf2; andw $100,(%ecx)
.byte 0xf0; .byte 0xf3; andw $100,(%ecx)
xacquire lock btcw $100,(%ecx)
lock xacquire btcw $100,(%ecx)
xrelease lock btcw $100,(%ecx)
lock xrelease btcw $100,(%ecx)
.byte 0xf0; .byte 0xf2; btcw $100,(%ecx)
.byte 0xf0; .byte 0xf3; btcw $100,(%ecx)
xacquire lock btrw $100,(%ecx)
lock xacquire btrw $100,(%ecx)
xrelease lock btrw $100,(%ecx)
lock xrelease btrw $100,(%ecx)
.byte 0xf0; .byte 0xf2; btrw $100,(%ecx)
.byte 0xf0; .byte 0xf3; btrw $100,(%ecx)
xacquire lock btsw $100,(%ecx)
lock xacquire btsw $100,(%ecx)
xrelease lock btsw $100,(%ecx)
lock xrelease btsw $100,(%ecx)
.byte 0xf0; .byte 0xf2; btsw $100,(%ecx)
.byte 0xf0; .byte 0xf3; btsw $100,(%ecx)
xrelease movw $100,(%ecx)
xacquire lock orw $100,(%ecx)
lock xacquire orw $100,(%ecx)
xrelease lock orw $100,(%ecx)
lock xrelease orw $100,(%ecx)
.byte 0xf0; .byte 0xf2; orw $100,(%ecx)
.byte 0xf0; .byte 0xf3; orw $100,(%ecx)
xacquire lock sbbw $100,(%ecx)
lock xacquire sbbw $100,(%ecx)
xrelease lock sbbw $100,(%ecx)
lock xrelease sbbw $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbw $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbw $100,(%ecx)
xacquire lock subw $100,(%ecx)
lock xacquire subw $100,(%ecx)
xrelease lock subw $100,(%ecx)
lock xrelease subw $100,(%ecx)
.byte 0xf0; .byte 0xf2; subw $100,(%ecx)
.byte 0xf0; .byte 0xf3; subw $100,(%ecx)
xacquire lock xorw $100,(%ecx)
lock xacquire xorw $100,(%ecx)
xrelease lock xorw $100,(%ecx)
lock xrelease xorw $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorw $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorw $100,(%ecx)
# Tests for op imm8 regl/m32
xacquire lock adcl $100,(%ecx)
lock xacquire adcl $100,(%ecx)
xrelease lock adcl $100,(%ecx)
lock xrelease adcl $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcl $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcl $100,(%ecx)
xacquire lock addl $100,(%ecx)
lock xacquire addl $100,(%ecx)
xrelease lock addl $100,(%ecx)
lock xrelease addl $100,(%ecx)
.byte 0xf0; .byte 0xf2; addl $100,(%ecx)
.byte 0xf0; .byte 0xf3; addl $100,(%ecx)
xacquire lock andl $100,(%ecx)
lock xacquire andl $100,(%ecx)
xrelease lock andl $100,(%ecx)
lock xrelease andl $100,(%ecx)
.byte 0xf0; .byte 0xf2; andl $100,(%ecx)
.byte 0xf0; .byte 0xf3; andl $100,(%ecx)
xacquire lock btcl $100,(%ecx)
lock xacquire btcl $100,(%ecx)
xrelease lock btcl $100,(%ecx)
lock xrelease btcl $100,(%ecx)
.byte 0xf0; .byte 0xf2; btcl $100,(%ecx)
.byte 0xf0; .byte 0xf3; btcl $100,(%ecx)
xacquire lock btrl $100,(%ecx)
lock xacquire btrl $100,(%ecx)
xrelease lock btrl $100,(%ecx)
lock xrelease btrl $100,(%ecx)
.byte 0xf0; .byte 0xf2; btrl $100,(%ecx)
.byte 0xf0; .byte 0xf3; btrl $100,(%ecx)
xacquire lock btsl $100,(%ecx)
lock xacquire btsl $100,(%ecx)
xrelease lock btsl $100,(%ecx)
lock xrelease btsl $100,(%ecx)
.byte 0xf0; .byte 0xf2; btsl $100,(%ecx)
.byte 0xf0; .byte 0xf3; btsl $100,(%ecx)
xrelease movl $100,(%ecx)
xacquire lock orl $100,(%ecx)
lock xacquire orl $100,(%ecx)
xrelease lock orl $100,(%ecx)
lock xrelease orl $100,(%ecx)
.byte 0xf0; .byte 0xf2; orl $100,(%ecx)
.byte 0xf0; .byte 0xf3; orl $100,(%ecx)
xacquire lock sbbl $100,(%ecx)
lock xacquire sbbl $100,(%ecx)
xrelease lock sbbl $100,(%ecx)
lock xrelease sbbl $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbl $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbl $100,(%ecx)
xacquire lock subl $100,(%ecx)
lock xacquire subl $100,(%ecx)
xrelease lock subl $100,(%ecx)
lock xrelease subl $100,(%ecx)
.byte 0xf0; .byte 0xf2; subl $100,(%ecx)
.byte 0xf0; .byte 0xf3; subl $100,(%ecx)
xacquire lock xorl $100,(%ecx)
lock xacquire xorl $100,(%ecx)
xrelease lock xorl $100,(%ecx)
lock xrelease xorl $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorl $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorl $100,(%ecx)
# Tests for op imm8 regb/m8
xacquire lock adcb $100,(%ecx)
lock xacquire adcb $100,(%ecx)
xrelease lock adcb $100,(%ecx)
lock xrelease adcb $100,(%ecx)
.byte 0xf0; .byte 0xf2; adcb $100,(%ecx)
.byte 0xf0; .byte 0xf3; adcb $100,(%ecx)
xacquire lock addb $100,(%ecx)
lock xacquire addb $100,(%ecx)
xrelease lock addb $100,(%ecx)
lock xrelease addb $100,(%ecx)
.byte 0xf0; .byte 0xf2; addb $100,(%ecx)
.byte 0xf0; .byte 0xf3; addb $100,(%ecx)
xacquire lock andb $100,(%ecx)
lock xacquire andb $100,(%ecx)
xrelease lock andb $100,(%ecx)
lock xrelease andb $100,(%ecx)
.byte 0xf0; .byte 0xf2; andb $100,(%ecx)
.byte 0xf0; .byte 0xf3; andb $100,(%ecx)
xrelease movb $100,(%ecx)
xacquire lock orb $100,(%ecx)
lock xacquire orb $100,(%ecx)
xrelease lock orb $100,(%ecx)
lock xrelease orb $100,(%ecx)
.byte 0xf0; .byte 0xf2; orb $100,(%ecx)
.byte 0xf0; .byte 0xf3; orb $100,(%ecx)
xacquire lock sbbb $100,(%ecx)
lock xacquire sbbb $100,(%ecx)
xrelease lock sbbb $100,(%ecx)
lock xrelease sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf2; sbbb $100,(%ecx)
.byte 0xf0; .byte 0xf3; sbbb $100,(%ecx)
xacquire lock subb $100,(%ecx)
lock xacquire subb $100,(%ecx)
xrelease lock subb $100,(%ecx)
lock xrelease subb $100,(%ecx)
.byte 0xf0; .byte 0xf2; subb $100,(%ecx)
.byte 0xf0; .byte 0xf3; subb $100,(%ecx)
xacquire lock xorb $100,(%ecx)
lock xacquire xorb $100,(%ecx)
xrelease lock xorb $100,(%ecx)
lock xrelease xorb $100,(%ecx)
.byte 0xf0; .byte 0xf2; xorb $100,(%ecx)
.byte 0xf0; .byte 0xf3; xorb $100,(%ecx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
# HLE prefix coverage, AT&T syntax: each lockable byte op is assembled with
# xacquire/xrelease in both prefix orders, then with the raw prefix bytes
# (0xf0 = lock, 0xf2 = repne/xacquire, 0xf3 = rep/xrelease) to check that the
# explicit-byte and mnemonic forms encode identically.
# NOTE(review): presumably paired with a .d dump file — do not reorder lines.
xacquire lock adcb %al,(%ecx)
lock xacquire adcb %al,(%ecx)
xrelease lock adcb %al,(%ecx)
lock xrelease adcb %al,(%ecx)
.byte 0xf0; .byte 0xf2; adcb %al,(%ecx)
.byte 0xf0; .byte 0xf3; adcb %al,(%ecx)
xacquire lock addb %al,(%ecx)
lock xacquire addb %al,(%ecx)
xrelease lock addb %al,(%ecx)
lock xrelease addb %al,(%ecx)
.byte 0xf0; .byte 0xf2; addb %al,(%ecx)
.byte 0xf0; .byte 0xf3; addb %al,(%ecx)
xacquire lock andb %al,(%ecx)
lock xacquire andb %al,(%ecx)
xrelease lock andb %al,(%ecx)
lock xrelease andb %al,(%ecx)
.byte 0xf0; .byte 0xf2; andb %al,(%ecx)
.byte 0xf0; .byte 0xf3; andb %al,(%ecx)
# mov-to-memory accepts only xrelease, without lock (HLE special case).
xrelease movb %al,(%ecx)
xrelease movb %al,0x12345678
xacquire lock orb %al,(%ecx)
lock xacquire orb %al,(%ecx)
xrelease lock orb %al,(%ecx)
lock xrelease orb %al,(%ecx)
.byte 0xf0; .byte 0xf2; orb %al,(%ecx)
.byte 0xf0; .byte 0xf3; orb %al,(%ecx)
xacquire lock sbbb %al,(%ecx)
lock xacquire sbbb %al,(%ecx)
xrelease lock sbbb %al,(%ecx)
lock xrelease sbbb %al,(%ecx)
.byte 0xf0; .byte 0xf2; sbbb %al,(%ecx)
.byte 0xf0; .byte 0xf3; sbbb %al,(%ecx)
xacquire lock subb %al,(%ecx)
lock xacquire subb %al,(%ecx)
xrelease lock subb %al,(%ecx)
lock xrelease subb %al,(%ecx)
.byte 0xf0; .byte 0xf2; subb %al,(%ecx)
.byte 0xf0; .byte 0xf3; subb %al,(%ecx)
# xchg with a memory operand has an implicit lock, so the HLE prefixes are
# also tested without an explicit lock prefix.
xacquire lock xchgb %al,(%ecx)
lock xacquire xchgb %al,(%ecx)
xacquire xchgb %al,(%ecx)
xrelease lock xchgb %al,(%ecx)
lock xrelease xchgb %al,(%ecx)
xrelease xchgb %al,(%ecx)
.byte 0xf0; .byte 0xf2; xchgb %al,(%ecx)
.byte 0xf0; .byte 0xf3; xchgb %al,(%ecx)
xacquire lock xorb %al,(%ecx)
lock xacquire xorb %al,(%ecx)
xrelease lock xorb %al,(%ecx)
lock xrelease xorb %al,(%ecx)
.byte 0xf0; .byte 0xf2; xorb %al,(%ecx)
.byte 0xf0; .byte 0xf3; xorb %al,(%ecx)
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
# Same HLE prefix matrix as the byte group above, for 16-bit register→memory
# forms (operand-size 0x66 prefix interacting with lock/xacquire/xrelease).
xacquire lock adcw %ax,(%ecx)
lock xacquire adcw %ax,(%ecx)
xrelease lock adcw %ax,(%ecx)
lock xrelease adcw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; adcw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; adcw %ax,(%ecx)
xacquire lock addw %ax,(%ecx)
lock xacquire addw %ax,(%ecx)
xrelease lock addw %ax,(%ecx)
lock xrelease addw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; addw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; addw %ax,(%ecx)
xacquire lock andw %ax,(%ecx)
lock xacquire andw %ax,(%ecx)
xrelease lock andw %ax,(%ecx)
lock xrelease andw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; andw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; andw %ax,(%ecx)
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease movw %ax,(%ecx)
xrelease movw %ax,0x12345678
xacquire lock orw %ax,(%ecx)
lock xacquire orw %ax,(%ecx)
xrelease lock orw %ax,(%ecx)
lock xrelease orw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; orw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; orw %ax,(%ecx)
xacquire lock sbbw %ax,(%ecx)
lock xacquire sbbw %ax,(%ecx)
xrelease lock sbbw %ax,(%ecx)
lock xrelease sbbw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; sbbw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; sbbw %ax,(%ecx)
xacquire lock subw %ax,(%ecx)
lock xacquire subw %ax,(%ecx)
xrelease lock subw %ax,(%ecx)
lock xrelease subw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; subw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; subw %ax,(%ecx)
# xchg: implicit lock, so bare xacquire/xrelease forms are legal too.
xacquire lock xchgw %ax,(%ecx)
lock xacquire xchgw %ax,(%ecx)
xacquire xchgw %ax,(%ecx)
xrelease lock xchgw %ax,(%ecx)
lock xrelease xchgw %ax,(%ecx)
xrelease xchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; xchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; xchgw %ax,(%ecx)
xacquire lock xorw %ax,(%ecx)
lock xacquire xorw %ax,(%ecx)
xrelease lock xorw %ax,(%ecx)
lock xrelease xorw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; xorw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; xorw %ax,(%ecx)
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
# Same HLE prefix matrix for 32-bit register→memory forms.
xacquire lock adcl %eax,(%ecx)
lock xacquire adcl %eax,(%ecx)
xrelease lock adcl %eax,(%ecx)
lock xrelease adcl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; adcl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; adcl %eax,(%ecx)
xacquire lock addl %eax,(%ecx)
lock xacquire addl %eax,(%ecx)
xrelease lock addl %eax,(%ecx)
lock xrelease addl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; addl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; addl %eax,(%ecx)
xacquire lock andl %eax,(%ecx)
lock xacquire andl %eax,(%ecx)
xrelease lock andl %eax,(%ecx)
lock xrelease andl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; andl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; andl %eax,(%ecx)
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease movl %eax,(%ecx)
xrelease movl %eax,0x12345678
xacquire lock orl %eax,(%ecx)
lock xacquire orl %eax,(%ecx)
xrelease lock orl %eax,(%ecx)
lock xrelease orl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; orl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; orl %eax,(%ecx)
xacquire lock sbbl %eax,(%ecx)
lock xacquire sbbl %eax,(%ecx)
xrelease lock sbbl %eax,(%ecx)
lock xrelease sbbl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; sbbl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; sbbl %eax,(%ecx)
xacquire lock subl %eax,(%ecx)
lock xacquire subl %eax,(%ecx)
xrelease lock subl %eax,(%ecx)
lock xrelease subl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; subl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; subl %eax,(%ecx)
# xchg: implicit lock, so bare xacquire/xrelease forms are legal too.
xacquire lock xchgl %eax,(%ecx)
lock xacquire xchgl %eax,(%ecx)
xacquire xchgl %eax,(%ecx)
xrelease lock xchgl %eax,(%ecx)
lock xrelease xchgl %eax,(%ecx)
xrelease xchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; xchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; xchgl %eax,(%ecx)
xacquire lock xorl %eax,(%ecx)
lock xacquire xorl %eax,(%ecx)
xrelease lock xorl %eax,(%ecx)
lock xrelease xorl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; xorl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; xorl %eax,(%ecx)
# Tests for op regs, regs/m16
# HLE prefixes on the two-byte-opcode lockable ops (btc/btr/bts/cmpxchg/xadd),
# 16-bit forms.
xacquire lock btcw %ax,(%ecx)
lock xacquire btcw %ax,(%ecx)
xrelease lock btcw %ax,(%ecx)
lock xrelease btcw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; btcw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; btcw %ax,(%ecx)
xacquire lock btrw %ax,(%ecx)
lock xacquire btrw %ax,(%ecx)
xrelease lock btrw %ax,(%ecx)
lock xrelease btrw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; btrw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; btrw %ax,(%ecx)
xacquire lock btsw %ax,(%ecx)
lock xacquire btsw %ax,(%ecx)
xrelease lock btsw %ax,(%ecx)
lock xrelease btsw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; btsw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; btsw %ax,(%ecx)
xacquire lock cmpxchgw %ax,(%ecx)
lock xacquire cmpxchgw %ax,(%ecx)
xrelease lock cmpxchgw %ax,(%ecx)
lock xrelease cmpxchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; cmpxchgw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; cmpxchgw %ax,(%ecx)
xacquire lock xaddw %ax,(%ecx)
lock xacquire xaddw %ax,(%ecx)
xrelease lock xaddw %ax,(%ecx)
lock xrelease xaddw %ax,(%ecx)
.byte 0xf0; .byte 0xf2; xaddw %ax,(%ecx)
.byte 0xf0; .byte 0xf3; xaddw %ax,(%ecx)
# Tests for op regl regl/m32
# Same btc/btr/bts/cmpxchg/xadd HLE matrix, 32-bit forms.
xacquire lock btcl %eax,(%ecx)
lock xacquire btcl %eax,(%ecx)
xrelease lock btcl %eax,(%ecx)
lock xrelease btcl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; btcl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; btcl %eax,(%ecx)
xacquire lock btrl %eax,(%ecx)
lock xacquire btrl %eax,(%ecx)
xrelease lock btrl %eax,(%ecx)
lock xrelease btrl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; btrl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; btrl %eax,(%ecx)
xacquire lock btsl %eax,(%ecx)
lock xacquire btsl %eax,(%ecx)
xrelease lock btsl %eax,(%ecx)
lock xrelease btsl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; btsl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; btsl %eax,(%ecx)
xacquire lock cmpxchgl %eax,(%ecx)
lock xacquire cmpxchgl %eax,(%ecx)
xrelease lock cmpxchgl %eax,(%ecx)
lock xrelease cmpxchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; cmpxchgl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; cmpxchgl %eax,(%ecx)
xacquire lock xaddl %eax,(%ecx)
lock xacquire xaddl %eax,(%ecx)
xrelease lock xaddl %eax,(%ecx)
lock xrelease xaddl %eax,(%ecx)
.byte 0xf0; .byte 0xf2; xaddl %eax,(%ecx)
.byte 0xf0; .byte 0xf3; xaddl %eax,(%ecx)
# Tests for op regb/m8
# HLE prefixes on the single-operand lockable ops (dec/inc/neg/not), byte forms.
xacquire lock decb (%ecx)
lock xacquire decb (%ecx)
xrelease lock decb (%ecx)
lock xrelease decb (%ecx)
.byte 0xf0; .byte 0xf2; decb (%ecx)
.byte 0xf0; .byte 0xf3; decb (%ecx)
xacquire lock incb (%ecx)
lock xacquire incb (%ecx)
xrelease lock incb (%ecx)
lock xrelease incb (%ecx)
.byte 0xf0; .byte 0xf2; incb (%ecx)
.byte 0xf0; .byte 0xf3; incb (%ecx)
xacquire lock negb (%ecx)
lock xacquire negb (%ecx)
xrelease lock negb (%ecx)
lock xrelease negb (%ecx)
.byte 0xf0; .byte 0xf2; negb (%ecx)
.byte 0xf0; .byte 0xf3; negb (%ecx)
xacquire lock notb (%ecx)
lock xacquire notb (%ecx)
xrelease lock notb (%ecx)
lock xrelease notb (%ecx)
.byte 0xf0; .byte 0xf2; notb (%ecx)
.byte 0xf0; .byte 0xf3; notb (%ecx)
# Tests for op regs/m16
# Single-operand lockable ops, 16-bit forms.
xacquire lock decw (%ecx)
lock xacquire decw (%ecx)
xrelease lock decw (%ecx)
lock xrelease decw (%ecx)
.byte 0xf0; .byte 0xf2; decw (%ecx)
.byte 0xf0; .byte 0xf3; decw (%ecx)
xacquire lock incw (%ecx)
lock xacquire incw (%ecx)
xrelease lock incw (%ecx)
lock xrelease incw (%ecx)
.byte 0xf0; .byte 0xf2; incw (%ecx)
.byte 0xf0; .byte 0xf3; incw (%ecx)
xacquire lock negw (%ecx)
lock xacquire negw (%ecx)
xrelease lock negw (%ecx)
lock xrelease negw (%ecx)
.byte 0xf0; .byte 0xf2; negw (%ecx)
.byte 0xf0; .byte 0xf3; negw (%ecx)
xacquire lock notw (%ecx)
lock xacquire notw (%ecx)
xrelease lock notw (%ecx)
lock xrelease notw (%ecx)
.byte 0xf0; .byte 0xf2; notw (%ecx)
.byte 0xf0; .byte 0xf3; notw (%ecx)
# Tests for op regl/m32
# Single-operand lockable ops, 32-bit forms.
xacquire lock decl (%ecx)
lock xacquire decl (%ecx)
xrelease lock decl (%ecx)
lock xrelease decl (%ecx)
.byte 0xf0; .byte 0xf2; decl (%ecx)
.byte 0xf0; .byte 0xf3; decl (%ecx)
xacquire lock incl (%ecx)
lock xacquire incl (%ecx)
xrelease lock incl (%ecx)
lock xrelease incl (%ecx)
.byte 0xf0; .byte 0xf2; incl (%ecx)
.byte 0xf0; .byte 0xf3; incl (%ecx)
xacquire lock negl (%ecx)
lock xacquire negl (%ecx)
xrelease lock negl (%ecx)
lock xrelease negl (%ecx)
.byte 0xf0; .byte 0xf2; negl (%ecx)
.byte 0xf0; .byte 0xf3; negl (%ecx)
xacquire lock notl (%ecx)
lock xacquire notl (%ecx)
xrelease lock notl (%ecx)
lock xrelease notl (%ecx)
.byte 0xf0; .byte 0xf2; notl (%ecx)
.byte 0xf0; .byte 0xf3; notl (%ecx)
# Tests for op m64
# cmpxchg8b with an explicit `q` suffix — unusual spelling; presumably checks
# that gas accepts the redundant 64-bit operand-size suffix here (TODO confirm
# against the paired dump file).
xacquire lock cmpxchg8bq (%ecx)
lock xacquire cmpxchg8bq (%ecx)
xrelease lock cmpxchg8bq (%ecx)
lock xrelease cmpxchg8bq (%ecx)
.byte 0xf0; .byte 0xf2; cmpxchg8bq (%ecx)
.byte 0xf0; .byte 0xf3; cmpxchg8bq (%ecx)
# Tests for op regb, regb/m8
# cmpxchg/xadd byte forms with a non-accumulator source (%cl).
xacquire lock cmpxchgb %cl,(%ecx)
lock xacquire cmpxchgb %cl,(%ecx)
xrelease lock cmpxchgb %cl,(%ecx)
lock xrelease cmpxchgb %cl,(%ecx)
.byte 0xf0; .byte 0xf2; cmpxchgb %cl,(%ecx)
.byte 0xf0; .byte 0xf3; cmpxchgb %cl,(%ecx)
xacquire lock xaddb %cl,(%ecx)
lock xacquire xaddb %cl,(%ecx)
xrelease lock xaddb %cl,(%ecx)
lock xrelease xaddb %cl,(%ecx)
.byte 0xf0; .byte 0xf2; xaddb %cl,(%ecx)
.byte 0xf0; .byte 0xf3; xaddb %cl,(%ecx)
# From here on the whole HLE matrix is repeated in Intel syntax, to verify
# both parsers produce identical encodings.
.intel_syntax noprefix
# Tests for op imm8 regb/m8
xacquire lock adc BYTE PTR [ecx],100
lock xacquire adc BYTE PTR [ecx],100
xrelease lock adc BYTE PTR [ecx],100
lock xrelease adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [ecx],100
xacquire lock add BYTE PTR [ecx],100
lock xacquire add BYTE PTR [ecx],100
xrelease lock add BYTE PTR [ecx],100
lock xrelease add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [ecx],100
xacquire lock and BYTE PTR [ecx],100
lock xacquire and BYTE PTR [ecx],100
xrelease lock and BYTE PTR [ecx],100
lock xrelease and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [ecx],100
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov BYTE PTR [ecx],100
xacquire lock or BYTE PTR [ecx],100
lock xacquire or BYTE PTR [ecx],100
xrelease lock or BYTE PTR [ecx],100
lock xrelease or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [ecx],100
xacquire lock sbb BYTE PTR [ecx],100
lock xacquire sbb BYTE PTR [ecx],100
xrelease lock sbb BYTE PTR [ecx],100
lock xrelease sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [ecx],100
xacquire lock sub BYTE PTR [ecx],100
lock xacquire sub BYTE PTR [ecx],100
xrelease lock sub BYTE PTR [ecx],100
lock xrelease sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [ecx],100
xacquire lock xor BYTE PTR [ecx],100
lock xacquire xor BYTE PTR [ecx],100
xrelease lock xor BYTE PTR [ecx],100
lock xrelease xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [ecx],100
# Tests for op imm16 regs/m16
# Immediate 1000 forces the full imm16 encoding (too large for sign-extended
# imm8).
xacquire lock adc WORD PTR [ecx],1000
lock xacquire adc WORD PTR [ecx],1000
xrelease lock adc WORD PTR [ecx],1000
lock xrelease adc WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; adc WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; adc WORD PTR [ecx],1000
xacquire lock add WORD PTR [ecx],1000
lock xacquire add WORD PTR [ecx],1000
xrelease lock add WORD PTR [ecx],1000
lock xrelease add WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; add WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; add WORD PTR [ecx],1000
xacquire lock and WORD PTR [ecx],1000
lock xacquire and WORD PTR [ecx],1000
xrelease lock and WORD PTR [ecx],1000
lock xrelease and WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; and WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; and WORD PTR [ecx],1000
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov WORD PTR [ecx],1000
xacquire lock or WORD PTR [ecx],1000
lock xacquire or WORD PTR [ecx],1000
xrelease lock or WORD PTR [ecx],1000
lock xrelease or WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; or WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; or WORD PTR [ecx],1000
xacquire lock sbb WORD PTR [ecx],1000
lock xacquire sbb WORD PTR [ecx],1000
xrelease lock sbb WORD PTR [ecx],1000
lock xrelease sbb WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; sbb WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; sbb WORD PTR [ecx],1000
xacquire lock sub WORD PTR [ecx],1000
lock xacquire sub WORD PTR [ecx],1000
xrelease lock sub WORD PTR [ecx],1000
lock xrelease sub WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; sub WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; sub WORD PTR [ecx],1000
xacquire lock xor WORD PTR [ecx],1000
lock xacquire xor WORD PTR [ecx],1000
xrelease lock xor WORD PTR [ecx],1000
lock xrelease xor WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf2; xor WORD PTR [ecx],1000
.byte 0xf0; .byte 0xf3; xor WORD PTR [ecx],1000
# Tests for op imm32 regl/m32
# Immediate 10000000 forces the full imm32 encoding.
xacquire lock adc DWORD PTR [ecx],10000000
lock xacquire adc DWORD PTR [ecx],10000000
xrelease lock adc DWORD PTR [ecx],10000000
lock xrelease adc DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; adc DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; adc DWORD PTR [ecx],10000000
xacquire lock add DWORD PTR [ecx],10000000
lock xacquire add DWORD PTR [ecx],10000000
xrelease lock add DWORD PTR [ecx],10000000
lock xrelease add DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; add DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; add DWORD PTR [ecx],10000000
xacquire lock and DWORD PTR [ecx],10000000
lock xacquire and DWORD PTR [ecx],10000000
xrelease lock and DWORD PTR [ecx],10000000
lock xrelease and DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; and DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; and DWORD PTR [ecx],10000000
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov DWORD PTR [ecx],10000000
xacquire lock or DWORD PTR [ecx],10000000
lock xacquire or DWORD PTR [ecx],10000000
xrelease lock or DWORD PTR [ecx],10000000
lock xrelease or DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; or DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; or DWORD PTR [ecx],10000000
xacquire lock sbb DWORD PTR [ecx],10000000
lock xacquire sbb DWORD PTR [ecx],10000000
xrelease lock sbb DWORD PTR [ecx],10000000
lock xrelease sbb DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [ecx],10000000
xacquire lock sub DWORD PTR [ecx],10000000
lock xacquire sub DWORD PTR [ecx],10000000
xrelease lock sub DWORD PTR [ecx],10000000
lock xrelease sub DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; sub DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; sub DWORD PTR [ecx],10000000
xacquire lock xor DWORD PTR [ecx],10000000
lock xacquire xor DWORD PTR [ecx],10000000
xrelease lock xor DWORD PTR [ecx],10000000
lock xrelease xor DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf2; xor DWORD PTR [ecx],10000000
.byte 0xf0; .byte 0xf3; xor DWORD PTR [ecx],10000000
# Tests for op imm8 regs/m16
# Immediate 100 fits in a sign-extended imm8; also covers btc/btr/bts with an
# immediate bit index.
xacquire lock adc WORD PTR [ecx],100
lock xacquire adc WORD PTR [ecx],100
xrelease lock adc WORD PTR [ecx],100
lock xrelease adc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc WORD PTR [ecx],100
xacquire lock add WORD PTR [ecx],100
lock xacquire add WORD PTR [ecx],100
xrelease lock add WORD PTR [ecx],100
lock xrelease add WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; add WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; add WORD PTR [ecx],100
xacquire lock and WORD PTR [ecx],100
lock xacquire and WORD PTR [ecx],100
xrelease lock and WORD PTR [ecx],100
lock xrelease and WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; and WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; and WORD PTR [ecx],100
xacquire lock btc WORD PTR [ecx],100
lock xacquire btc WORD PTR [ecx],100
xrelease lock btc WORD PTR [ecx],100
lock xrelease btc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btc WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btc WORD PTR [ecx],100
xacquire lock btr WORD PTR [ecx],100
lock xacquire btr WORD PTR [ecx],100
xrelease lock btr WORD PTR [ecx],100
lock xrelease btr WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btr WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btr WORD PTR [ecx],100
xacquire lock bts WORD PTR [ecx],100
lock xacquire bts WORD PTR [ecx],100
xrelease lock bts WORD PTR [ecx],100
lock xrelease bts WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; bts WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; bts WORD PTR [ecx],100
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov WORD PTR [ecx],100
xacquire lock or WORD PTR [ecx],100
lock xacquire or WORD PTR [ecx],100
xrelease lock or WORD PTR [ecx],100
lock xrelease or WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; or WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; or WORD PTR [ecx],100
xacquire lock sbb WORD PTR [ecx],100
lock xacquire sbb WORD PTR [ecx],100
xrelease lock sbb WORD PTR [ecx],100
lock xrelease sbb WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb WORD PTR [ecx],100
xacquire lock sub WORD PTR [ecx],100
lock xacquire sub WORD PTR [ecx],100
xrelease lock sub WORD PTR [ecx],100
lock xrelease sub WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub WORD PTR [ecx],100
xacquire lock xor WORD PTR [ecx],100
lock xacquire xor WORD PTR [ecx],100
xrelease lock xor WORD PTR [ecx],100
lock xrelease xor WORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor WORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor WORD PTR [ecx],100
# Tests for op imm8 regl/m32
# Sign-extended imm8 forms against a dword operand, including immediate-index
# btc/btr/bts.
xacquire lock adc DWORD PTR [ecx],100
lock xacquire adc DWORD PTR [ecx],100
xrelease lock adc DWORD PTR [ecx],100
lock xrelease adc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc DWORD PTR [ecx],100
xacquire lock add DWORD PTR [ecx],100
lock xacquire add DWORD PTR [ecx],100
xrelease lock add DWORD PTR [ecx],100
lock xrelease add DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; add DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; add DWORD PTR [ecx],100
xacquire lock and DWORD PTR [ecx],100
lock xacquire and DWORD PTR [ecx],100
xrelease lock and DWORD PTR [ecx],100
lock xrelease and DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; and DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; and DWORD PTR [ecx],100
xacquire lock btc DWORD PTR [ecx],100
lock xacquire btc DWORD PTR [ecx],100
xrelease lock btc DWORD PTR [ecx],100
lock xrelease btc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btc DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btc DWORD PTR [ecx],100
xacquire lock btr DWORD PTR [ecx],100
lock xacquire btr DWORD PTR [ecx],100
xrelease lock btr DWORD PTR [ecx],100
lock xrelease btr DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; btr DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; btr DWORD PTR [ecx],100
xacquire lock bts DWORD PTR [ecx],100
lock xacquire bts DWORD PTR [ecx],100
xrelease lock bts DWORD PTR [ecx],100
lock xrelease bts DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; bts DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; bts DWORD PTR [ecx],100
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov DWORD PTR [ecx],100
xacquire lock or DWORD PTR [ecx],100
lock xacquire or DWORD PTR [ecx],100
xrelease lock or DWORD PTR [ecx],100
lock xrelease or DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; or DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; or DWORD PTR [ecx],100
xacquire lock sbb DWORD PTR [ecx],100
lock xacquire sbb DWORD PTR [ecx],100
xrelease lock sbb DWORD PTR [ecx],100
lock xrelease sbb DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [ecx],100
xacquire lock sub DWORD PTR [ecx],100
lock xacquire sub DWORD PTR [ecx],100
xrelease lock sub DWORD PTR [ecx],100
lock xrelease sub DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub DWORD PTR [ecx],100
xacquire lock xor DWORD PTR [ecx],100
lock xacquire xor DWORD PTR [ecx],100
xrelease lock xor DWORD PTR [ecx],100
lock xrelease xor DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor DWORD PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor DWORD PTR [ecx],100
# Tests for op imm8 regb/m8
# imm8 against a byte operand (no bt* group: those have no byte form).
xacquire lock adc BYTE PTR [ecx],100
lock xacquire adc BYTE PTR [ecx],100
xrelease lock adc BYTE PTR [ecx],100
lock xrelease adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; adc BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; adc BYTE PTR [ecx],100
xacquire lock add BYTE PTR [ecx],100
lock xacquire add BYTE PTR [ecx],100
xrelease lock add BYTE PTR [ecx],100
lock xrelease add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; add BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; add BYTE PTR [ecx],100
xacquire lock and BYTE PTR [ecx],100
lock xacquire and BYTE PTR [ecx],100
xrelease lock and BYTE PTR [ecx],100
lock xrelease and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; and BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; and BYTE PTR [ecx],100
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov BYTE PTR [ecx],100
xacquire lock or BYTE PTR [ecx],100
lock xacquire or BYTE PTR [ecx],100
xrelease lock or BYTE PTR [ecx],100
lock xrelease or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; or BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; or BYTE PTR [ecx],100
xacquire lock sbb BYTE PTR [ecx],100
lock xacquire sbb BYTE PTR [ecx],100
xrelease lock sbb BYTE PTR [ecx],100
lock xrelease sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [ecx],100
xacquire lock sub BYTE PTR [ecx],100
lock xacquire sub BYTE PTR [ecx],100
xrelease lock sub BYTE PTR [ecx],100
lock xrelease sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; sub BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; sub BYTE PTR [ecx],100
xacquire lock xor BYTE PTR [ecx],100
lock xacquire xor BYTE PTR [ecx],100
xrelease lock xor BYTE PTR [ecx],100
lock xrelease xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf2; xor BYTE PTR [ecx],100
.byte 0xf0; .byte 0xf3; xor BYTE PTR [ecx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
# Intel-syntax counterparts of the AT&T byte register→memory group.
xacquire lock adc BYTE PTR [ecx],al
lock xacquire adc BYTE PTR [ecx],al
xrelease lock adc BYTE PTR [ecx],al
lock xrelease adc BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; adc BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; adc BYTE PTR [ecx],al
xacquire lock add BYTE PTR [ecx],al
lock xacquire add BYTE PTR [ecx],al
xrelease lock add BYTE PTR [ecx],al
lock xrelease add BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; add BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; add BYTE PTR [ecx],al
xacquire lock and BYTE PTR [ecx],al
lock xacquire and BYTE PTR [ecx],al
xrelease lock and BYTE PTR [ecx],al
lock xrelease and BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; and BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; and BYTE PTR [ecx],al
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov BYTE PTR [ecx],al
xacquire lock or BYTE PTR [ecx],al
lock xacquire or BYTE PTR [ecx],al
xrelease lock or BYTE PTR [ecx],al
lock xrelease or BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; or BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; or BYTE PTR [ecx],al
xacquire lock sbb BYTE PTR [ecx],al
lock xacquire sbb BYTE PTR [ecx],al
xrelease lock sbb BYTE PTR [ecx],al
lock xrelease sbb BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; sbb BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; sbb BYTE PTR [ecx],al
xacquire lock sub BYTE PTR [ecx],al
lock xacquire sub BYTE PTR [ecx],al
xrelease lock sub BYTE PTR [ecx],al
lock xrelease sub BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; sub BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; sub BYTE PTR [ecx],al
# xchg: implicit lock, so bare xacquire/xrelease forms are legal too.
xacquire lock xchg BYTE PTR [ecx],al
lock xacquire xchg BYTE PTR [ecx],al
xacquire xchg BYTE PTR [ecx],al
xrelease lock xchg BYTE PTR [ecx],al
lock xrelease xchg BYTE PTR [ecx],al
xrelease xchg BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; xchg BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; xchg BYTE PTR [ecx],al
xacquire lock xor BYTE PTR [ecx],al
lock xacquire xor BYTE PTR [ecx],al
xrelease lock xor BYTE PTR [ecx],al
lock xrelease xor BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf2; xor BYTE PTR [ecx],al
.byte 0xf0; .byte 0xf3; xor BYTE PTR [ecx],al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
# Intel-syntax counterparts of the AT&T 16-bit register→memory group.
xacquire lock adc WORD PTR [ecx],ax
lock xacquire adc WORD PTR [ecx],ax
xrelease lock adc WORD PTR [ecx],ax
lock xrelease adc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; adc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; adc WORD PTR [ecx],ax
xacquire lock add WORD PTR [ecx],ax
lock xacquire add WORD PTR [ecx],ax
xrelease lock add WORD PTR [ecx],ax
lock xrelease add WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; add WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; add WORD PTR [ecx],ax
xacquire lock and WORD PTR [ecx],ax
lock xacquire and WORD PTR [ecx],ax
xrelease lock and WORD PTR [ecx],ax
lock xrelease and WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; and WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; and WORD PTR [ecx],ax
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov WORD PTR [ecx],ax
xacquire lock or WORD PTR [ecx],ax
lock xacquire or WORD PTR [ecx],ax
xrelease lock or WORD PTR [ecx],ax
lock xrelease or WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; or WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; or WORD PTR [ecx],ax
xacquire lock sbb WORD PTR [ecx],ax
lock xacquire sbb WORD PTR [ecx],ax
xrelease lock sbb WORD PTR [ecx],ax
lock xrelease sbb WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; sbb WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; sbb WORD PTR [ecx],ax
xacquire lock sub WORD PTR [ecx],ax
lock xacquire sub WORD PTR [ecx],ax
xrelease lock sub WORD PTR [ecx],ax
lock xrelease sub WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; sub WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; sub WORD PTR [ecx],ax
# xchg: implicit lock, so bare xacquire/xrelease forms are legal too.
xacquire lock xchg WORD PTR [ecx],ax
lock xacquire xchg WORD PTR [ecx],ax
xacquire xchg WORD PTR [ecx],ax
xrelease lock xchg WORD PTR [ecx],ax
lock xrelease xchg WORD PTR [ecx],ax
xrelease xchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; xchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; xchg WORD PTR [ecx],ax
xacquire lock xor WORD PTR [ecx],ax
lock xacquire xor WORD PTR [ecx],ax
xrelease lock xor WORD PTR [ecx],ax
lock xrelease xor WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; xor WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; xor WORD PTR [ecx],ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
# Intel-syntax counterparts of the AT&T 32-bit register→memory group.
xacquire lock adc DWORD PTR [ecx],eax
lock xacquire adc DWORD PTR [ecx],eax
xrelease lock adc DWORD PTR [ecx],eax
lock xrelease adc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; adc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; adc DWORD PTR [ecx],eax
xacquire lock add DWORD PTR [ecx],eax
lock xacquire add DWORD PTR [ecx],eax
xrelease lock add DWORD PTR [ecx],eax
lock xrelease add DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; add DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; add DWORD PTR [ecx],eax
xacquire lock and DWORD PTR [ecx],eax
lock xacquire and DWORD PTR [ecx],eax
xrelease lock and DWORD PTR [ecx],eax
lock xrelease and DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; and DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; and DWORD PTR [ecx],eax
# mov-to-memory: xrelease-only, no lock (HLE special case).
xrelease mov DWORD PTR [ecx],eax
xacquire lock or DWORD PTR [ecx],eax
lock xacquire or DWORD PTR [ecx],eax
xrelease lock or DWORD PTR [ecx],eax
lock xrelease or DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; or DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; or DWORD PTR [ecx],eax
xacquire lock sbb DWORD PTR [ecx],eax
lock xacquire sbb DWORD PTR [ecx],eax
xrelease lock sbb DWORD PTR [ecx],eax
lock xrelease sbb DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; sbb DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; sbb DWORD PTR [ecx],eax
xacquire lock sub DWORD PTR [ecx],eax
lock xacquire sub DWORD PTR [ecx],eax
xrelease lock sub DWORD PTR [ecx],eax
lock xrelease sub DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; sub DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; sub DWORD PTR [ecx],eax
# xchg: implicit lock, so bare xacquire/xrelease forms are legal too.
xacquire lock xchg DWORD PTR [ecx],eax
lock xacquire xchg DWORD PTR [ecx],eax
xacquire xchg DWORD PTR [ecx],eax
xrelease lock xchg DWORD PTR [ecx],eax
lock xrelease xchg DWORD PTR [ecx],eax
xrelease xchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; xchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; xchg DWORD PTR [ecx],eax
xacquire lock xor DWORD PTR [ecx],eax
lock xacquire xor DWORD PTR [ecx],eax
xrelease lock xor DWORD PTR [ecx],eax
lock xrelease xor DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; xor DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; xor DWORD PTR [ecx],eax
# Tests for op regs, regs/m16
# Intel-syntax btc/btr/bts/cmpxchg/xadd, 16-bit forms.
xacquire lock btc WORD PTR [ecx],ax
lock xacquire btc WORD PTR [ecx],ax
xrelease lock btc WORD PTR [ecx],ax
lock xrelease btc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; btc WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; btc WORD PTR [ecx],ax
xacquire lock btr WORD PTR [ecx],ax
lock xacquire btr WORD PTR [ecx],ax
xrelease lock btr WORD PTR [ecx],ax
lock xrelease btr WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; btr WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; btr WORD PTR [ecx],ax
xacquire lock bts WORD PTR [ecx],ax
lock xacquire bts WORD PTR [ecx],ax
xrelease lock bts WORD PTR [ecx],ax
lock xrelease bts WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; bts WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; bts WORD PTR [ecx],ax
xacquire lock cmpxchg WORD PTR [ecx],ax
lock xacquire cmpxchg WORD PTR [ecx],ax
xrelease lock cmpxchg WORD PTR [ecx],ax
lock xrelease cmpxchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; cmpxchg WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; cmpxchg WORD PTR [ecx],ax
xacquire lock xadd WORD PTR [ecx],ax
lock xacquire xadd WORD PTR [ecx],ax
xrelease lock xadd WORD PTR [ecx],ax
lock xrelease xadd WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf2; xadd WORD PTR [ecx],ax
.byte 0xf0; .byte 0xf3; xadd WORD PTR [ecx],ax
# Tests for op regl regl/m32
# Intel-syntax btc/btr/bts/cmpxchg/xadd, 32-bit forms.
xacquire lock btc DWORD PTR [ecx],eax
lock xacquire btc DWORD PTR [ecx],eax
xrelease lock btc DWORD PTR [ecx],eax
lock xrelease btc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; btc DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; btc DWORD PTR [ecx],eax
xacquire lock btr DWORD PTR [ecx],eax
lock xacquire btr DWORD PTR [ecx],eax
xrelease lock btr DWORD PTR [ecx],eax
lock xrelease btr DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; btr DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; btr DWORD PTR [ecx],eax
xacquire lock bts DWORD PTR [ecx],eax
lock xacquire bts DWORD PTR [ecx],eax
xrelease lock bts DWORD PTR [ecx],eax
lock xrelease bts DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; bts DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; bts DWORD PTR [ecx],eax
xacquire lock cmpxchg DWORD PTR [ecx],eax
lock xacquire cmpxchg DWORD PTR [ecx],eax
xrelease lock cmpxchg DWORD PTR [ecx],eax
lock xrelease cmpxchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; cmpxchg DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; cmpxchg DWORD PTR [ecx],eax
xacquire lock xadd DWORD PTR [ecx],eax
lock xacquire xadd DWORD PTR [ecx],eax
xrelease lock xadd DWORD PTR [ecx],eax
lock xrelease xadd DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf2; xadd DWORD PTR [ecx],eax
.byte 0xf0; .byte 0xf3; xadd DWORD PTR [ecx],eax
# Tests for op regb/m8
# Intel-syntax single-operand lockable ops (dec/inc/neg/not), byte forms.
xacquire lock dec BYTE PTR [ecx]
lock xacquire dec BYTE PTR [ecx]
xrelease lock dec BYTE PTR [ecx]
lock xrelease dec BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; dec BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; dec BYTE PTR [ecx]
xacquire lock inc BYTE PTR [ecx]
lock xacquire inc BYTE PTR [ecx]
xrelease lock inc BYTE PTR [ecx]
lock xrelease inc BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; inc BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; inc BYTE PTR [ecx]
xacquire lock neg BYTE PTR [ecx]
lock xacquire neg BYTE PTR [ecx]
xrelease lock neg BYTE PTR [ecx]
lock xrelease neg BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; neg BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; neg BYTE PTR [ecx]
xacquire lock not BYTE PTR [ecx]
lock xacquire not BYTE PTR [ecx]
xrelease lock not BYTE PTR [ecx]
lock xrelease not BYTE PTR [ecx]
.byte 0xf0; .byte 0xf2; not BYTE PTR [ecx]
.byte 0xf0; .byte 0xf3; not BYTE PTR [ecx]
# Tests for op regs/m16
# Intel-syntax single-operand lockable ops, 16-bit forms.
xacquire lock dec WORD PTR [ecx]
lock xacquire dec WORD PTR [ecx]
xrelease lock dec WORD PTR [ecx]
lock xrelease dec WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; dec WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; dec WORD PTR [ecx]
xacquire lock inc WORD PTR [ecx]
lock xacquire inc WORD PTR [ecx]
xrelease lock inc WORD PTR [ecx]
lock xrelease inc WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; inc WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; inc WORD PTR [ecx]
xacquire lock neg WORD PTR [ecx]
lock xacquire neg WORD PTR [ecx]
xrelease lock neg WORD PTR [ecx]
lock xrelease neg WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; neg WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; neg WORD PTR [ecx]
xacquire lock not WORD PTR [ecx]
lock xacquire not WORD PTR [ecx]
xrelease lock not WORD PTR [ecx]
lock xrelease not WORD PTR [ecx]
.byte 0xf0; .byte 0xf2; not WORD PTR [ecx]
.byte 0xf0; .byte 0xf3; not WORD PTR [ecx]
# Tests for op regl/m32
# Intel-syntax single-operand lockable ops, 32-bit forms.
xacquire lock dec DWORD PTR [ecx]
lock xacquire dec DWORD PTR [ecx]
xrelease lock dec DWORD PTR [ecx]
lock xrelease dec DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; dec DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; dec DWORD PTR [ecx]
xacquire lock inc DWORD PTR [ecx]
lock xacquire inc DWORD PTR [ecx]
xrelease lock inc DWORD PTR [ecx]
lock xrelease inc DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; inc DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; inc DWORD PTR [ecx]
xacquire lock neg DWORD PTR [ecx]
lock xacquire neg DWORD PTR [ecx]
xrelease lock neg DWORD PTR [ecx]
lock xrelease neg DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; neg DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; neg DWORD PTR [ecx]
xacquire lock not DWORD PTR [ecx]
lock xacquire not DWORD PTR [ecx]
xrelease lock not DWORD PTR [ecx]
lock xrelease not DWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; not DWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; not DWORD PTR [ecx]
# Tests for op m64
xacquire lock cmpxchg8b QWORD PTR [ecx]
lock xacquire cmpxchg8b QWORD PTR [ecx]
xrelease lock cmpxchg8b QWORD PTR [ecx]
lock xrelease cmpxchg8b QWORD PTR [ecx]
.byte 0xf0; .byte 0xf2; cmpxchg8b QWORD PTR [ecx]
.byte 0xf0; .byte 0xf3; cmpxchg8b QWORD PTR [ecx]
# Tests for op regb, regb/m8
xacquire lock cmpxchg BYTE PTR [ecx],cl
lock xacquire cmpxchg BYTE PTR [ecx],cl
xrelease lock cmpxchg BYTE PTR [ecx],cl
lock xrelease cmpxchg BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf2; cmpxchg BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf3; cmpxchg BYTE PTR [ecx],cl
xacquire lock xadd BYTE PTR [ecx],cl
lock xacquire xadd BYTE PTR [ecx],cl
xrelease lock xadd BYTE PTR [ecx],cl
lock xrelease xadd BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf2; xadd BYTE PTR [ecx],cl
.byte 0xf0; .byte 0xf3; xadd BYTE PTR [ecx],cl
|
stsp/binutils-ia16
| 1,186
|
gas/testsuite/gas/i386/x86-64-optimize-1.s
|
# Check 64bit instructions with optimized encoding
#
# NOTE(review): gas testsuite fixture. Every instruction below is paired
# with an expected disassembly in a companion .d file; do not change any
# mnemonic, operand, or immediate without regenerating that dump.
.allow_index_reg
.text
_start:
# and/test with immediates at the signed-imm32 / signed-imm8 boundaries:
# the optimizer may shrink these to sign-extended imm8 forms or drop the
# REX.W prefix where the result is architecturally identical.
andq $foo, %rax
andq $((1<<31) - 1), %rax
andq $((1<<31) - 1), %rbx
andq $((1<<31) - 1), %r14
andq $-((1<<31)), %rax
andq $-((1<<31)), %rbx
andq $-((1<<31)), %r14
andq $((1<<7) - 1), %rax
andq $((1<<7) - 1), %rbx
andq $((1<<7) - 1), %r14
andq $-((1<<7)), %rax
andq $-((1<<7)), %rbx
andq $-((1<<7)), %r14
testq $((1<<31) - 1), %rax
testq $((1<<31) - 1), %rbx
testq $((1<<31) - 1), %r14
testq $-((1<<31)), %rax
testq $-((1<<31)), %rbx
testq $-((1<<31)), %r14
# xor/sub self-idioms (register zeroing) and reg-reg forms: candidates
# for the short 32-bit-operand encodings since the upper half is zeroed
# implicitly; memory-destination and non-self forms must stay 64-bit.
xorq (%rsi), %rax
xorq %rax, %rax
xorq %rbx, %rbx
xorq %r14, %r14
xorq %rdx, %rax
xorq %rdx, %rbx
xorq %rdx, %r14
subq %rax, %rax
subq %rbx, %rbx
subq %r14, %r14
subq %rdx, %rax
subq %rdx, %rbx
subq %rdx, %r14
andq $((1<<31) - 1), (%rax)
andq $-((1<<31)), (%rax)
testq $((1<<31) - 1), (%rax)
testq $-((1<<31)), (%rax)
# mov with 64-bit immediates: values representable as unsigned 32 bits
# can use the 5/6-byte mov-imm32 form instead of the 10-byte movabs;
# 0x100000000 does not fit and must keep the full 64-bit immediate.
mov $((1<<31) - 1),%rax
movq $((1<<31) - 1),%rax
mov $((1<<31) - 1),%r8
movq $((1<<31) - 1),%r8
mov $0xffffffff,%rax
movq $0xffffffff,%rax
mov $0xffffffff,%r8
movq $0xffffffff,%r8
mov $1023,%rax
movq $1023,%rax
mov $0x100000000,%rax
movq $0x100000000,%rax
# clr pseudo-op: expected to assemble as the xor zeroing idiom.
clrq %rax
clrq %r14
|
stsp/binutils-ia16
| 2,330
|
gas/testsuite/gas/i386/avx512f_gfni.s
|
# Check 32bit AVX512F,GFNI instructions
#
# NOTE(review): gas testsuite fixture paired with an expected-dump .d
# file; instruction text must stay byte-identical.  Each GFNI insn is
# exercised with: plain zmm operands, a {%k7} write mask, {%k7}{z}
# zero-masking, a full SIB memory operand, a Disp8*N-compressible
# displacement, and (for the affine forms) a {1to8} broadcast.
.allow_index_reg
.text
_start:
# AT&T-syntax half.
vgf2p8affineqb $0xab, %zmm4, %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineqb $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F,GFNI
vgf2p8affineqb $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F,GFNI
vgf2p8affineqb $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineqb $123, 8128(%edx), %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8affineqb $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8affineinvqb $0xab, %zmm4, %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineinvqb $0xab, %zmm4, %zmm5, %zmm6{%k7} # AVX512F,GFNI
vgf2p8affineinvqb $0xab, %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F,GFNI
vgf2p8affineinvqb $123, -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8affineinvqb $123, 8128(%edx), %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8affineinvqb $123, 1016(%edx){1to8}, %zmm5, %zmm6 # AVX512F,GFNI Disp8
vgf2p8mulb %zmm4, %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8mulb %zmm4, %zmm5, %zmm6{%k7} # AVX512F,GFNI
vgf2p8mulb %zmm4, %zmm5, %zmm6{%k7}{z} # AVX512F,GFNI
vgf2p8mulb -123456(%esp,%esi,8), %zmm5, %zmm6 # AVX512F,GFNI
vgf2p8mulb 8128(%edx), %zmm5, %zmm6 # AVX512F,GFNI Disp8
# Intel-syntax half: the same encodings spelled with reversed operand
# order, to verify both parsers produce identical machine code.
.intel_syntax noprefix
vgf2p8affineqb zmm6, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineqb zmm6{k7}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineqb zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineqb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F,GFNI
vgf2p8affineqb zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F,GFNI Disp8
vgf2p8affineqb zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512F,GFNI Disp8
vgf2p8affineinvqb zmm6, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineinvqb zmm6{k7}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineinvqb zmm6{k7}{z}, zmm5, zmm4, 0xab # AVX512F,GFNI
vgf2p8affineinvqb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456], 123 # AVX512F,GFNI
vgf2p8affineinvqb zmm6, zmm5, ZMMWORD PTR [edx+8128], 123 # AVX512F,GFNI Disp8
vgf2p8affineinvqb zmm6, zmm5, [edx+1016]{1to8}, 123 # AVX512F,GFNI Disp8
vgf2p8mulb zmm6, zmm5, zmm4 # AVX512F,GFNI
vgf2p8mulb zmm6{k7}, zmm5, zmm4 # AVX512F,GFNI
vgf2p8mulb zmm6{k7}{z}, zmm5, zmm4 # AVX512F,GFNI
vgf2p8mulb zmm6, zmm5, ZMMWORD PTR [esp+esi*8-123456] # AVX512F,GFNI
vgf2p8mulb zmm6, zmm5, ZMMWORD PTR [edx+8128] # AVX512F,GFNI Disp8
|
stsp/binutils-ia16
| 159,280
|
gas/testsuite/gas/i386/avx512bw_vl.s
|
# Check 32bit AVX512{BW,VL} instructions
.allow_index_reg
.text
_start:
vpabsb %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsb %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsb 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsb %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsb (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsb 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsb -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsb -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpabsw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpabsw 2032(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 2048(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw -2048(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -2064(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpabsw %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpabsw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpabsw 4064(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw 4096(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpabsw -4096(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpabsw -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpackssdw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackssdw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackssdw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackssdw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackssdw -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackssdw -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpacksswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpacksswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpacksswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackusdw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw (%eax){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw -512(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -516(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackusdw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackusdw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw (%eax){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw 512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackusdw -512(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackusdw -516(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpackuswb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpackuswb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpackuswb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddusw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddusw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddusw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpaddw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpaddw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpaddw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $123, -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpalignr $123, %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpalignr $123, -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpalignr $123, -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpavgw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpavgw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpavgw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpavgw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpavgw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpblendmb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpblendmb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastb (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb 127(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb 128(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb -128(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb -129(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb %xmm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastb (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb 127(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb 128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb -128(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastb -129(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb %eax, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb %eax, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastb %ebp, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastb %eax, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastb %eax, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastb %ebp, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw (%ecx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw 254(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw 256(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw -256(%edx), %xmm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw -258(%edx), %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %xmm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw (%ecx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw 254(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw 256(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw -256(%edx), %ymm6{%k7} # AVX512{BW,VL} Disp8
vpbroadcastw -258(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %xmm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw %ebp, %xmm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %ymm6{%k7} # AVX512{BW,VL}
vpbroadcastw %eax, %ymm6{%k7}{z} # AVX512{BW,VL}
vpbroadcastw %ebp, %ymm6{%k7} # AVX512{BW,VL}
vpcmpeqb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpeqw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpeqw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpgtw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpgtw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpblendmw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpblendmw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpblendmw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpblendmw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpblendmw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpblendmw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaddubsw (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddubsw -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddubsw -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaddwd (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaddwd -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaddwd -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpmaxsb (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsb -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsb -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpmaxsw (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpmaxsw -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpmaxsw %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
# AVX512BW + AVX512VL coverage: each mnemonic is probed in xmm and ymm form,
# with merge-masking {%k7} and zero-masking {%k7}{z}, plus memory operands.
# The displacement values bracket the EVEX compressed-displacement (Disp8*N)
# limits: values tagged "Disp8" are exact multiples of the memory-operand
# size (so they must encode as a scaled 8-bit displacement), while the
# adjacent untagged values fall just outside and force a 32-bit displacement.
# Note the shift-count forms (vpsllw/vpsraw/vpsrlw with an XMM/mem source)
# keep a 16-byte memory operand even for ymm destinations, whereas the
# immediate-count and full-vector forms scale with the vector length.
# NOTE(review): generated testsuite input -- instruction order and operand
# values must stay byte-identical to match the expected disassembly (.d) file.
vpmaxsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxub (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxub (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxub -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxub -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmaxuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmaxuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmaxuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminsb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminsb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminub (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminub (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminub -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminub -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpminuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpminuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpminuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpminuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovsxbw (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw 1016(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw 1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw -1032(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw %xmm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmovsxbw (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw 2032(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw 2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovsxbw -2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovsxbw -2064(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovzxbw (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw 1016(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw 1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -1024(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw -1032(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw %xmm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmovzxbw (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw 2032(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw 2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmovzxbw -2048(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmovzxbw -2064(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhrsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhrsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhrsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhrsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhuw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhuw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhuw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhuw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmulhw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmulhw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmulhw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmullw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpmullw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpmullw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpmullw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshufb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshufb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshufhw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshufhw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshufhw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshufhw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpshuflw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpshuflw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpshuflw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpshuflw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
# Shift-count memory operand stays 16 bytes for ymm forms (Disp8*N scale 16).
vpsllw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw %xmm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw 2032(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw 2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw -2048(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw -2064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsraw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsraw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsraw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlvw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsrlvw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsrlvw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsrlvw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsravw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsravw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsravw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsravw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsravw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubsw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubsw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubsw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusb (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusb (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusb -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusb -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubusw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubusw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubusw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsubw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsubw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsubw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsubw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhbw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhbw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhbw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhbw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpckhwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpckhwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpckhwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklbw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklbw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklbw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklbw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklwd (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpunpcklwd (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpunpcklwd -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpunpcklwd -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
# Down-converting (truncating/saturating) word-to-byte moves, register forms.
vpmovwb %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovwb %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovwb %ymm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovwb %ymm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovswb %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovswb %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovswb %ymm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovswb %ymm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovuswb %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovuswb %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpmovuswb %ymm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpmovuswb %ymm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vdbpsadbw $123, %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $0xab, %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vdbpsadbw $123, %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vdbpsadbw $123, -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vdbpsadbw $123, -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpermw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpermw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpermt2w (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpermt2w %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpermt2w (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpermt2w -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpermt2w -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw $123, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $0xab, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllw $123, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllw $123, -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllw $123, -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw %xmm4, %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw %xmm4, %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vpsllvw (%ecx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw 2032(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw 2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw -2048(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw -2064(%edx), %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vpsllvw %ymm4, %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw %ymm4, %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vpsllvw (%ecx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw 4064(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw 4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vpsllvw -4096(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vpsllvw -4128(%edx), %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu8 (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu8 (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu8 -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu8 -4128(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %xmm5, %xmm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu16 (%ecx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -123456(%esp,%esi,8), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 2032(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -2048(%edx), %xmm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 -2064(%edx), %xmm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 %ymm5, %ymm6{%k7}{z}	 # AVX512{BW,VL}
vmovdqu16 (%ecx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -123456(%esp,%esi,8), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 4064(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL}
vmovdqu16 -4096(%edx), %ymm6{%k7}	 # AVX512{BW,VL} Disp8
vmovdqu16 -4128(%edx), %ymm6{%k7} # AVX512{BW,VL}
vpmovwb %xmm6, (%ecx){%k7} # AVX512{BW,VL}
vpmovwb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vpmovwb %xmm6, 1016(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovwb %xmm6, 1024(%edx){%k7} # AVX512{BW,VL}
vpmovwb %xmm6, -1024(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovwb %xmm6, -1032(%edx){%k7} # AVX512{BW,VL}
vpmovwb %ymm6, (%ecx){%k7} # AVX512{BW,VL}
vpmovwb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vpmovwb %ymm6, 2032(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovwb %ymm6, 2048(%edx){%k7} # AVX512{BW,VL}
vpmovwb %ymm6, -2048(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovwb %ymm6, -2064(%edx){%k7} # AVX512{BW,VL}
vpmovswb %xmm6, (%ecx){%k7} # AVX512{BW,VL}
vpmovswb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vpmovswb %xmm6, 1016(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovswb %xmm6, 1024(%edx){%k7} # AVX512{BW,VL}
vpmovswb %xmm6, -1024(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovswb %xmm6, -1032(%edx){%k7} # AVX512{BW,VL}
vpmovswb %ymm6, (%ecx){%k7} # AVX512{BW,VL}
vpmovswb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vpmovswb %ymm6, 2032(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovswb %ymm6, 2048(%edx){%k7} # AVX512{BW,VL}
vpmovswb %ymm6, -2048(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovswb %ymm6, -2064(%edx){%k7} # AVX512{BW,VL}
vpmovuswb %xmm6, (%ecx){%k7} # AVX512{BW,VL}
vpmovuswb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vpmovuswb %xmm6, 1016(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovuswb %xmm6, 1024(%edx){%k7} # AVX512{BW,VL}
vpmovuswb %xmm6, -1024(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovuswb %xmm6, -1032(%edx){%k7} # AVX512{BW,VL}
vpmovuswb %ymm6, (%ecx){%k7} # AVX512{BW,VL}
vpmovuswb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vpmovuswb %ymm6, 2032(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovuswb %ymm6, 2048(%edx){%k7} # AVX512{BW,VL}
vpmovuswb %ymm6, -2048(%edx){%k7} # AVX512{BW,VL} Disp8
vpmovuswb %ymm6, -2064(%edx){%k7} # AVX512{BW,VL}
vmovdqu8 %xmm6, (%ecx){%k7} # AVX512{BW,VL}
vmovdqu8 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vmovdqu8 %xmm6, 2032(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu8 %xmm6, 2048(%edx){%k7} # AVX512{BW,VL}
vmovdqu8 %xmm6, -2048(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu8 %xmm6, -2064(%edx){%k7} # AVX512{BW,VL}
vmovdqu8 %ymm6, (%ecx){%k7} # AVX512{BW,VL}
vmovdqu8 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vmovdqu8 %ymm6, 4064(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu8 %ymm6, 4096(%edx){%k7} # AVX512{BW,VL}
vmovdqu8 %ymm6, -4096(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu8 %ymm6, -4128(%edx){%k7} # AVX512{BW,VL}
vmovdqu16 %xmm6, (%ecx){%k7} # AVX512{BW,VL}
vmovdqu16 %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vmovdqu16 %xmm6, 2032(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu16 %xmm6, 2048(%edx){%k7} # AVX512{BW,VL}
vmovdqu16 %xmm6, -2048(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu16 %xmm6, -2064(%edx){%k7} # AVX512{BW,VL}
vmovdqu16 %ymm6, (%ecx){%k7} # AVX512{BW,VL}
vmovdqu16 %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{BW,VL}
vmovdqu16 %ymm6, 4064(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu16 %ymm6, 4096(%edx){%k7} # AVX512{BW,VL}
vmovdqu16 %ymm6, -4096(%edx){%k7} # AVX512{BW,VL} Disp8
vmovdqu16 %ymm6, -4128(%edx){%k7} # AVX512{BW,VL}
vpermi2w %xmm4, %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpermi2w %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{BW,VL}
vpermi2w (%ecx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpermi2w -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpermi2w 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpermi2w 2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpermi2w -2048(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL} Disp8
vpermi2w -2064(%edx), %xmm5, %xmm6{%k7} # AVX512{BW,VL}
vpermi2w %ymm4, %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpermi2w %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{BW,VL}
vpermi2w (%ecx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpermi2w -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpermi2w 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpermi2w 4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vpermi2w -4096(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL} Disp8
vpermi2w -4128(%edx), %ymm5, %ymm6{%k7} # AVX512{BW,VL}
vptestmb %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmb (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmb -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmb 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmb 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmb -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmb -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmb %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmb (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmb -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmb 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmb 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmb -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmb -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vptestmw %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vptestmw -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vptestmw -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpmovb2m %xmm6, %k5 # AVX512{BW,VL}
vpmovb2m %ymm6, %k5 # AVX512{BW,VL}
vpmovw2m %xmm6, %k5 # AVX512{BW,VL}
vpmovw2m %ymm6, %k5 # AVX512{BW,VL}
vpmovm2b %k5, %xmm6 # AVX512{BW,VL}
vpmovm2b %k5, %ymm6 # AVX512{BW,VL}
vpmovm2w %k5, %xmm6 # AVX512{BW,VL}
vpmovm2w %k5, %ymm6 # AVX512{BW,VL}
vptestnmb %xmm4, %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb (%ecx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb 2032(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb 2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb -2064(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb %ymm4, %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb (%ecx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb 4064(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb 4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmb -4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmb -4128(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw %xmm4, %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw (%ecx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -123456(%esp,%esi,8), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw 2032(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw 2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -2048(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw -2064(%edx), %xmm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw %ymm4, %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw (%ecx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -123456(%esp,%esi,8), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw 4064(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw 4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vptestnmw -4096(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL} Disp8
vptestnmw -4128(%edx), %ymm5, %k5{%k7} # AVX512{BW,VL}
vpcmpb $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpb $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpb $0, %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpb $0, %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpleb %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpleb 0x7f0(%eax), %xmm6, %k5 # AVX512{BW,VL} Disp8
vpcmpleb 0x800(%eax), %xmm6, %k5 # AVX512{BW,VL}
vpcmpleb %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpleb 0xfe0(%eax), %ymm6, %k5 # AVX512{BW,VL} Disp8
vpcmpleb 0x1000(%eax), %ymm6, %k5 # AVX512{BW,VL}
vpcmpltb %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpltb %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpneqb %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpneqb %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnleb %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnleb %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnltb %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnltb %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpw $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpw $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpw $0, %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpw $0, %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmplew %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmplew 0x7f0(%eax), %xmm6, %k5 # AVX512{BW,VL} Disp8
vpcmplew 0x800(%eax), %xmm6, %k5 # AVX512{BW,VL}
vpcmplew %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmplew 0xfe0(%eax), %ymm6, %k5 # AVX512{BW,VL} Disp8
vpcmplew 0x1000(%eax), %ymm6, %k5 # AVX512{BW,VL}
vpcmpltw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpltw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpneqw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpneqw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnlew %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnlew %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnltw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnltw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpub $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpub $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpub $0, %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpub $0, %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpleub %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpleub %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpltub %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpltub %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnequb %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnequb %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnleub %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnleub %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnltub %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnltub %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpuw $0xab, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, %xmm5, %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, (%ecx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -123456(%esp,%esi,8), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, 2032(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, 2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -2048(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, -2064(%edx), %xmm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $0xab, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, %ymm5, %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, (%ecx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -123456(%esp,%esi,8), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, 4064(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, 4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $123, -4096(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL} Disp8
vpcmpuw $123, -4128(%edx), %ymm6, %k5{%k7} # AVX512{BW,VL}
vpcmpuw $0, %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpuw $0, %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpleuw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpleuw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpltuw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpltuw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnequw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnequw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnleuw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnleuw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
vpcmpnltuw %xmm5, %xmm6, %k5 # AVX512{BW,VL}
vpcmpnltuw %ymm5, %ymm6, %k5 # AVX512{BW,VL}
.intel_syntax noprefix
vpabsb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpabsb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpabsb xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpabsb xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpabsb xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpabsb ymm6{k7}, ymm5 # AVX512{BW,VL}
vpabsb ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpabsb ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpabsb ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpabsb ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpabsw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpabsw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpabsw xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpabsw xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpabsw xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpabsw ymm6{k7}, ymm5 # AVX512{BW,VL}
vpabsw ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpabsw ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpabsw ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpabsw ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackssdw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, [eax]{1to4} # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{BW,VL}
vpackssdw xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{BW,VL} Disp8
vpackssdw xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackssdw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, [eax]{1to8} # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{BW,VL}
vpackssdw ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{BW,VL} Disp8
vpackssdw ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpacksswb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpacksswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpacksswb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpacksswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackusdw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, [eax]{1to4} # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, [edx+512]{1to4} # AVX512{BW,VL}
vpackusdw xmm6{k7}, xmm5, [edx-512]{1to4} # AVX512{BW,VL} Disp8
vpackusdw xmm6{k7}, xmm5, [edx-516]{1to4} # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackusdw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, [eax]{1to8} # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, [edx+512]{1to8} # AVX512{BW,VL}
vpackusdw ymm6{k7}, ymm5, [edx-512]{1to8} # AVX512{BW,VL} Disp8
vpackusdw ymm6{k7}, ymm5, [edx-516]{1to8} # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpackuswb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpackuswb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpackuswb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpackuswb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpaddw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpaddw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpaddw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpaddw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vpalignr xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, xmm4, 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpalignr xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vpalignr ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, ymm4, 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpalignr ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpavgb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
# vpavgb (continuation — the register and base-only memory forms of the xmm
# group precede this chunk).  Throughout this file, "Disp8" tags displacements
# that fit EVEX disp8*N compression (N = memory operand size: 2032 = 127*16
# for xmm, 4064 = 127*32 for ymm); the untagged neighbour just past each limit
# forces a full 32-bit displacement.  Verified against the paired .d file.
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpavgb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpavgb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpavgb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpavgw: masked ({k7}) and zero-masked ({k7}{z}) register forms, then the
# standard memory-addressing matrix (base, SIB+disp32, disp8*N boundaries).
vpavgw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpavgw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpavgw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpavgw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpavgw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpblendmb: same matrix for the byte blend-with-mask instruction.
vpblendmb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpblendmb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpblendmb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpbroadcastb: xmm-source, memory-source (element-sized, so N=1: disp8 range
# is +/-127..128 bytes), and GPR-source (eax/ebp) encodings, for both xmm and
# ymm destinations.
vpbroadcastb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [ecx] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [edx+127] # AVX512{BW,VL} Disp8
vpbroadcastb xmm6{k7}, BYTE PTR [edx+128] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, BYTE PTR [edx-128] # AVX512{BW,VL} Disp8
vpbroadcastb xmm6{k7}, BYTE PTR [edx-129] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastb ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [ecx] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [edx+127] # AVX512{BW,VL} Disp8
vpbroadcastb ymm6{k7}, BYTE PTR [edx+128] # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, BYTE PTR [edx-128] # AVX512{BW,VL} Disp8
vpbroadcastb ymm6{k7}, BYTE PTR [edx-129] # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, eax # AVX512{BW,VL}
vpbroadcastb xmm6{k7}{z}, eax # AVX512{BW,VL}
vpbroadcastb xmm6{k7}, ebp # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, eax # AVX512{BW,VL}
vpbroadcastb ymm6{k7}{z}, eax # AVX512{BW,VL}
vpbroadcastb ymm6{k7}, ebp # AVX512{BW,VL}
# vpbroadcastw: same shape with word elements (N=2: disp8 range +/-254..256).
vpbroadcastw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [ecx] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [edx+254] # AVX512{BW,VL} Disp8
vpbroadcastw xmm6{k7}, WORD PTR [edx+256] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, WORD PTR [edx-256] # AVX512{BW,VL} Disp8
vpbroadcastw xmm6{k7}, WORD PTR [edx-258] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpbroadcastw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [ecx] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [edx+254] # AVX512{BW,VL} Disp8
vpbroadcastw ymm6{k7}, WORD PTR [edx+256] # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, WORD PTR [edx-256] # AVX512{BW,VL} Disp8
vpbroadcastw ymm6{k7}, WORD PTR [edx-258] # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, eax # AVX512{BW,VL}
vpbroadcastw xmm6{k7}{z}, eax # AVX512{BW,VL}
vpbroadcastw xmm6{k7}, ebp # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, eax # AVX512{BW,VL}
vpbroadcastw ymm6{k7}{z}, eax # AVX512{BW,VL}
vpbroadcastw ymm6{k7}, ebp # AVX512{BW,VL}
# vpcmpeqb/vpcmpeqw/vpcmpgtb/vpcmpgtw: AVX512 compare forms write a mask
# register (k5) under a mask (k7); no {z} variant here since zero-masking does
# not apply to mask-register destinations.
vpcmpeqb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpeqb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpeqw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpgtb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpcmpgtw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpblendmw / vpmaddubsw / vpmaddwd: standard 16-line matrix per mnemonic
# (reg, reg {z}, [base], [SIB+disp32], then the four disp8*N boundary cases).
vpblendmw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpblendmw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpblendmw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaddubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaddubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaddwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaddwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpmaxsb / vpmaxsw / vpmaxub / vpmaxuw: signed/unsigned byte/word maximum,
# standard 16-line matrix per mnemonic.
vpmaxsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxub xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxub ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmaxuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmaxuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpminsb / vpminsw / vpminub / vpminuw: signed/unsigned byte/word minimum,
# mirroring the vpmax* matrix above it in the file.
vpminsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminub xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminub xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminub ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminub ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpminuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpminuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpminuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpminuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpmovsxbw / vpmovzxbw: sign/zero byte-to-word extension.  Memory operands
# are half the destination width (QWORD for xmm dest, XMMWORD for ymm dest),
# so the disp8*N limits shift accordingly (1016 = 127*8, 2032 = 127*16).
vpmovsxbw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovsxbw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [ecx] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [edx+1016] # AVX512{BW,VL} Disp8
vpmovsxbw xmm6{k7}, QWORD PTR [edx+1024] # AVX512{BW,VL}
vpmovsxbw xmm6{k7}, QWORD PTR [edx-1024] # AVX512{BW,VL} Disp8
vpmovsxbw xmm6{k7}, QWORD PTR [edx-1032] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpmovsxbw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmovsxbw ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovzxbw xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [ecx] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [edx+1016] # AVX512{BW,VL} Disp8
vpmovzxbw xmm6{k7}, QWORD PTR [edx+1024] # AVX512{BW,VL}
vpmovzxbw xmm6{k7}, QWORD PTR [edx-1024] # AVX512{BW,VL} Disp8
vpmovzxbw xmm6{k7}, QWORD PTR [edx-1032] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, xmm5 # AVX512{BW,VL}
vpmovzxbw ymm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmovzxbw ymm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
# vpmulhrsw / vpmulhuw / vpmulhw / vpmullw: word multiplies (round-scale
# high, unsigned high, signed high, low), standard 16-line matrix each.
vpmulhrsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhrsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhrsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhrsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhrsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhuw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhuw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhuw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhuw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmulhw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmulhw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpmullw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpmullw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpmullw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpmullw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
# vpshufb (register shuffle control) and vpshufhw/vpshuflw (imm8 control:
# 0xab, and 123 in both register and memory source forms).
vpshufb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpshufb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpshufb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshufhw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshufhw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshufhw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshufhw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpshuflw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpshuflw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpshuflw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpshuflw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# Word shifts with an xmm/m128 shift count: vpsllw, vpsraw, vpsrlw.  Note the
# ymm destination forms still take an XMMWORD count operand.  Followed by the
# imm8-count forms of vpsrlw and vpsraw (which also allow a memory first
# source, including the full-width disp8*N boundary cases).
vpsllw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, xmm4 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, ymm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsrlw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsrlw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsrlw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsrlw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsraw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsraw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsraw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsraw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsraw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsraw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# Per-element variable word shifts (vpsrlvw, vpsravw — AVX512BW additions)
# and byte subtraction (vpsubb; vpsubsb continues past this chunk).
vpsrlvw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsrlvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsrlvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsrlvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsravw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsravw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsravw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsravw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubsw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubsw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusb xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusb ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubusw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubusw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsubw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsubw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpckhwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpckhwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpunpcklwd xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpunpcklwd ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmovwb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovwb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovwb xmm6{k7}, ymm5 # AVX512{BW,VL}
vpmovwb xmm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}, ymm5 # AVX512{BW,VL}
vpmovswb xmm6{k7}{z}, ymm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}, xmm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}, ymm5 # AVX512{BW,VL}
vpmovuswb xmm6{k7}{z}, ymm5 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vdbpsadbw xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, xmm4, 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vdbpsadbw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vdbpsadbw ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, ymm4, 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vdbpsadbw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpermw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpermw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpermw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpermw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpermt2w xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpermt2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpermt2w ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpermt2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}{z}, xmm5, 0xab # AVX512{BW,VL}
vpsllw xmm6{k7}, xmm5, 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpsllw xmm6{k7}, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpsllw xmm6{k7}, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}{z}, ymm5, 0xab # AVX512{BW,VL}
vpsllw ymm6{k7}, ymm5, 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpsllw ymm6{k7}, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpsllw ymm6{k7}, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpsllvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpsllvw xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpsllvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpsllvw ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vmovdqu8 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vmovdqu8 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}{z}, xmm5 # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vmovdqu16 xmm6{k7}, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}{z}, ymm5 # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [ecx] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vmovdqu16 ymm6{k7}, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmovwb QWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovwb QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovwb QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovwb XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{BW,VL}
vpmovwb XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovwb XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovswb QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovswb QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovswb XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{BW,VL}
vpmovswb XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovswb XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [edx+1016]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovuswb QWORD PTR [edx+1024]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb QWORD PTR [edx-1024]{k7}, xmm6 # AVX512{BW,VL} Disp8
vpmovuswb QWORD PTR [edx-1032]{k7}, xmm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [edx+2032]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovuswb XMMWORD PTR [edx+2048]{k7}, ymm6 # AVX512{BW,VL}
vpmovuswb XMMWORD PTR [edx-2048]{k7}, ymm6 # AVX512{BW,VL} Disp8
vpmovuswb XMMWORD PTR [edx-2064]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu8 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu8 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu8 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu8 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu8 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [ecx]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [edx+2032]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu16 XMMWORD PTR [edx+2048]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 XMMWORD PTR [edx-2048]{k7}, xmm6 # AVX512{BW,VL} Disp8
vmovdqu16 XMMWORD PTR [edx-2064]{k7}, xmm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [ecx]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [edx+4064]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu16 YMMWORD PTR [edx+4096]{k7}, ymm6 # AVX512{BW,VL}
vmovdqu16 YMMWORD PTR [edx-4096]{k7}, ymm6 # AVX512{BW,VL} Disp8
vmovdqu16 YMMWORD PTR [edx-4128]{k7}, ymm6 # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, xmm4 # AVX512{BW,VL}
vpermi2w xmm6{k7}{z}, xmm5, xmm4 # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vpermi2w xmm6{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, ymm4 # AVX512{BW,VL}
vpermi2w ymm6{k7}{z}, ymm5, ymm4 # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vpermi2w ymm6{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestmb k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, xmm5 # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, xmm6, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, ymm5 # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestmw k5{k7}, ymm6, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpmovb2m k5, xmm6 # AVX512{BW,VL}
vpmovb2m k5, ymm6 # AVX512{BW,VL}
vpmovw2m k5, xmm6 # AVX512{BW,VL}
vpmovw2m k5, ymm6 # AVX512{BW,VL}
vpmovm2b xmm6, k5 # AVX512{BW,VL}
vpmovm2b ymm6, k5 # AVX512{BW,VL}
vpmovm2w xmm6, k5 # AVX512{BW,VL}
vpmovm2w ymm6, k5 # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, xmm4 # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, ymm4 # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestnmb k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, xmm4 # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx+2048] # AVX512{BW,VL}
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx-2048] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, xmm5, XMMWORD PTR [edx-2064] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, ymm4 # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [ecx] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx+4096] # AVX512{BW,VL}
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx-4096] # AVX512{BW,VL} Disp8
vptestnmw k5{k7}, ymm5, YMMWORD PTR [edx-4128] # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpb k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpw k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpub k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, xmm5, 0xab # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, xmm5, 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx+2032], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx+2048], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx-2048], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, xmm6, XMMWORD PTR [edx-2064], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, ymm5, 0xab # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, ymm5, 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [ecx], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx+4064], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx+4096], 123 # AVX512{BW,VL}
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx-4096], 123 # AVX512{BW,VL} Disp8
vpcmpuw k5{k7}, ymm6, YMMWORD PTR [edx-4128], 123 # AVX512{BW,VL}
# ---------------------------------------------------------------------------
# (extraction note: end of the preceding AVX512{BW,VL} test file; the lines
#  that follow are the beginning of gas/testsuite/gas/i386/x86-64-hlebad.s
#  from repo stsp/binutils-ia16, file size 14,692 bytes)
# ---------------------------------------------------------------------------
# Check 64bit unsupported HLE instructions
#
# Negative tests: each group applies the HLE prefixes xacquire/xrelease —
# alone, combined with lock, and in both prefix orders — to ADC forms that
# HLE does not accept (register destinations, or memory forms lacking the
# mandatory lock prefix).  The assembler is expected to diagnose these;
# NOTE(review): confirm exact diagnostics against the companion
# x86-64-hlebad expected-listing file in the testsuite.
	.allow_index_reg
	.text
_start:
# Tests for op imm8 al
# (accumulator shorthand encoding — register destination, never HLE-legal)
	xacquire adc $100,%al
	xacquire lock adc $100,%al
	lock xacquire adc $100,%al
	xrelease adc $100,%al
	xrelease lock adc $100,%al
	lock xrelease adc $100,%al
# Tests for op imm16 ax
	xacquire adc $1000,%ax
	xacquire lock adc $1000,%ax
	lock xacquire adc $1000,%ax
	xrelease adc $1000,%ax
	xrelease lock adc $1000,%ax
	lock xrelease adc $1000,%ax
# Tests for op imm32 eax
	xacquire adc $10000000,%eax
	xacquire lock adc $10000000,%eax
	lock xacquire adc $10000000,%eax
	xrelease adc $10000000,%eax
	xrelease lock adc $10000000,%eax
	lock xrelease adc $10000000,%eax
# Tests for op imm32 rax
	xacquire adc $10000000,%rax
	xacquire lock adc $10000000,%rax
	lock xacquire adc $10000000,%rax
	xrelease adc $10000000,%rax
	xrelease lock adc $10000000,%rax
	lock xrelease adc $10000000,%rax
# Tests for op imm8 regb/m8
# (register forms, plus lock-less memory forms on the last two lines)
	xacquire adcb $100,%cl
	xacquire lock adcb $100,%cl
	lock xacquire adcb $100,%cl
	xrelease adcb $100,%cl
	xrelease lock adcb $100,%cl
	lock xrelease adcb $100,%cl
	xacquire adcb $100,(%rcx)
	xrelease adcb $100,(%rcx)
# Tests for op imm16 regs/m16
	xacquire adcw $1000,%cx
	xacquire lock adcw $1000,%cx
	lock xacquire adcw $1000,%cx
	xrelease adcw $1000,%cx
	xrelease lock adcw $1000,%cx
	lock xrelease adcw $1000,%cx
	xacquire adcw $1000,(%rcx)
	xrelease adcw $1000,(%rcx)
# Tests for op imm32 regl/m32
	xacquire adcl $10000000,%ecx
	xacquire lock adcl $10000000,%ecx
	lock xacquire adcl $10000000,%ecx
	xrelease adcl $10000000,%ecx
	xrelease lock adcl $10000000,%ecx
	lock xrelease adcl $10000000,%ecx
	xacquire adcl $10000000,(%rcx)
	xrelease adcl $10000000,(%rcx)
# Tests for op imm32 regq/m64
	xacquire adcq $10000000,%rcx
	xacquire lock adcq $10000000,%rcx
	lock xacquire adcq $10000000,%rcx
	xrelease adcq $10000000,%rcx
	xrelease lock adcq $10000000,%rcx
	lock xrelease adcq $10000000,%rcx
	xacquire adcq $10000000,(%rcx)
	xrelease adcq $10000000,(%rcx)
# Tests for op imm8 regs/m16
# (imm8 variants exercise the sign-extended-immediate opcode 0x83)
	xacquire adcw $100,%cx
	xacquire lock adcw $100,%cx
	lock xacquire adcw $100,%cx
	xrelease adcw $100,%cx
	xrelease lock adcw $100,%cx
	lock xrelease adcw $100,%cx
	xacquire adcw $100,(%rcx)
	xrelease adcw $100,(%rcx)
# Tests for op imm8 regl/m32
	xacquire adcl $100,%ecx
	xacquire lock adcl $100,%ecx
	lock xacquire adcl $100,%ecx
	xrelease adcl $100,%ecx
	xrelease lock adcl $100,%ecx
	lock xrelease adcl $100,%ecx
	xacquire adcl $100,(%rcx)
	xrelease adcl $100,(%rcx)
# Tests for op imm8 regq/m64
	xacquire adcq $100,%rcx
	xacquire lock adcq $100,%rcx
	lock xacquire adcq $100,%rcx
	xrelease adcq $100,%rcx
	xrelease lock adcq $100,%rcx
	lock xrelease adcq $100,%rcx
	xacquire adcq $100,(%rcx)
	xrelease adcq $100,(%rcx)
# Tests for op imm8 regb/m8
# (repeat of the byte-immediate group above; presumably intentional to
#  mirror the structure of the positive HLE test — TODO confirm)
	xacquire adcb $100,%cl
	xacquire lock adcb $100,%cl
	lock xacquire adcb $100,%cl
	xrelease adcb $100,%cl
	xrelease lock adcb $100,%cl
	lock xrelease adcb $100,%cl
	xacquire adcb $100,(%rcx)
	xrelease adcb $100,(%rcx)
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adcb %al,%cl
xacquire lock adcb %al,%cl
lock xacquire adcb %al,%cl
xrelease adcb %al,%cl
xrelease lock adcb %al,%cl
lock xrelease adcb %al,%cl
xacquire adcb %al,(%rcx)
xrelease adcb %al,(%rcx)
xacquire adcb %cl,%al
xacquire lock adcb %cl,%al
lock xacquire adcb %cl,%al
xrelease adcb %cl,%al
xrelease lock adcb %cl,%al
lock xrelease adcb %cl,%al
xacquire adcb (%rcx),%al
xacquire lock adcb (%rcx),%al
lock xacquire adcb (%rcx),%al
xrelease adcb (%rcx),%al
xrelease lock adcb (%rcx),%al
lock xrelease adcb (%rcx),%al
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adcw %ax,%cx
xacquire lock adcw %ax,%cx
lock xacquire adcw %ax,%cx
xrelease adcw %ax,%cx
xrelease lock adcw %ax,%cx
lock xrelease adcw %ax,%cx
xacquire adcw %ax,(%rcx)
xrelease adcw %ax,(%rcx)
xacquire adcw %cx,%ax
xacquire lock adcw %cx,%ax
lock xacquire adcw %cx,%ax
xrelease adcw %cx,%ax
xrelease lock adcw %cx,%ax
lock xrelease adcw %cx,%ax
xacquire adcw (%rcx),%ax
xacquire lock adcw (%rcx),%ax
lock xacquire adcw (%rcx),%ax
xrelease adcw (%rcx),%ax
xrelease lock adcw (%rcx),%ax
lock xrelease adcw (%rcx),%ax
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adcl %eax,%ecx
xacquire lock adcl %eax,%ecx
lock xacquire adcl %eax,%ecx
xrelease adcl %eax,%ecx
xrelease lock adcl %eax,%ecx
lock xrelease adcl %eax,%ecx
xacquire adcl %eax,(%rcx)
xrelease adcl %eax,(%rcx)
xacquire adcl %ecx,%eax
xacquire lock adcl %ecx,%eax
lock xacquire adcl %ecx,%eax
xrelease adcl %ecx,%eax
xrelease lock adcl %ecx,%eax
lock xrelease adcl %ecx,%eax
xacquire adcl (%rcx),%eax
xacquire lock adcl (%rcx),%eax
lock xacquire adcl (%rcx),%eax
xrelease adcl (%rcx),%eax
xrelease lock adcl (%rcx),%eax
lock xrelease adcl (%rcx),%eax
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire adcq %rax,%rcx
xacquire lock adcq %rax,%rcx
lock xacquire adcq %rax,%rcx
xrelease adcq %rax,%rcx
xrelease lock adcq %rax,%rcx
lock xrelease adcq %rax,%rcx
xacquire adcq %rax,(%rcx)
xrelease adcq %rax,(%rcx)
xacquire adcq %rcx,%rax
xacquire lock adcq %rcx,%rax
lock xacquire adcq %rcx,%rax
xrelease adcq %rcx,%rax
xrelease lock adcq %rcx,%rax
lock xrelease adcq %rcx,%rax
xacquire adcq (%rcx),%rax
xacquire lock adcq (%rcx),%rax
lock xacquire adcq (%rcx),%rax
xrelease adcq (%rcx),%rax
xrelease lock adcq (%rcx),%rax
lock xrelease adcq (%rcx),%rax
# Tests for op regs, regs/m16
xacquire btcw %ax,%cx
xacquire lock btcw %ax,%cx
lock xacquire btcw %ax,%cx
xrelease btcw %ax,%cx
xrelease lock btcw %ax,%cx
lock xrelease btcw %ax,%cx
xacquire btcw %ax,(%rcx)
xrelease btcw %ax,(%rcx)
# Tests for op regl regl/m32
xacquire btcl %eax,%ecx
xacquire lock btcl %eax,%ecx
lock xacquire btcl %eax,%ecx
xrelease btcl %eax,%ecx
xrelease lock btcl %eax,%ecx
lock xrelease btcl %eax,%ecx
xacquire btcl %eax,(%rcx)
xrelease btcl %eax,(%rcx)
# Tests for op regq regq/m64
xacquire btcq %rax,%rcx
xacquire lock btcq %rax,%rcx
lock xacquire btcq %rax,%rcx
xrelease btcq %rax,%rcx
xrelease lock btcq %rax,%rcx
lock xrelease btcq %rax,%rcx
xacquire btcq %rax,(%rcx)
xrelease btcq %rax,(%rcx)
# Tests for op regb/m8
xacquire decb %cl
xacquire lock decb %cl
lock xacquire decb %cl
xrelease decb %cl
xrelease lock decb %cl
lock xrelease decb %cl
xacquire decb (%rcx)
xrelease decb (%rcx)
# Tests for op regs/m16
xacquire decw %cx
xacquire lock decw %cx
lock xacquire decw %cx
xrelease decw %cx
xrelease lock decw %cx
lock xrelease decw %cx
xacquire decw (%rcx)
xrelease decw (%rcx)
# Tests for op regl/m32
xacquire decl %ecx
xacquire lock decl %ecx
lock xacquire decl %ecx
xrelease decl %ecx
xrelease lock decl %ecx
lock xrelease decl %ecx
xacquire decl (%rcx)
xrelease decl (%rcx)
# Tests for op regq/m64
xacquire decq %rcx
xacquire lock decq %rcx
lock xacquire decq %rcx
xrelease decq %rcx
xrelease lock decq %rcx
lock xrelease decq %rcx
xacquire decq (%rcx)
xrelease decq (%rcx)
# Tests for op m64
xacquire cmpxchg8bq (%rcx)
xrelease cmpxchg8bq (%rcx)
# Tests for op regb, regb/m8
xacquire cmpxchgb %cl,%al
xacquire lock cmpxchgb %cl,%al
lock xacquire cmpxchgb %cl,%al
xrelease cmpxchgb %cl,%al
xrelease lock cmpxchgb %cl,%al
lock xrelease cmpxchgb %cl,%al
xacquire cmpxchgb %cl,(%rcx)
xrelease cmpxchgb %cl,(%rcx)
.intel_syntax noprefix
# Intel-syntax half of the fixture: the same unsupported HLE
# prefix/operation combinations as the AT&T half above, re-spelled in
# Intel syntax so both parsers are exercised.  Instruction text below is
# the test contract — keep it byte-for-byte.
# Tests for op imm8 al
xacquire adc al,100
xacquire lock adc al,100
lock xacquire adc al,100
xrelease adc al,100
xrelease lock adc al,100
lock xrelease adc al,100
# Tests for op imm16 ax
xacquire adc ax,1000
xacquire lock adc ax,1000
lock xacquire adc ax,1000
xrelease adc ax,1000
xrelease lock adc ax,1000
lock xrelease adc ax,1000
# Tests for op imm32 eax
xacquire adc eax,10000000
xacquire lock adc eax,10000000
lock xacquire adc eax,10000000
xrelease adc eax,10000000
xrelease lock adc eax,10000000
lock xrelease adc eax,10000000
# Tests for op imm32 rax
xacquire adc rax,10000000
xacquire lock adc rax,10000000
lock xacquire adc rax,10000000
xrelease adc rax,10000000
xrelease lock adc rax,10000000
lock xrelease adc rax,10000000
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [rcx],100
xrelease adc BYTE PTR [rcx],100
# Tests for op imm16 regs/m16
xacquire adc cx,1000
xacquire lock adc cx,1000
lock xacquire adc cx,1000
xrelease adc cx,1000
xrelease lock adc cx,1000
lock xrelease adc cx,1000
xacquire adc WORD PTR [rcx],1000
xrelease adc WORD PTR [rcx],1000
# Tests for op imm32 regl/m32
xacquire adc ecx,10000000
xacquire lock adc ecx,10000000
lock xacquire adc ecx,10000000
xrelease adc ecx,10000000
xrelease lock adc ecx,10000000
lock xrelease adc ecx,10000000
xacquire adc DWORD PTR [rcx],10000000
xrelease adc DWORD PTR [rcx],10000000
# Tests for op imm32 regq/m64
xacquire adc rcx,10000000
xacquire lock adc rcx,10000000
lock xacquire adc rcx,10000000
xrelease adc rcx,10000000
xrelease lock adc rcx,10000000
lock xrelease adc rcx,10000000
xacquire adc QWORD PTR [rcx],10000000
xrelease adc QWORD PTR [rcx],10000000
# Tests for op imm8 regs/m16
xacquire adc cx,100
xacquire lock adc cx,100
lock xacquire adc cx,100
xrelease adc cx,100
xrelease lock adc cx,100
lock xrelease adc cx,100
xacquire adc WORD PTR [rcx],100
xrelease adc WORD PTR [rcx],100
# Tests for op imm8 regl/m32
xacquire adc ecx,100
xacquire lock adc ecx,100
lock xacquire adc ecx,100
xrelease adc ecx,100
xrelease lock adc ecx,100
lock xrelease adc ecx,100
xacquire adc DWORD PTR [rcx],100
xrelease adc DWORD PTR [rcx],100
# Tests for op imm8 regq/m64
xacquire adc rcx,100
xacquire lock adc rcx,100
lock xacquire adc rcx,100
xrelease adc rcx,100
xrelease lock adc rcx,100
lock xrelease adc rcx,100
xacquire adc QWORD PTR [rcx],100
xrelease adc QWORD PTR [rcx],100
# Tests for op imm8 regb/m8
xacquire adc cl,100
xacquire lock adc cl,100
lock xacquire adc cl,100
xrelease adc cl,100
xrelease lock adc cl,100
lock xrelease adc cl,100
xacquire adc BYTE PTR [rcx],100
xrelease adc BYTE PTR [rcx],100
# Tests for op regb regb/m8
# Tests for op regb/m8 regb
xacquire adc cl,al
xacquire lock adc cl,al
lock xacquire adc cl,al
xrelease adc cl,al
xrelease lock adc cl,al
lock xrelease adc cl,al
xacquire adc BYTE PTR [rcx],al
xrelease adc BYTE PTR [rcx],al
xacquire adc al,cl
xacquire lock adc al,cl
lock xacquire adc al,cl
xrelease adc al,cl
xrelease lock adc al,cl
lock xrelease adc al,cl
xacquire adc al,BYTE PTR [rcx]
xacquire lock adc al,BYTE PTR [rcx]
lock xacquire adc al,BYTE PTR [rcx]
xrelease adc al,BYTE PTR [rcx]
xrelease lock adc al,BYTE PTR [rcx]
lock xrelease adc al,BYTE PTR [rcx]
# Tests for op regs regs/m16
# Tests for op regs/m16 regs
xacquire adc cx,ax
xacquire lock adc cx,ax
lock xacquire adc cx,ax
xrelease adc cx,ax
xrelease lock adc cx,ax
lock xrelease adc cx,ax
xacquire adc WORD PTR [rcx],ax
xrelease adc WORD PTR [rcx],ax
xacquire adc ax,cx
xacquire lock adc ax,cx
lock xacquire adc ax,cx
xrelease adc ax,cx
xrelease lock adc ax,cx
lock xrelease adc ax,cx
xacquire adc ax,WORD PTR [rcx]
xacquire lock adc ax,WORD PTR [rcx]
lock xacquire adc ax,WORD PTR [rcx]
xrelease adc ax,WORD PTR [rcx]
xrelease lock adc ax,WORD PTR [rcx]
lock xrelease adc ax,WORD PTR [rcx]
# Tests for op regl regl/m32
# Tests for op regl/m32 regl
xacquire adc ecx,eax
xacquire lock adc ecx,eax
lock xacquire adc ecx,eax
xrelease adc ecx,eax
xrelease lock adc ecx,eax
lock xrelease adc ecx,eax
xacquire adc DWORD PTR [rcx],eax
xrelease adc DWORD PTR [rcx],eax
xacquire adc eax,ecx
xacquire lock adc eax,ecx
lock xacquire adc eax,ecx
xrelease adc eax,ecx
xrelease lock adc eax,ecx
lock xrelease adc eax,ecx
xacquire adc eax,DWORD PTR [rcx]
xacquire lock adc eax,DWORD PTR [rcx]
lock xacquire adc eax,DWORD PTR [rcx]
xrelease adc eax,DWORD PTR [rcx]
xrelease lock adc eax,DWORD PTR [rcx]
lock xrelease adc eax,DWORD PTR [rcx]
# Tests for op regq regq/m64
# Tests for op regq/m64 regq
xacquire adc rcx,rax
xacquire lock adc rcx,rax
lock xacquire adc rcx,rax
xrelease adc rcx,rax
xrelease lock adc rcx,rax
lock xrelease adc rcx,rax
xacquire adc QWORD PTR [rcx],rax
xrelease adc QWORD PTR [rcx],rax
xacquire adc rax,rcx
xacquire lock adc rax,rcx
lock xacquire adc rax,rcx
xrelease adc rax,rcx
xrelease lock adc rax,rcx
lock xrelease adc rax,rcx
xacquire adc rax,QWORD PTR [rcx]
xacquire lock adc rax,QWORD PTR [rcx]
lock xacquire adc rax,QWORD PTR [rcx]
xrelease adc rax,QWORD PTR [rcx]
xrelease lock adc rax,QWORD PTR [rcx]
lock xrelease adc rax,QWORD PTR [rcx]
# Tests for op regs, regs/m16
xacquire btc cx,ax
xacquire lock btc cx,ax
lock xacquire btc cx,ax
xrelease btc cx,ax
xrelease lock btc cx,ax
lock xrelease btc cx,ax
xacquire btc WORD PTR [rcx],ax
xrelease btc WORD PTR [rcx],ax
# Tests for op regl regl/m32
xacquire btc ecx,eax
xacquire lock btc ecx,eax
lock xacquire btc ecx,eax
xrelease btc ecx,eax
xrelease lock btc ecx,eax
lock xrelease btc ecx,eax
xacquire btc DWORD PTR [rcx],eax
xrelease btc DWORD PTR [rcx],eax
# Tests for op regq regq/m64
xacquire btc rcx,rax
xacquire lock btc rcx,rax
lock xacquire btc rcx,rax
xrelease btc rcx,rax
xrelease lock btc rcx,rax
lock xrelease btc rcx,rax
xacquire btc QWORD PTR [rcx],rax
xrelease btc QWORD PTR [rcx],rax
# Tests for op regb/m8
xacquire dec cl
xacquire lock dec cl
lock xacquire dec cl
xrelease dec cl
xrelease lock dec cl
lock xrelease dec cl
xacquire dec BYTE PTR [rcx]
xrelease dec BYTE PTR [rcx]
# Tests for op regs/m16
xacquire dec cx
xacquire lock dec cx
lock xacquire dec cx
xrelease dec cx
xrelease lock dec cx
lock xrelease dec cx
xacquire dec WORD PTR [rcx]
xrelease dec WORD PTR [rcx]
# Tests for op regl/m32
xacquire dec ecx
xacquire lock dec ecx
lock xacquire dec ecx
xrelease dec ecx
xrelease lock dec ecx
lock xrelease dec ecx
xacquire dec DWORD PTR [rcx]
xrelease dec DWORD PTR [rcx]
# Tests for op regq/m64
xacquire dec rcx
xacquire lock dec rcx
lock xacquire dec rcx
xrelease dec rcx
xrelease lock dec rcx
lock xrelease dec rcx
xacquire dec QWORD PTR [rcx]
xrelease dec QWORD PTR [rcx]
# Tests for op m64
xacquire cmpxchg8b QWORD PTR [rcx]
xrelease cmpxchg8b QWORD PTR [rcx]
# Tests for op regb, regb/m8
xacquire cmpxchg al,cl
xacquire lock cmpxchg al,cl
lock xacquire cmpxchg al,cl
xrelease cmpxchg al,cl
xrelease lock cmpxchg al,cl
lock xrelease cmpxchg al,cl
xacquire cmpxchg BYTE PTR [rcx],cl
xrelease cmpxchg BYTE PTR [rcx],cl
|
stsp/binutils-ia16
| 1,067
|
gas/testsuite/gas/i386/secidx.s
|
# Test fixture for the .secidx directive (emits a section-index field;
# presumably COFF/CodeView-style — confirm against the expected-output
# driver).  The ">>>>"/"<<<<" ascii runs are visual padding that place
# each target label at a distinct offset; labels are defined in .text
# (pre*), the same section as the references (sam*), a later section
# (nex*, forward references into .rdata), and — for the ext* group —
# nowhere in this file (externals).  The .byte 0x11 after each .secidx
# marks where the emitted field ends.  Keep bytes exactly as written.
.text
.ascii ">>>>"
pre04: .ascii "<<<<"
.ascii ">>>>>"
pre0d: .ascii "<<<"
.ascii ">>>>>>"
pre16: .ascii "<<"
.ascii ">>>>>>>"
pre1f: .ascii "<"
.data
.ascii ">>>>"
sam04: .ascii "<<<<"
.ascii ">>>>>"
sam0d: .ascii "<<<"
.ascii ">>>>>>"
sam16: .ascii "<<"
.ascii ">>>>>>>"
sam1f: .ascii "<"
.ascii ">>>>"
.secidx pre04
.byte 0x11
.secidx pre0d
.byte 0x11
.secidx pre16
.byte 0x11
.secidx pre1f
.byte 0x11
.ascii "<<<<<<<<"
.ascii ">>>>"
.secidx sam04
.byte 0x11
.secidx sam0d
.byte 0x11
.secidx sam16
.byte 0x11
.secidx sam1f
.byte 0x11
.ascii "<<<<<<<<"
.ascii ">>>>"
.secidx nex04
.byte 0x11
.secidx nex0d
.byte 0x11
.secidx nex16
.byte 0x11
.secidx nex1f
.byte 0x11
.ascii "<<<<<<<<"
.ascii ">>>>"
.secidx ext24
.byte 0x11
.secidx ext2d
.byte 0x11
.secidx ext36
.byte 0x11
.secidx ext3f
.byte 0x11
.ascii "<<<<<<<<"
.section .rdata
.ascii ">>>>"
nex04: .ascii "<<<<"
.ascii ">>>>>"
nex0d: .ascii "<<<"
.ascii ">>>>>>"
nex16: .ascii "<<"
.ascii ">>>>>>>"
nex1f: .ascii "<"
.ascii ">>>>"
.p2align 4,0
|
stsp/binutils-ia16
| 19,272
|
gas/testsuite/gas/i386/avx512vbmi2_vl.s
|
# Check 32bit AVX512{VBMI2,VL} instructions
#
# Encoding-coverage fixture (AT&T-syntax half) for the AVX-512 VBMI2
# instructions under VL (128/256-bit): compress/expand (byte/word) and
# the concatenate-shift family (vpshld*/vpshrd*, variable and immediate
# forms).  Each instruction is driven through register, masked {%k7},
# zero-masked {z}, SIB-addressed memory, broadcast {1toN}, and
# displacement operands; lines tagged "Disp8" use displacements chosen
# to exercise EVEX compressed-disp8 encoding (e.g. 2032 = 127*16,
# 4064 = 127*32).  Exact operand values are the test contract.
.allow_index_reg
.text
_start:
vpcompressb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressb %xmm6, 126(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressb %ymm6, 126(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressb %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpcompressb %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpcompressb %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpcompressb %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpcompressw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressw %xmm6, 128(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressw %ymm6, 128(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressw %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpcompressw %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpcompressw %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpcompressw %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb (%ecx), %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandb 126(%edx), %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandb (%ecx), %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandb 126(%edx), %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandb %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandb %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandb %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw (%ecx), %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandw 128(%edx), %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandw (%ecx), %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandw 128(%edx), %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandw %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandw %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandw %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvw %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvw %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvw %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvw %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldw $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldw $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldw $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldw $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldw $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldw $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldw $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldw $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdw $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdw $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdw $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdw $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
.intel_syntax noprefix
# Intel-syntax half: the same AVX512{VBMI2,VL} operand matrix as the
# AT&T half above, re-spelled to exercise the Intel-syntax parser
# (PTR-sized memory operands, trailing immediates, {1toN} broadcasts).
# Operand values mirror the AT&T half line for line; keep byte-for-byte.
vpcompressb XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{VBMI2,VL}
vpcompressb XMMWORD PTR [edx+126]{k7}, xmm6 # AVX512{VBMI2,VL} Disp8
vpcompressb YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{VBMI2,VL}
vpcompressb YMMWORD PTR [edx+126]{k7}, ymm6 # AVX512{VBMI2,VL} Disp8
vpcompressb xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpcompressb xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpcompressb ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpcompressb ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}
vpcompressw XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{VBMI2,VL}
vpcompressw XMMWORD PTR [edx+128]{k7}, xmm6 # AVX512{VBMI2,VL} Disp8
vpcompressw YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{VBMI2,VL}
vpcompressw YMMWORD PTR [edx+128]{k7}, ymm6 # AVX512{VBMI2,VL} Disp8
vpcompressw xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpcompressw xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpcompressw ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpcompressw ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}, XMMWORD PTR [edx+126] # AVX512{VBMI2,VL} Disp8
vpexpandb ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}, YMMWORD PTR [edx+126] # AVX512{VBMI2,VL} Disp8
vpexpandb xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}, XMMWORD PTR [edx+128] # AVX512{VBMI2,VL} Disp8
vpexpandw ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}, YMMWORD PTR [edx+128] # AVX512{VBMI2,VL} Disp8
vpexpandw xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshldvw ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshldvd xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshldvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{VBMI2,VL} Disp8
vpshldvd ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshldvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{VBMI2,VL} Disp8
vpshldvq xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshldvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{VBMI2,VL} Disp8
vpshldvq ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshldvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{VBMI2,VL} Disp8
vpshrdvw xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshrdvw ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshrdvd xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshrdvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{VBMI2,VL} Disp8
vpshrdvd ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshrdvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{VBMI2,VL} Disp8
vpshrdvq xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshrdvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{VBMI2,VL} Disp8
vpshrdvq ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshrdvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{VBMI2,VL} Disp8
vpshldw xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldw xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshldw ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldw ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshldd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshldd xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{VBMI2,VL} Disp8
vpshldd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshldd ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{VBMI2,VL} Disp8
vpshldq xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldq xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshldq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{VBMI2,VL} Disp8
vpshldq ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldq ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshldq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{VBMI2,VL} Disp8
vpshrdw xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdw xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshrdw ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdw ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshrdd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshrdd xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{VBMI2,VL} Disp8
vpshrdd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshrdd ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{VBMI2,VL} Disp8
vpshrdq xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdq xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshrdq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{VBMI2,VL} Disp8
vpshrdq ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdq ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshrdq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{VBMI2,VL} Disp8
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/sha.s  (repo: stsp/binutils-ia16, 2,181 bytes)
# ----------------------------------------------------------------------
# Check SHA instructions
# Assembler test fixture: each line below is assembled and the resulting
# encoding is compared against the expected disassembly in the companion
# .d file, so the instruction lines must stay exactly as written.
# Operand forms covered per mnemonic: reg, plain memory, disp8 memory,
# and base+index*scale memory.
.allow_index_reg
.text
_start:
# AT&T syntax: op [imm,] src, dst
sha1rnds4 $9, %xmm2, %xmm1
sha1rnds4 $7, (%eax), %xmm2
sha1rnds4 $5, 0x12(%eax), %xmm3
sha1rnds4 $1, (%eax,%ebx,2), %xmm4
sha1nexte %xmm2, %xmm1
sha1nexte (%eax), %xmm1
sha1nexte 0x12(%eax), %xmm1
sha1nexte (%eax,%ebx,2), %xmm1
sha1msg1 %xmm2, %xmm1
sha1msg1 (%eax), %xmm1
sha1msg1 0x12(%eax), %xmm1
sha1msg1 (%eax,%ebx,2), %xmm1
sha1msg2 %xmm2, %xmm1
sha1msg2 (%eax), %xmm1
sha1msg2 0x12(%eax), %xmm1
sha1msg2 (%eax,%ebx,2), %xmm1
# sha256rnds2 has an implicit xmm0 operand; both the 2-operand form
# (xmm0 implied) and the explicit 3-operand form are exercised.
sha256rnds2 %xmm2, %xmm1
sha256rnds2 (%eax), %xmm1
sha256rnds2 0x12(%eax), %xmm1
sha256rnds2 (%eax,%ebx,2), %xmm1
sha256rnds2 %xmm0, %xmm2, %xmm1
sha256rnds2 %xmm0, (%eax), %xmm1
sha256rnds2 %xmm0, 0x12(%eax), %xmm1
sha256rnds2 %xmm0, (%eax,%ebx,2), %xmm1
sha256msg1 %xmm2, %xmm1
sha256msg1 (%eax), %xmm1
sha256msg1 0x12(%eax), %xmm1
sha256msg1 (%eax,%ebx,2), %xmm1
sha256msg2 %xmm2, %xmm1
sha256msg2 (%eax), %xmm1
sha256msg2 0x12(%eax), %xmm1
sha256msg2 (%eax,%ebx,2), %xmm1
# Same tests repeated in Intel syntax: op dst, src [, imm], no % prefixes.
.intel_syntax noprefix
sha1rnds4 xmm1, xmm2, 9
sha1rnds4 xmm2, XMMWORD PTR [eax], 7
sha1rnds4 xmm3, XMMWORD PTR [eax+0x12], 5
sha1rnds4 xmm4, XMMWORD PTR [eax+ebx*2], 1
sha1nexte xmm1, xmm2
sha1nexte xmm2, XMMWORD PTR [eax]
sha1nexte xmm3, XMMWORD PTR [eax+0x12]
sha1nexte xmm4, XMMWORD PTR [eax+ebx*2]
sha1msg1 xmm1, xmm2
sha1msg1 xmm2, XMMWORD PTR [eax]
sha1msg1 xmm3, XMMWORD PTR [eax+0x12]
sha1msg1 xmm4, XMMWORD PTR [eax+ebx*2]
sha1msg2 xmm1, xmm2
sha1msg2 xmm2, XMMWORD PTR [eax]
sha1msg2 xmm3, XMMWORD PTR [eax+0x12]
sha1msg2 xmm4, XMMWORD PTR [eax+ebx*2]
sha256rnds2 xmm1, xmm2
sha256rnds2 xmm2, XMMWORD PTR [eax]
sha256rnds2 xmm3, XMMWORD PTR [eax+0x12]
sha256rnds2 xmm4, XMMWORD PTR [eax+ebx*2]
sha256rnds2 xmm1, xmm2, xmm0
sha256rnds2 xmm2, XMMWORD PTR [eax], xmm0
sha256rnds2 xmm3, XMMWORD PTR [eax+0x12], xmm0
sha256rnds2 xmm4, XMMWORD PTR [eax+ebx*2], xmm0
sha256msg1 xmm1, xmm2
sha256msg1 xmm2, XMMWORD PTR [eax]
sha256msg1 xmm3, XMMWORD PTR [eax+0x12]
sha256msg1 xmm4, XMMWORD PTR [eax+ebx*2]
sha256msg2 xmm1, xmm2
sha256msg2 xmm2, XMMWORD PTR [eax]
sha256msg2 xmm3, XMMWORD PTR [eax+0x12]
sha256msg2 xmm4, XMMWORD PTR [eax+ebx*2]
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/x86-64-avx512f_vaes.s
#   (repo: stsp/binutils-ia16, 1,527 bytes)
# ----------------------------------------------------------------------
# Check 64bit AVX512F,VAES instructions
# Assembler test fixture: encodings are checked against the companion .d
# file, so instruction lines must stay exactly as written.  Each mnemonic
# is exercised with a register source, a SIB-addressed memory source, and
# a memory source whose displacement (8128 = 127*64) is compressible to
# disp8*N under EVEX (the "Disp8" cases).
.allow_index_reg
.text
_start:
vaesdec %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesdec 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesdec 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesdeclast %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesdeclast 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesdeclast 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenc %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesenc 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesenc 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
vaesenclast %zmm28, %zmm29, %zmm30 # AVX512F,VAES
vaesenclast 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512F,VAES
vaesenclast 8128(%rdx), %zmm5, %zmm6 # AVX512F,VAES Disp8
# Same tests repeated in Intel syntax (dst first).
.intel_syntax noprefix
vaesdec zmm30, zmm29, zmm28 # AVX512F,VAES
vaesdec zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesdec zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesdeclast zmm30, zmm29, zmm28 # AVX512F,VAES
vaesdeclast zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesdeclast zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesenc zmm30, zmm29, zmm28 # AVX512F,VAES
vaesenc zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesenc zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
vaesenclast zmm30, zmm29, zmm28 # AVX512F,VAES
vaesenclast zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512F,VAES
vaesenclast zmm6, zmm5, ZMMWORD PTR [rdx+8128] # AVX512F,VAES Disp8
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/x86-64-avx2-wig.s
#   (repo: stsp/binutils-ia16, 2,881 bytes)
# ----------------------------------------------------------------------
# Check AVX2 WIG instructions
# Assembler test fixture (encodings matched against the companion .d
# file — instruction lines must stay exactly as written).  "WIG" means
# the VEX.W bit is ignored for these encodings; the fixture covers the
# 256-bit (ymm) AVX2 forms of each instruction.
.allow_index_reg
.text
_start:
vmovntdqa (%rcx),%ymm4
vmpsadbw $7,%ymm4,%ymm6,%ymm2
vpabsb %ymm4,%ymm6
vpabsd %ymm4,%ymm6
vpabsw %ymm4,%ymm6
vpackssdw %ymm4,%ymm6,%ymm2
vpacksswb %ymm4,%ymm6,%ymm2
vpackusdw %ymm4,%ymm6,%ymm2
vpackuswb %ymm4,%ymm6,%ymm2
vpaddb %ymm4,%ymm6,%ymm2
vpaddd %ymm4,%ymm6,%ymm2
vpaddq %ymm4,%ymm6,%ymm2
vpaddsb %ymm4,%ymm6,%ymm2
vpaddsw %ymm4,%ymm6,%ymm2
vpaddusb %ymm4,%ymm6,%ymm2
vpaddusw %ymm4,%ymm6,%ymm2
vpaddw %ymm4,%ymm6,%ymm2
vpalignr $7,%ymm4,%ymm6,%ymm2
vpand %ymm4,%ymm6,%ymm2
vpandn %ymm4,%ymm6,%ymm2
vpavgb %ymm4,%ymm6,%ymm2
vpavgw %ymm4,%ymm6,%ymm2
vpblendw $7,%ymm4,%ymm6,%ymm2
vpcmpeqb %ymm4,%ymm6,%ymm2
vpcmpeqd %ymm4,%ymm6,%ymm2
vpcmpeqq %ymm4,%ymm6,%ymm2
vpcmpeqw %ymm4,%ymm6,%ymm2
vpcmpgtb %ymm4,%ymm6,%ymm2
vpcmpgtd %ymm4,%ymm6,%ymm2
vpcmpgtq %ymm4,%ymm6,%ymm2
vpcmpgtw %ymm4,%ymm6,%ymm2
vphaddd %ymm4,%ymm6,%ymm2
vphaddsw %ymm4,%ymm6,%ymm2
vphaddw %ymm4,%ymm6,%ymm2
vphsubd %ymm4,%ymm6,%ymm2
vphsubsw %ymm4,%ymm6,%ymm2
vphsubw %ymm4,%ymm6,%ymm2
vpmaddubsw %ymm4,%ymm6,%ymm2
vpmaddwd %ymm4,%ymm6,%ymm2
vpmaxsb %ymm4,%ymm6,%ymm2
vpmaxsd %ymm4,%ymm6,%ymm2
vpmaxsw %ymm4,%ymm6,%ymm2
vpmaxub %ymm4,%ymm6,%ymm2
vpmaxud %ymm4,%ymm6,%ymm2
vpmaxuw %ymm4,%ymm6,%ymm2
vpminsb %ymm4,%ymm6,%ymm2
vpminsd %ymm4,%ymm6,%ymm2
vpminsw %ymm4,%ymm6,%ymm2
vpminub %ymm4,%ymm6,%ymm2
vpminud %ymm4,%ymm6,%ymm2
vpminuw %ymm4,%ymm6,%ymm2
vpmovmskb %ymm4,%ecx
# Sign/zero extension forms: 128-bit source, 256-bit destination.
vpmovsxbd %xmm4,%ymm6
vpmovsxbq %xmm4,%ymm4
vpmovsxbw %xmm4,%ymm4
vpmovsxdq %xmm4,%ymm4
vpmovsxwd %xmm4,%ymm4
vpmovsxwq %xmm4,%ymm6
vpmovzxbd %xmm4,%ymm6
vpmovzxbq %xmm4,%ymm4
vpmovzxbw %xmm4,%ymm4
vpmovzxdq %xmm4,%ymm4
vpmovzxwd %xmm4,%ymm4
vpmovzxwq %xmm4,%ymm6
vpmuldq %ymm4,%ymm6,%ymm2
vpmulhrsw %ymm4,%ymm6,%ymm2
vpmulhuw %ymm4,%ymm6,%ymm2
vpmulhw %ymm4,%ymm6,%ymm2
vpmulld %ymm4,%ymm6,%ymm2
vpmullw %ymm4,%ymm6,%ymm2
vpmuludq %ymm4,%ymm6,%ymm2
vpor %ymm4,%ymm6,%ymm2
vpsadbw %ymm4,%ymm6,%ymm2
vpshufb %ymm4,%ymm6,%ymm2
vpshufd $7,%ymm6,%ymm2
vpshufhw $7,%ymm6,%ymm2
vpshuflw $7,%ymm6,%ymm2
vpsignb %ymm4,%ymm6,%ymm2
vpsignd %ymm4,%ymm6,%ymm2
vpsignw %ymm4,%ymm6,%ymm2
# Immediate-count shift forms.
vpslld $7,%ymm6,%ymm2
vpslldq $7,%ymm6,%ymm2
vpsllq $7,%ymm6,%ymm2
vpsllw $7,%ymm6,%ymm2
vpsrad $7,%ymm6,%ymm2
vpsraw $7,%ymm6,%ymm2
vpsrld $7,%ymm6,%ymm2
vpsrldq $7,%ymm6,%ymm2
vpsrlq $7,%ymm6,%ymm2
vpsrlw $7,%ymm6,%ymm2
vpsubb %ymm4,%ymm6,%ymm2
vpsubd %ymm4,%ymm6,%ymm2
vpsubq %ymm4,%ymm6,%ymm2
vpsubsb %ymm4,%ymm6,%ymm2
vpsubsw %ymm4,%ymm6,%ymm2
vpsubusb %ymm4,%ymm6,%ymm2
vpsubusw %ymm4,%ymm6,%ymm2
vpsubw %ymm4,%ymm6,%ymm2
vpunpckhbw %ymm4,%ymm6,%ymm2
vpunpckhdq %ymm4,%ymm6,%ymm2
vpunpckhqdq %ymm4,%ymm6,%ymm2
vpunpckhwd %ymm4,%ymm6,%ymm2
vpunpcklbw %ymm4,%ymm6,%ymm2
vpunpckldq %ymm4,%ymm6,%ymm2
vpunpcklqdq %ymm4,%ymm6,%ymm2
vpunpcklwd %ymm4,%ymm6,%ymm2
vpxor %ymm4,%ymm6,%ymm2
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/x86-64-avx-scalar.s
#   (repo: stsp/binutils-ia16, 18,422 bytes)
# ----------------------------------------------------------------------
# Check 64bit AVX scalar instructions
# Assembler test fixture (encodings matched against the companion .d
# file — instruction lines must stay exactly as written).  This half is
# AT&T syntax; the Intel-syntax repetition follows after
# .intel_syntax noprefix.  The operand-shape groups are labelled by the
# "Tests for op ..." comments below.
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%rcx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%rcx),%xmm4
# Tests for op mem64, xmm
vmovsd (%rcx),%xmm4
# Tests for op xmm, mem64
vmovsd %xmm4,(%rcx)
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%rcx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%rcx),%ecx
# Tests for op xmm/mem64, regq
vcvtsd2si %xmm4,%rcx
vcvtsd2si (%rcx),%rcx
vcvttsd2si %xmm4,%rcx
vcvttsd2si (%rcx),%rcx
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq %rcx,%xmm4,%xmm6
vcvtsi2sdq (%rcx),%xmm4,%xmm6
vcvtsi2ssq %rcx,%xmm4,%xmm6
vcvtsi2ssq (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%rcx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%rcx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%rcx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%rcx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%rcx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%rcx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%rcx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%rcx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%rcx),%xmm6,%xmm2
# vcmp pseudo-ops: each mnemonic suffix encodes one of the 32 vcmpsd
# immediate predicates.
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%rcx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%rcx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%rcx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%rcx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%rcx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%rcx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%rcx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%rcx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%rcx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%rcx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%rcx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%rcx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%rcx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%rcx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%rcx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%rcx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%rcx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%rcx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%rcx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%rcx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%rcx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%rcx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%rcx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%rcx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%rcx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%rcx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%rcx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%rcx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%rcx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%rcx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%rcx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%rcx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%rcx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%rcx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%rcx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%rcx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%rcx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%rcx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%rcx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%rcx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%rcx),%xmm6,%xmm2
# Same vcmp predicate pseudo-ops for the single-precision scalar form.
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%rcx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%rcx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%rcx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%rcx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%rcx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%rcx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%rcx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%rcx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%rcx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%rcx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%rcx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%rcx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%rcx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%rcx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%rcx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%rcx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%rcx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%rcx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%rcx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%rcx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%rcx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%rcx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%rcx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%rcx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%rcx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%rcx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%rcx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%rcx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%rcx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%rcx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%rcx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%rcx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%rcx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%rcx),%xmm4
# Tests for op mem32, xmm
vmovss (%rcx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%rcx)
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%rcx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%rcx),%ecx
# Tests for op xmm/mem32, regq
vcvtss2si %xmm4,%rcx
vcvtss2si (%rcx),%rcx
vcvttss2si %xmm4,%rcx
vcvttss2si (%rcx),%rcx
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sdl (%rcx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ssl (%rcx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%rcx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%rcx),%xmm6,%xmm2
# Tests for op xmm, xmm, xmm
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
#Tests with different memory and register operands.
# Exercises every x86-64 addressing form, including %riz (index-none
# SIB encodings) and the special-cased bases rbp/rsp/r12/r13/rip.
vcvtsi2sdl 0x12345678,%xmm8,%xmm15
vcvtsi2sdl (%rbp),%xmm8,%xmm15
vcvtsi2sdl (%rsp),%xmm8,%xmm15
vcvtsi2sdl 0x99(%rbp),%xmm8,%xmm15
vcvtsi2sdl 0x99(%r15),%xmm8,%xmm15
vcvtsi2sdl 0x99(%rip),%xmm8,%xmm15
vcvtsi2sdl 0x99(%rsp),%xmm8,%xmm15
vcvtsi2sdl 0x99(%r12),%xmm8,%xmm15
vcvtsi2sdl -0x99(,%riz),%xmm8,%xmm15
vcvtsi2sdl -0x99(,%riz,2),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rbx,%riz),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rbx,%riz,2),%xmm8,%xmm15
vcvtsi2sdl -0x99(%r12,%r15,4),%xmm8,%xmm15
vcvtsi2sdl -0x99(%r8,%r15,8),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rbp,%r13,4),%xmm8,%xmm15
vcvtsi2sdl -0x99(%rsp,%r12,1),%xmm8,%xmm15
# Tests for all register operands.
vcvtsd2si %xmm8,%r8d
vcvtsi2sdl %r8d,%xmm8,%xmm15
# Tests for different memory/register operand
vcvtsd2si (%rcx),%r8
vcvtss2si (%rcx),%r8
.intel_syntax noprefix
# Intel-syntax repetition of the AVX scalar tests above: same encodings,
# destination operand first, explicit QWORD/DWORD PTR sizes, plus
# sizeless [mem] forms to check that the size is inferred correctly.
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [rcx]
vcomisd xmm4,[rcx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [rcx]
vucomisd xmm4,[rcx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [rcx]
vmovsd xmm4,[rcx]
# Tests for op xmm, mem64
vmovsd QWORD PTR [rcx],xmm4
vmovsd [rcx],xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [rcx]
vcvtsd2si ecx,[rcx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [rcx]
vcvttsd2si ecx,[rcx]
# Tests for op xmm/mem64, regq
vcvtsd2si rcx,xmm4
vcvtsd2si rcx,QWORD PTR [rcx]
vcvtsd2si rcx,[rcx]
vcvttsd2si rcx,xmm4
vcvttsd2si rcx,QWORD PTR [rcx]
vcvttsd2si rcx,[rcx]
# Tests for op regq/mem64, xmm, xmm
vcvtsi2sdq xmm6,xmm4,rcx
vcvtsi2sdq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2sdq xmm6,xmm4,[rcx]
vcvtsi2ssq xmm6,xmm4,rcx
vcvtsi2ssq xmm6,xmm4,QWORD PTR [rcx]
vcvtsi2ssq xmm6,xmm4,[rcx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [rcx],7
vcmpsd xmm2,xmm6,[rcx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [rcx],7
vroundsd xmm2,xmm6,[rcx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [rcx]
vaddsd xmm2,xmm6,[rcx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [rcx]
vcvtsd2ss xmm2,xmm6,[rcx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [rcx]
vdivsd xmm2,xmm6,[rcx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [rcx]
vmaxsd xmm2,xmm6,[rcx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [rcx]
vminsd xmm2,xmm6,[rcx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [rcx]
vmulsd xmm2,xmm6,[rcx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [rcx]
vsqrtsd xmm2,xmm6,[rcx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [rcx]
vsubsd xmm2,xmm6,[rcx]
# vcmp predicate pseudo-ops (double-precision scalar).
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeqsd xmm2,xmm6,[rcx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpltsd xmm2,xmm6,[rcx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [rcx]
vcmplesd xmm2,xmm6,[rcx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpunordsd xmm2,xmm6,[rcx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneqsd xmm2,xmm6,[rcx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnltsd xmm2,xmm6,[rcx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlesd xmm2,xmm6,[rcx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [rcx]
vcmpordsd xmm2,xmm6,[rcx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_uqsd xmm2,xmm6,[rcx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [rcx]
vcmpngesd xmm2,xmm6,[rcx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngtsd xmm2,xmm6,[rcx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalsesd xmm2,xmm6,[rcx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_oqsd xmm2,xmm6,[rcx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [rcx]
vcmpgesd xmm2,xmm6,[rcx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgtsd xmm2,xmm6,[rcx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [rcx]
vcmptruesd xmm2,xmm6,[rcx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ossd xmm2,xmm6,[rcx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmplt_oqsd xmm2,xmm6,[rcx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmple_oqsd xmm2,xmm6,[rcx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpunord_ssd xmm2,xmm6,[rcx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ussd xmm2,xmm6,[rcx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnlt_uqsd xmm2,xmm6,[rcx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnle_uqsd xmm2,xmm6,[rcx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [rcx]
vcmpord_ssd xmm2,xmm6,[rcx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmpeq_ussd xmm2,xmm6,[rcx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpnge_uqsd xmm2,xmm6,[rcx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpngt_uqsd xmm2,xmm6,[rcx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpfalse_ossd xmm2,xmm6,[rcx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [rcx]
vcmpneq_ossd xmm2,xmm6,[rcx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpge_oqsd xmm2,xmm6,[rcx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [rcx]
vcmpgt_oqsd xmm2,xmm6,[rcx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [rcx]
vcmptrue_ussd xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [rcx]
vaddss xmm2,xmm6,[rcx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [rcx]
vcvtss2sd xmm2,xmm6,[rcx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [rcx]
vdivss xmm2,xmm6,[rcx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [rcx]
vmaxss xmm2,xmm6,[rcx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [rcx]
vminss xmm2,xmm6,[rcx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [rcx]
vmulss xmm2,xmm6,[rcx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [rcx]
vrcpss xmm2,xmm6,[rcx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [rcx]
vrsqrtss xmm2,xmm6,[rcx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [rcx]
vsqrtss xmm2,xmm6,[rcx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [rcx]
vsubss xmm2,xmm6,[rcx]
# vcmp predicate pseudo-ops (single-precision scalar).
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeqss xmm2,xmm6,[rcx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [rcx]
vcmpltss xmm2,xmm6,[rcx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [rcx]
vcmpless xmm2,xmm6,[rcx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [rcx]
vcmpunordss xmm2,xmm6,[rcx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneqss xmm2,xmm6,[rcx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [rcx]
vcmpnltss xmm2,xmm6,[rcx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [rcx]
vcmpnless xmm2,xmm6,[rcx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [rcx]
vcmpordss xmm2,xmm6,[rcx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_uqss xmm2,xmm6,[rcx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [rcx]
vcmpngess xmm2,xmm6,[rcx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [rcx]
vcmpngtss xmm2,xmm6,[rcx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [rcx]
vcmpfalsess xmm2,xmm6,[rcx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_oqss xmm2,xmm6,[rcx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [rcx]
vcmpgess xmm2,xmm6,[rcx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [rcx]
vcmpgtss xmm2,xmm6,[rcx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [rcx]
vcmptruess xmm2,xmm6,[rcx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_osss xmm2,xmm6,[rcx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmplt_oqss xmm2,xmm6,[rcx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmple_oqss xmm2,xmm6,[rcx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpunord_sss xmm2,xmm6,[rcx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_usss xmm2,xmm6,[rcx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnlt_uqss xmm2,xmm6,[rcx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnle_uqss xmm2,xmm6,[rcx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [rcx]
vcmpord_sss xmm2,xmm6,[rcx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [rcx]
vcmpeq_usss xmm2,xmm6,[rcx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpnge_uqss xmm2,xmm6,[rcx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [rcx]
vcmpngt_uqss xmm2,xmm6,[rcx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpfalse_osss xmm2,xmm6,[rcx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [rcx]
vcmpneq_osss xmm2,xmm6,[rcx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpge_oqss xmm2,xmm6,[rcx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [rcx]
vcmpgt_oqss xmm2,xmm6,[rcx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [rcx]
vcmptrue_usss xmm2,xmm6,[rcx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [rcx]
vcomiss xmm4,[rcx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [rcx]
vucomiss xmm4,[rcx]
# Tests for op mem32, xmm
vmovss xmm4,DWORD PTR [rcx]
vmovss xmm4,[rcx]
# Tests for op xmm, mem32
vmovss DWORD PTR [rcx],xmm4
vmovss [rcx],xmm4
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [rcx]
vcvtss2si ecx,[rcx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [rcx]
vcvttss2si ecx,[rcx]
# Tests for op xmm/mem32, regq
vcvtss2si rcx,xmm4
vcvtss2si rcx,DWORD PTR [rcx]
vcvtss2si rcx,[rcx]
vcvttss2si rcx,xmm4
vcvttss2si rcx,DWORD PTR [rcx]
vcvttss2si rcx,[rcx]
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [rcx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [rcx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [rcx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [rcx],7
vroundss xmm2,xmm6,[rcx],7
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
#Tests with different memory and register operands.
# Same addressing-form coverage as the AT&T half, Intel spelling
# (riz = "no index" SIB encodings; special-cased rbp/rsp/r12/rip bases).
vcvtsi2sd xmm15,xmm8,DWORD PTR ds:0x12345678
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r15+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rip+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*1-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [riz*2-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*1-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbx+riz*2-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r12+r15*4-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [r8+r15*8-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rbp+r12*4-0x99]
vcvtsi2sd xmm15,xmm8,DWORD PTR [rsp+r13*1-0x99]
# Tests for all register operands.
vcvtsd2si r8d,xmm8
vcvtsi2sd xmm15,xmm8,r8d
# Tests for different memory/register operand
vcvtsd2si r8,QWORD PTR [rcx]
vcvtss2si r8,DWORD PTR [rcx]
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/unspec.s  (repo: stsp/binutils-ia16, 2,429 bytes)
# ----------------------------------------------------------------------
.text
# Assembler test fixture: every line deliberately mixes xmm and ymm
# operands within one instruction (and xmm/ymm index registers in the
# gather forms).  NOTE(review): presumably this checks how the assembler
# handles such size-inconsistent operand combinations — confirm against
# the companion .d/.l expectation files.  Lines must stay exactly as
# written.
unspec:
vblendvpd %xmm0, (%eax), %ymm0, %ymm0
vblendvpd %ymm0, (%eax), %xmm0, %xmm0
vblendvps %xmm0, (%eax), %ymm0, %ymm0
vblendvps %ymm0, (%eax), %xmm0, %xmm0
vfmaddpd %xmm0, (%eax), %ymm0, %ymm0
vfmaddpd %ymm0, (%eax), %xmm0, %xmm0
vfmaddps %xmm0, (%eax), %ymm0, %ymm0
vfmaddps %ymm0, (%eax), %xmm0, %xmm0
vgatherdpd %xmm0, (%eax,%xmm1), %ymm2
vgatherdpd %ymm0, (%eax,%xmm1), %xmm2
vgatherdps %xmm0, (%eax,%xmm1), %ymm2
vgatherdps %ymm0, (%eax,%ymm1), %xmm2
vgatherqpd %xmm0, (%eax,%xmm1), %ymm2
vgatherqpd %ymm0, (%eax,%ymm1), %xmm2
vgatherqps %xmm0, (%eax,%xmm1), %ymm2
vgatherqps %xmm0, (%eax,%ymm1), %ymm2
vpblendvb %xmm0, (%eax), %ymm0, %ymm0
vpblendvb %ymm0, (%eax), %xmm0, %xmm0
vpcmov %xmm0, (%eax), %ymm0, %ymm0
vpcmov %ymm0, (%eax), %xmm0, %xmm0
vpermil2pd $0, %xmm0, (%eax), %ymm0, %ymm0
vpermil2pd $0, %ymm0, (%eax), %xmm0, %xmm0
vpermil2ps $0, %xmm0, (%eax), %ymm0, %ymm0
vpermil2ps $0, %ymm0, (%eax), %xmm0, %xmm0
vpgatherdd %xmm0, (%eax,%xmm1), %ymm2
vpgatherdd %ymm0, (%eax,%ymm1), %xmm2
vpgatherdq %xmm0, (%eax,%xmm1), %ymm2
vpgatherdq %ymm0, (%eax,%xmm1), %xmm2
vpgatherqd %xmm0, (%eax,%xmm1), %ymm2
vpgatherqd %xmm0, (%eax,%ymm1), %ymm2
vpgatherqq %xmm0, (%eax,%xmm1), %ymm2
vpgatherqq %ymm0, (%eax,%ymm1), %xmm2
# Same size-mismatched combinations in Intel syntax.
.intel_syntax noprefix
vblendvpd xmm0, xmm0, [eax], ymm0
vblendvpd ymm0, ymm0, [eax], xmm0
vblendvps xmm0, xmm0, [eax], ymm0
vblendvps ymm0, ymm0, [eax], xmm0
vfmaddpd xmm0, xmm0, [eax], ymm0
vfmaddpd ymm0, ymm0, [eax], xmm0
vfmaddps xmm0, xmm0, [eax], ymm0
vfmaddps ymm0, ymm0, [eax], xmm0
vgatherdpd xmm0, [eax+xmm1], ymm2
vgatherdpd ymm0, [eax+xmm1], xmm2
vgatherdps xmm0, [eax+xmm1], ymm2
vgatherdps ymm0, [eax+ymm1], xmm2
vgatherqpd xmm0, [eax+xmm1], ymm2
vgatherqpd ymm0, [eax+ymm1], xmm2
vgatherqps xmm0, [eax+xmm1], ymm2
vgatherqps xmm0, [eax+ymm1], ymm2
vpblendvb xmm0, xmm0, [eax], ymm0
vpblendvb ymm0, ymm0, [eax], xmm0
vpcmov xmm0, xmm0, [eax], ymm0
vpcmov ymm0, ymm0, [eax], xmm0
vpermil2pd xmm0, xmm0, [eax], ymm0, 0
vpermil2pd ymm0, ymm0, [eax], xmm0, 0
vpermil2ps xmm0, xmm0, [eax], ymm0, 0
vpermil2ps ymm0, ymm0, [eax], xmm0, 0
vpgatherdd xmm0, [eax+xmm1], ymm2
vpgatherdd ymm0, [eax+ymm1], xmm2
vpgatherdq xmm0, [eax+xmm1], ymm2
vpgatherdq ymm0, [eax+xmm1], xmm2
vpgatherqd xmm0, [eax+xmm1], ymm2
vpgatherqd xmm0, [eax+ymm1], ymm2
vpgatherqq xmm0, [eax+xmm1], ymm2
vpgatherqq ymm0, [eax+ymm1], xmm2
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/x86-64-avx512bw_vl-opts.s
#   (repo: stsp/binutils-ia16, 4,569 bytes)
# ----------------------------------------------------------------------
# Check 64bit AVX512{BW,VL} swap instructions
# Assembler test fixture (encodings matched against the companion .d
# file — instruction lines must stay exactly as written).  Each pair
# contrasts the plain mnemonic with its ".s" suffixed form, which asks
# the assembler for the alternative (swapped/store-form) encoding of the
# same register-to-register move; masking ({k7}) and zero-masking
# ({k7}{z}) variants are covered for xmm and ymm.
.allow_index_reg
.text
_start:
vmovdqu8 %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu8.s %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu8.s %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu8.s %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu8.s %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu8 %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu8.s %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu8.s %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu8.s %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu8.s %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu8 %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu8.s %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu16.s %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu16.s %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu16.s %xmm29, %xmm30 # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu16.s %xmm29, %xmm30{%k7} # AVX512{BW,VL}
vmovdqu16 %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %xmm29, %xmm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu16.s %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu16.s %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu16.s %ymm29, %ymm30 # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu16.s %ymm29, %ymm30{%k7} # AVX512{BW,VL}
vmovdqu16 %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
vmovdqu16.s %ymm29, %ymm30{%k7}{z} # AVX512{BW,VL}
# Same tests repeated in Intel syntax.
.intel_syntax noprefix
vmovdqu8 xmm30, xmm29 # AVX512{BW,VL}
vmovdqu8.s xmm30, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu8.s xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu8.s xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30, xmm29 # AVX512{BW,VL}
vmovdqu8.s xmm30, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu8.s xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu8 xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu8.s xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu8 ymm30, ymm29 # AVX512{BW,VL}
vmovdqu8.s ymm30, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu8.s ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu8.s ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30, ymm29 # AVX512{BW,VL}
vmovdqu8.s ymm30, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu8.s ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu8 ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu8.s ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu16 xmm30, xmm29 # AVX512{BW,VL}
vmovdqu16.s xmm30, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu16.s xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu16.s xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30, xmm29 # AVX512{BW,VL}
vmovdqu16.s xmm30, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu16.s xmm30{k7}, xmm29 # AVX512{BW,VL}
vmovdqu16 xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu16.s xmm30{k7}{z}, xmm29 # AVX512{BW,VL}
vmovdqu16 ymm30, ymm29 # AVX512{BW,VL}
vmovdqu16.s ymm30, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu16.s ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu16.s ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30, ymm29 # AVX512{BW,VL}
vmovdqu16.s ymm30, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu16.s ymm30{k7}, ymm29 # AVX512{BW,VL}
vmovdqu16 ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
vmovdqu16.s ymm30{k7}{z}, ymm29 # AVX512{BW,VL}
# ----------------------------------------------------------------------
# gas/testsuite/gas/i386/lwp.s  (repo: stsp/binutils-ia16, 1,977 bytes)
# ----------------------------------------------------------------------
# Check 64bit LWP instructions
# NOTE(review): header says "64bit" but every operand below is a 32-bit
# register/address — looks copied from the x86-64 variant; confirm intent.
.allow_index_reg
.text
_start:
# LLWPCB: load the LWP control-block pointer from each GPR in turn.
llwpcb %eax
llwpcb %ecx
llwpcb %edx
llwpcb %ebx
llwpcb %esp
llwpcb %ebp
llwpcb %esi
llwpcb %edi
# SLWPCB: store the LWP control-block pointer into each GPR (reverse order).
slwpcb %edi
slwpcb %esi
slwpcb %ebp
slwpcb %esp
slwpcb %ebx
slwpcb %edx
slwpcb %ecx
slwpcb %eax
# LWPINS/LWPVAL with a register second operand.
lwpins $0x12345678, %edi, %eax
lwpins $0x12345678, %esi, %ecx
lwpins $0x12345678, %ebp, %edx
lwpins $0x12345678, %esp, %ebx
lwpins $0x12345678, %ebx, %esp
lwpins $0x12345678, %edx, %ebp
lwpins $0x12345678, %ecx, %esi
lwpins $0x12345678, %eax, %edi
lwpval $0x12345678, %edi, %eax
lwpval $0x12345678, %esi, %ecx
lwpval $0x12345678, %ebp, %edx
lwpval $0x12345678, %esp, %ebx
lwpval $0x12345678, %ebx, %esp
lwpval $0x12345678, %edx, %ebp
lwpval $0x12345678, %ecx, %esi
lwpval $0x12345678, %eax, %edi
# LWPINS/LWPVAL with a plain memory operand (no displacement).
lwpins $0x12345678, (%edi), %eax
lwpins $0x12345678, (%esi), %ecx
lwpins $0x12345678, (%ebp), %edx
lwpins $0x12345678, (%esp), %ebx
lwpins $0x12345678, (%ebx), %esp
lwpins $0x12345678, (%edx), %ebp
lwpins $0x12345678, (%ecx), %esi
lwpins $0x12345678, (%eax), %edi
lwpval $0x12345678, (%edi), %eax
lwpval $0x12345678, (%esi), %ecx
lwpval $0x12345678, (%ebp), %edx
lwpval $0x12345678, (%esp), %ebx
lwpval $0x12345678, (%ebx), %esp
lwpval $0x12345678, (%edx), %ebp
lwpval $0x12345678, (%ecx), %esi
lwpval $0x12345678, (%eax), %edi
# LWPINS/LWPVAL with a displaced memory operand.
lwpins $0x12345678, 0xcafe(%edi), %eax
lwpins $0x12345678, 0xcafe(%esi), %ecx
lwpins $0x12345678, 0xcafe(%ebp), %edx
lwpins $0x12345678, 0xcafe(%esp), %ebx
lwpins $0x12345678, 0xcafe(%ebx), %esp
lwpins $0x12345678, 0xcafe(%edx), %ebp
lwpins $0x12345678, 0xcafe(%ecx), %esi
lwpins $0x12345678, 0xcafe(%eax), %edi
lwpval $0x12345678, 0xcafe(%edi), %eax
lwpval $0x12345678, 0xcafe(%esi), %ecx
lwpval $0x12345678, 0xcafe(%ebp), %edx
lwpval $0x12345678, 0xcafe(%esp), %ebx
lwpval $0x12345678, 0xcafe(%ebx), %esp
lwpval $0x12345678, 0xcafe(%edx), %ebp
lwpval $0x12345678, 0xcafe(%ecx), %esi
lwpval $0x12345678, 0xcafe(%eax), %edi
|
stsp/binutils-ia16
| 7,669
|
gas/testsuite/gas/i386/x86-64-evex-wig.s
|
# Check EVEX WIG instructions
# The "Disp8" lines sit exactly on the EVEX compressed-displacement
# (Disp8*N) boundary; the neighbouring lines just past the boundary must
# fall back to a full 32-bit displacement.
.allow_index_reg
.text
_start:
vextractps $0xab, %xmm29, %rax # AVX512
vextractps $123, %xmm29, %rax # AVX512
vextractps $123, %xmm29, %r8 # AVX512
vextractps $123, %xmm29, (%rcx) # AVX512
vextractps $123, %xmm29, 0x123(%rax,%r14,8) # AVX512
vextractps $123, %xmm29, 508(%rdx) # AVX512 Disp8
vextractps $123, %xmm29, 512(%rdx) # AVX512
vextractps $123, %xmm29, -512(%rdx) # AVX512 Disp8
vextractps $123, %xmm29, -516(%rdx) # AVX512
# {evex} pseudo-prefix forces the EVEX encoding of these legacy-VEX insns.
{evex} vpextrb $0, %xmm0, %eax
{evex} vpextrb $0, %xmm0, (%rax)
{evex} vpextrw $0, %xmm0, %eax
{evex} {store} vpextrw $0, %xmm0, %eax
{evex} vpextrw $0, %xmm0, (%rax)
{evex} vpinsrb $0, %eax, %xmm0, %xmm0
{evex} vpinsrb $0, (%rax), %xmm0, %xmm0
{evex} vpinsrw $0, %eax, %xmm0, %xmm0
{evex} vpinsrw $0, (%rax), %xmm0, %xmm0
vpmovsxbd %xmm29, %zmm30{%k7} # AVX512
vpmovsxbd %xmm29, %zmm30{%k7}{z} # AVX512
vpmovsxbd (%rcx), %zmm30{%k7} # AVX512
vpmovsxbd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovsxbd 2032(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxbd 2048(%rdx), %zmm30{%k7} # AVX512
vpmovsxbd -2048(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxbd -2064(%rdx), %zmm30{%k7} # AVX512
vpmovsxbq %xmm29, %zmm30{%k7} # AVX512
vpmovsxbq %xmm29, %zmm30{%k7}{z} # AVX512
vpmovsxbq (%rcx), %zmm30{%k7} # AVX512
vpmovsxbq 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovsxbq 1016(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxbq 1024(%rdx), %zmm30{%k7} # AVX512
vpmovsxbq -1024(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxbq -1032(%rdx), %zmm30{%k7} # AVX512
vpmovsxwd %ymm29, %zmm30{%k7} # AVX512
vpmovsxwd %ymm29, %zmm30{%k7}{z} # AVX512
vpmovsxwd (%rcx), %zmm30{%k7} # AVX512
vpmovsxwd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovsxwd 4064(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxwd 4096(%rdx), %zmm30{%k7} # AVX512
vpmovsxwd -4096(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxwd -4128(%rdx), %zmm30{%k7} # AVX512
vpmovsxwq %xmm29, %zmm30{%k7} # AVX512
vpmovsxwq %xmm29, %zmm30{%k7}{z} # AVX512
vpmovsxwq (%rcx), %zmm30{%k7} # AVX512
vpmovsxwq 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovsxwq 2032(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxwq 2048(%rdx), %zmm30{%k7} # AVX512
vpmovsxwq -2048(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovsxwq -2064(%rdx), %zmm30{%k7} # AVX512
vpmovzxbd %xmm29, %zmm30{%k7} # AVX512
vpmovzxbd %xmm29, %zmm30{%k7}{z} # AVX512
vpmovzxbd (%rcx), %zmm30{%k7} # AVX512
vpmovzxbd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovzxbd 2032(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxbd 2048(%rdx), %zmm30{%k7} # AVX512
vpmovzxbd -2048(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxbd -2064(%rdx), %zmm30{%k7} # AVX512
vpmovzxbq %xmm29, %zmm30{%k7} # AVX512
vpmovzxbq %xmm29, %zmm30{%k7}{z} # AVX512
vpmovzxbq (%rcx), %zmm30{%k7} # AVX512
vpmovzxbq 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovzxbq 1016(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxbq 1024(%rdx), %zmm30{%k7} # AVX512
vpmovzxbq -1024(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxbq -1032(%rdx), %zmm30{%k7} # AVX512
vpmovzxwd %ymm29, %zmm30{%k7} # AVX512
vpmovzxwd %ymm29, %zmm30{%k7}{z} # AVX512
vpmovzxwd (%rcx), %zmm30{%k7} # AVX512
vpmovzxwd 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovzxwd 4064(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxwd 4096(%rdx), %zmm30{%k7} # AVX512
vpmovzxwd -4096(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxwd -4128(%rdx), %zmm30{%k7} # AVX512
vpmovzxwq %xmm29, %zmm30{%k7} # AVX512
vpmovzxwq %xmm29, %zmm30{%k7}{z} # AVX512
vpmovzxwq (%rcx), %zmm30{%k7} # AVX512
vpmovzxwq 0x123(%rax,%r14,8), %zmm30{%k7} # AVX512
vpmovzxwq 2032(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxwq 2048(%rdx), %zmm30{%k7} # AVX512
vpmovzxwq -2048(%rdx), %zmm30{%k7} # AVX512 Disp8
vpmovzxwq -2064(%rdx), %zmm30{%k7} # AVX512
# The same tests again in Intel syntax.
.intel_syntax noprefix
vextractps rax, xmm29, 0xab # AVX512
vextractps rax, xmm29, 123 # AVX512
vextractps r8, xmm29, 123 # AVX512
vextractps DWORD PTR [rcx], xmm29, 123 # AVX512
vextractps DWORD PTR [rax+r14*8+0x1234], xmm29, 123 # AVX512
vextractps DWORD PTR [rdx+508], xmm29, 123 # AVX512 Disp8
vextractps DWORD PTR [rdx+512], xmm29, 123 # AVX512
vextractps DWORD PTR [rdx-512], xmm29, 123 # AVX512 Disp8
vextractps DWORD PTR [rdx-516], xmm29, 123 # AVX512
vpmovsxbd zmm30{k7}, xmm29 # AVX512
vpmovsxbd zmm30{k7}{z}, xmm29 # AVX512
vpmovsxbd zmm30{k7}, XMMWORD PTR [rcx] # AVX512
vpmovsxbd zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512 Disp8
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512 Disp8
vpmovsxbd zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512
vpmovsxbq zmm30{k7}, xmm29 # AVX512
vpmovsxbq zmm30{k7}{z}, xmm29 # AVX512
vpmovsxbq zmm30{k7}, QWORD PTR [rcx] # AVX512
vpmovsxbq zmm30{k7}, QWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovsxbq zmm30{k7}, QWORD PTR [rdx+1016] # AVX512 Disp8
vpmovsxbq zmm30{k7}, QWORD PTR [rdx+1024] # AVX512
vpmovsxbq zmm30{k7}, QWORD PTR [rdx-1024] # AVX512 Disp8
vpmovsxbq zmm30{k7}, QWORD PTR [rdx-1032] # AVX512
vpmovsxwd zmm30{k7}, ymm29 # AVX512
vpmovsxwd zmm30{k7}{z}, ymm29 # AVX512
vpmovsxwd zmm30{k7}, YMMWORD PTR [rcx] # AVX512
vpmovsxwd zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx+4064] # AVX512 Disp8
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx+4096] # AVX512
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx-4096] # AVX512 Disp8
vpmovsxwd zmm30{k7}, YMMWORD PTR [rdx-4128] # AVX512
vpmovsxwq zmm30{k7}, xmm29 # AVX512
vpmovsxwq zmm30{k7}{z}, xmm29 # AVX512
vpmovsxwq zmm30{k7}, XMMWORD PTR [rcx] # AVX512
vpmovsxwq zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512 Disp8
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512 Disp8
vpmovsxwq zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512
vpmovzxbd zmm30{k7}, xmm29 # AVX512
vpmovzxbd zmm30{k7}{z}, xmm29 # AVX512
vpmovzxbd zmm30{k7}, XMMWORD PTR [rcx] # AVX512
vpmovzxbd zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512 Disp8
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512 Disp8
vpmovzxbd zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512
vpmovzxbq zmm30{k7}, xmm29 # AVX512
vpmovzxbq zmm30{k7}{z}, xmm29 # AVX512
vpmovzxbq zmm30{k7}, QWORD PTR [rcx] # AVX512
vpmovzxbq zmm30{k7}, QWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovzxbq zmm30{k7}, QWORD PTR [rdx+1016] # AVX512 Disp8
vpmovzxbq zmm30{k7}, QWORD PTR [rdx+1024] # AVX512
vpmovzxbq zmm30{k7}, QWORD PTR [rdx-1024] # AVX512 Disp8
vpmovzxbq zmm30{k7}, QWORD PTR [rdx-1032] # AVX512
vpmovzxwd zmm30{k7}, ymm29 # AVX512
vpmovzxwd zmm30{k7}{z}, ymm29 # AVX512
vpmovzxwd zmm30{k7}, YMMWORD PTR [rcx] # AVX512
vpmovzxwd zmm30{k7}, YMMWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx+4064] # AVX512 Disp8
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx+4096] # AVX512
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx-4096] # AVX512 Disp8
vpmovzxwd zmm30{k7}, YMMWORD PTR [rdx-4128] # AVX512
vpmovzxwq zmm30{k7}, xmm29 # AVX512
vpmovzxwq zmm30{k7}{z}, xmm29 # AVX512
vpmovzxwq zmm30{k7}, XMMWORD PTR [rcx] # AVX512
vpmovzxwq zmm30{k7}, XMMWORD PTR [rax+r14*8+0x1234] # AVX512
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx+2032] # AVX512 Disp8
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx+2048] # AVX512
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx-2048] # AVX512 Disp8
vpmovzxwq zmm30{k7}, XMMWORD PTR [rdx-2064] # AVX512
|
stsp/binutils-ia16
| 3,423
|
gas/testsuite/gas/i386/x86-64-avx512ifma.s
|
# Check 64bit AVX512IFMA instructions
# Covers register, masked ({%k7}) and zero-masked ({%k7}{z}) forms, plain
# and broadcast ({1to8}) memory operands, and both Disp8*N boundaries for
# the compressed displacement (8128/1016 encode as Disp8; one step past
# each requires Disp32).
.allow_index_reg
.text
_start:
vpmadd52luq %zmm28, %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq %zmm28, %zmm29, %zmm30{%k7} # AVX512IFMA
vpmadd52luq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512IFMA
vpmadd52luq (%rcx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq (%rcx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq 8128(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq 8192(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq -8192(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq -8256(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52luq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52luq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq %zmm28, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq %zmm28, %zmm29, %zmm30{%k7} # AVX512IFMA
vpmadd52huq %zmm28, %zmm29, %zmm30{%k7}{z} # AVX512IFMA
vpmadd52huq (%rcx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq 0x123(%rax,%r14,8), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq (%rcx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq 8128(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq 8192(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq -8192(%rdx), %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq -8256(%rdx), %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq 1016(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq 1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
vpmadd52huq -1024(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA Disp8
vpmadd52huq -1032(%rdx){1to8}, %zmm29, %zmm30 # AVX512IFMA
# The same tests again in Intel syntax.
.intel_syntax noprefix
vpmadd52luq zmm30, zmm29, zmm28 # AVX512IFMA
vpmadd52luq zmm30{k7}, zmm29, zmm28 # AVX512IFMA
vpmadd52luq zmm30{k7}{z}, zmm29, zmm28 # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512IFMA
vpmadd52luq zmm30, zmm29, [rcx]{1to8} # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512IFMA
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512IFMA
vpmadd52luq zmm30, zmm29, [rdx+1016]{1to8} # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, [rdx+1024]{1to8} # AVX512IFMA
vpmadd52luq zmm30, zmm29, [rdx-1024]{1to8} # AVX512IFMA Disp8
vpmadd52luq zmm30, zmm29, [rdx-1032]{1to8} # AVX512IFMA
vpmadd52huq zmm30, zmm29, zmm28 # AVX512IFMA
vpmadd52huq zmm30{k7}, zmm29, zmm28 # AVX512IFMA
vpmadd52huq zmm30{k7}{z}, zmm29, zmm28 # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rcx] # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rax+r14*8+0x1234] # AVX512IFMA
vpmadd52huq zmm30, zmm29, [rcx]{1to8} # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx+8128] # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx+8192] # AVX512IFMA
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx-8192] # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, ZMMWORD PTR [rdx-8256] # AVX512IFMA
vpmadd52huq zmm30, zmm29, [rdx+1016]{1to8} # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, [rdx+1024]{1to8} # AVX512IFMA
vpmadd52huq zmm30, zmm29, [rdx-1024]{1to8} # AVX512IFMA Disp8
vpmadd52huq zmm30, zmm29, [rdx-1032]{1to8} # AVX512IFMA
|
stsp/binutils-ia16
| 16,675
|
gas/testsuite/gas/i386/avx-scalar.s
|
# Check AVX scalar instructions
# AT&T-syntax forms first; the same tests are repeated in Intel syntax
# after the .intel_syntax switch later in the file.  The vcmp* mnemonic
# family enumerates all 32 immediate-encoded comparison predicates.
.allow_index_reg
.text
_start:
# Tests for op xmm/mem64, xmm
vcomisd %xmm4,%xmm6
vcomisd (%ecx),%xmm4
vucomisd %xmm4,%xmm6
vucomisd (%ecx),%xmm4
# Tests for op mem64, xmm
vmovsd (%ecx),%xmm4
# Tests for op xmm, mem64
vmovsd %xmm4,(%ecx)
# Tests for op xmm/mem64, regl
vcvtsd2si %xmm4,%ecx
vcvtsd2si (%ecx),%ecx
vcvttsd2si %xmm4,%ecx
vcvttsd2si (%ecx),%ecx
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd $7,%xmm4,%xmm6,%xmm2
vcmpsd $7,(%ecx),%xmm6,%xmm2
vroundsd $7,%xmm4,%xmm6,%xmm2
vroundsd $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm, xmm
vaddsd %xmm4,%xmm6,%xmm2
vaddsd (%ecx),%xmm6,%xmm2
vcvtsd2ss %xmm4,%xmm6,%xmm2
vcvtsd2ss (%ecx),%xmm6,%xmm2
vdivsd %xmm4,%xmm6,%xmm2
vdivsd (%ecx),%xmm6,%xmm2
vmaxsd %xmm4,%xmm6,%xmm2
vmaxsd (%ecx),%xmm6,%xmm2
vminsd %xmm4,%xmm6,%xmm2
vminsd (%ecx),%xmm6,%xmm2
vmulsd %xmm4,%xmm6,%xmm2
vmulsd (%ecx),%xmm6,%xmm2
vsqrtsd %xmm4,%xmm6,%xmm2
vsqrtsd (%ecx),%xmm6,%xmm2
vsubsd %xmm4,%xmm6,%xmm2
vsubsd (%ecx),%xmm6,%xmm2
vcmpeqsd %xmm4,%xmm6,%xmm2
vcmpeqsd (%ecx),%xmm6,%xmm2
vcmpltsd %xmm4,%xmm6,%xmm2
vcmpltsd (%ecx),%xmm6,%xmm2
vcmplesd %xmm4,%xmm6,%xmm2
vcmplesd (%ecx),%xmm6,%xmm2
vcmpunordsd %xmm4,%xmm6,%xmm2
vcmpunordsd (%ecx),%xmm6,%xmm2
vcmpneqsd %xmm4,%xmm6,%xmm2
vcmpneqsd (%ecx),%xmm6,%xmm2
vcmpnltsd %xmm4,%xmm6,%xmm2
vcmpnltsd (%ecx),%xmm6,%xmm2
vcmpnlesd %xmm4,%xmm6,%xmm2
vcmpnlesd (%ecx),%xmm6,%xmm2
vcmpordsd %xmm4,%xmm6,%xmm2
vcmpordsd (%ecx),%xmm6,%xmm2
vcmpeq_uqsd %xmm4,%xmm6,%xmm2
vcmpeq_uqsd (%ecx),%xmm6,%xmm2
vcmpngesd %xmm4,%xmm6,%xmm2
vcmpngesd (%ecx),%xmm6,%xmm2
vcmpngtsd %xmm4,%xmm6,%xmm2
vcmpngtsd (%ecx),%xmm6,%xmm2
vcmpfalsesd %xmm4,%xmm6,%xmm2
vcmpfalsesd (%ecx),%xmm6,%xmm2
vcmpneq_oqsd %xmm4,%xmm6,%xmm2
vcmpneq_oqsd (%ecx),%xmm6,%xmm2
vcmpgesd %xmm4,%xmm6,%xmm2
vcmpgesd (%ecx),%xmm6,%xmm2
vcmpgtsd %xmm4,%xmm6,%xmm2
vcmpgtsd (%ecx),%xmm6,%xmm2
vcmptruesd %xmm4,%xmm6,%xmm2
vcmptruesd (%ecx),%xmm6,%xmm2
vcmpeq_ossd %xmm4,%xmm6,%xmm2
vcmpeq_ossd (%ecx),%xmm6,%xmm2
vcmplt_oqsd %xmm4,%xmm6,%xmm2
vcmplt_oqsd (%ecx),%xmm6,%xmm2
vcmple_oqsd %xmm4,%xmm6,%xmm2
vcmple_oqsd (%ecx),%xmm6,%xmm2
vcmpunord_ssd %xmm4,%xmm6,%xmm2
vcmpunord_ssd (%ecx),%xmm6,%xmm2
vcmpneq_ussd %xmm4,%xmm6,%xmm2
vcmpneq_ussd (%ecx),%xmm6,%xmm2
vcmpnlt_uqsd %xmm4,%xmm6,%xmm2
vcmpnlt_uqsd (%ecx),%xmm6,%xmm2
vcmpnle_uqsd %xmm4,%xmm6,%xmm2
vcmpnle_uqsd (%ecx),%xmm6,%xmm2
vcmpord_ssd %xmm4,%xmm6,%xmm2
vcmpord_ssd (%ecx),%xmm6,%xmm2
vcmpeq_ussd %xmm4,%xmm6,%xmm2
vcmpeq_ussd (%ecx),%xmm6,%xmm2
vcmpnge_uqsd %xmm4,%xmm6,%xmm2
vcmpnge_uqsd (%ecx),%xmm6,%xmm2
vcmpngt_uqsd %xmm4,%xmm6,%xmm2
vcmpngt_uqsd (%ecx),%xmm6,%xmm2
vcmpfalse_ossd %xmm4,%xmm6,%xmm2
vcmpfalse_ossd (%ecx),%xmm6,%xmm2
vcmpneq_ossd %xmm4,%xmm6,%xmm2
vcmpneq_ossd (%ecx),%xmm6,%xmm2
vcmpge_oqsd %xmm4,%xmm6,%xmm2
vcmpge_oqsd (%ecx),%xmm6,%xmm2
vcmpgt_oqsd %xmm4,%xmm6,%xmm2
vcmpgt_oqsd (%ecx),%xmm6,%xmm2
vcmptrue_ussd %xmm4,%xmm6,%xmm2
vcmptrue_ussd (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm, xmm
vaddss %xmm4,%xmm6,%xmm2
vaddss (%ecx),%xmm6,%xmm2
vcvtss2sd %xmm4,%xmm6,%xmm2
vcvtss2sd (%ecx),%xmm6,%xmm2
vdivss %xmm4,%xmm6,%xmm2
vdivss (%ecx),%xmm6,%xmm2
vmaxss %xmm4,%xmm6,%xmm2
vmaxss (%ecx),%xmm6,%xmm2
vminss %xmm4,%xmm6,%xmm2
vminss (%ecx),%xmm6,%xmm2
vmulss %xmm4,%xmm6,%xmm2
vmulss (%ecx),%xmm6,%xmm2
vrcpss %xmm4,%xmm6,%xmm2
vrcpss (%ecx),%xmm6,%xmm2
vrsqrtss %xmm4,%xmm6,%xmm2
vrsqrtss (%ecx),%xmm6,%xmm2
vsqrtss %xmm4,%xmm6,%xmm2
vsqrtss (%ecx),%xmm6,%xmm2
vsubss %xmm4,%xmm6,%xmm2
vsubss (%ecx),%xmm6,%xmm2
vcmpeqss %xmm4,%xmm6,%xmm2
vcmpeqss (%ecx),%xmm6,%xmm2
vcmpltss %xmm4,%xmm6,%xmm2
vcmpltss (%ecx),%xmm6,%xmm2
vcmpless %xmm4,%xmm6,%xmm2
vcmpless (%ecx),%xmm6,%xmm2
vcmpunordss %xmm4,%xmm6,%xmm2
vcmpunordss (%ecx),%xmm6,%xmm2
vcmpneqss %xmm4,%xmm6,%xmm2
vcmpneqss (%ecx),%xmm6,%xmm2
vcmpnltss %xmm4,%xmm6,%xmm2
vcmpnltss (%ecx),%xmm6,%xmm2
vcmpnless %xmm4,%xmm6,%xmm2
vcmpnless (%ecx),%xmm6,%xmm2
vcmpordss %xmm4,%xmm6,%xmm2
vcmpordss (%ecx),%xmm6,%xmm2
vcmpeq_uqss %xmm4,%xmm6,%xmm2
vcmpeq_uqss (%ecx),%xmm6,%xmm2
vcmpngess %xmm4,%xmm6,%xmm2
vcmpngess (%ecx),%xmm6,%xmm2
vcmpngtss %xmm4,%xmm6,%xmm2
vcmpngtss (%ecx),%xmm6,%xmm2
vcmpfalsess %xmm4,%xmm6,%xmm2
vcmpfalsess (%ecx),%xmm6,%xmm2
vcmpneq_oqss %xmm4,%xmm6,%xmm2
vcmpneq_oqss (%ecx),%xmm6,%xmm2
vcmpgess %xmm4,%xmm6,%xmm2
vcmpgess (%ecx),%xmm6,%xmm2
vcmpgtss %xmm4,%xmm6,%xmm2
vcmpgtss (%ecx),%xmm6,%xmm2
vcmptruess %xmm4,%xmm6,%xmm2
vcmptruess (%ecx),%xmm6,%xmm2
vcmpeq_osss %xmm4,%xmm6,%xmm2
vcmpeq_osss (%ecx),%xmm6,%xmm2
vcmplt_oqss %xmm4,%xmm6,%xmm2
vcmplt_oqss (%ecx),%xmm6,%xmm2
vcmple_oqss %xmm4,%xmm6,%xmm2
vcmple_oqss (%ecx),%xmm6,%xmm2
vcmpunord_sss %xmm4,%xmm6,%xmm2
vcmpunord_sss (%ecx),%xmm6,%xmm2
vcmpneq_usss %xmm4,%xmm6,%xmm2
vcmpneq_usss (%ecx),%xmm6,%xmm2
vcmpnlt_uqss %xmm4,%xmm6,%xmm2
vcmpnlt_uqss (%ecx),%xmm6,%xmm2
vcmpnle_uqss %xmm4,%xmm6,%xmm2
vcmpnle_uqss (%ecx),%xmm6,%xmm2
vcmpord_sss %xmm4,%xmm6,%xmm2
vcmpord_sss (%ecx),%xmm6,%xmm2
vcmpeq_usss %xmm4,%xmm6,%xmm2
vcmpeq_usss (%ecx),%xmm6,%xmm2
vcmpnge_uqss %xmm4,%xmm6,%xmm2
vcmpnge_uqss (%ecx),%xmm6,%xmm2
vcmpngt_uqss %xmm4,%xmm6,%xmm2
vcmpngt_uqss (%ecx),%xmm6,%xmm2
vcmpfalse_osss %xmm4,%xmm6,%xmm2
vcmpfalse_osss (%ecx),%xmm6,%xmm2
vcmpneq_osss %xmm4,%xmm6,%xmm2
vcmpneq_osss (%ecx),%xmm6,%xmm2
vcmpge_oqss %xmm4,%xmm6,%xmm2
vcmpge_oqss (%ecx),%xmm6,%xmm2
vcmpgt_oqss %xmm4,%xmm6,%xmm2
vcmpgt_oqss (%ecx),%xmm6,%xmm2
vcmptrue_usss %xmm4,%xmm6,%xmm2
vcmptrue_usss (%ecx),%xmm6,%xmm2
# Tests for op xmm/mem32, xmm
vcomiss %xmm4,%xmm6
vcomiss (%ecx),%xmm4
vucomiss %xmm4,%xmm6
vucomiss (%ecx),%xmm4
# Tests for op mem32, xmm
vmovss (%ecx),%xmm4
# Tests for op xmm, mem32
vmovss %xmm4,(%ecx)
# Tests for op xmm/mem32, regl
vcvtss2si %xmm4,%ecx
vcvtss2si (%ecx),%ecx
vcvttss2si %xmm4,%ecx
vcvttss2si (%ecx),%ecx
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd %ecx,%xmm4,%xmm6
vcvtsi2sd (%ecx),%xmm4,%xmm6
vcvtsi2ss %ecx,%xmm4,%xmm6
vcvtsi2ss (%ecx),%xmm4,%xmm6
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss $7,%xmm4,%xmm6,%xmm2
vcmpss $7,(%ecx),%xmm6,%xmm2
vroundss $7,%xmm4,%xmm6,%xmm2
vroundss $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm, xmm, xmm
vmovsd %xmm4,%xmm6,%xmm2
vmovss %xmm4,%xmm6,%xmm2
#Tests with different memory and register operands.
# %eiz is the pseudo "index zero" register: it forces a SIB-byte encoding
# without contributing a real index.
vcvtsi2sdl 0x1234,%xmm0,%xmm7
vcvtsi2sdl (%ebp),%xmm0,%xmm7
vcvtsi2sdl (%esp),%xmm0,%xmm7
vcvtsi2sdl 0x99(%ebp),%xmm0,%xmm7
vcvtsi2sdl 0x99(,%eiz),%xmm0,%xmm7
vcvtsi2sdl 0x99(,%eiz,2),%xmm0,%xmm7
vcvtsi2sdl 0x99(%eax,%eiz),%xmm0,%xmm7
vcvtsi2sdl 0x99(%eax,%eiz,2),%xmm0,%xmm7
vcvtsi2sdl 0x99(%eax,%ebx,4),%xmm0,%xmm7
vcvtsi2sdl 0x99(%esp,%ecx,8),%xmm0,%xmm7
vcvtsi2sdl 0x99(%ebp,%edx,1),%xmm0,%xmm7
# Intel-syntax copies of the AT&T tests above; each memory form is tested
# both with an explicit size qualifier (QWORD/DWORD PTR) and without one.
.intel_syntax noprefix
# Tests for op xmm/mem64, xmm
vcomisd xmm6,xmm4
vcomisd xmm4,QWORD PTR [ecx]
vcomisd xmm4,[ecx]
vucomisd xmm6,xmm4
vucomisd xmm4,QWORD PTR [ecx]
vucomisd xmm4,[ecx]
# Tests for op mem64, xmm
vmovsd xmm4,QWORD PTR [ecx]
vmovsd xmm4,[ecx]
# Tests for op xmm, mem64
vmovsd QWORD PTR [ecx],xmm4
vmovsd [ecx],xmm4
# Tests for op xmm/mem64, regl
vcvtsd2si ecx,xmm4
vcvtsd2si ecx,QWORD PTR [ecx]
vcvtsd2si ecx,[ecx]
vcvttsd2si ecx,xmm4
vcvttsd2si ecx,QWORD PTR [ecx]
vcvttsd2si ecx,[ecx]
# Tests for op imm8, xmm/mem64, xmm, xmm
vcmpsd xmm2,xmm6,xmm4,7
vcmpsd xmm2,xmm6,QWORD PTR [ecx],7
vcmpsd xmm2,xmm6,[ecx],7
vroundsd xmm2,xmm6,xmm4,7
vroundsd xmm2,xmm6,QWORD PTR [ecx],7
vroundsd xmm2,xmm6,[ecx],7
# Tests for op xmm/mem64, xmm, xmm
vaddsd xmm2,xmm6,xmm4
vaddsd xmm2,xmm6,QWORD PTR [ecx]
vaddsd xmm2,xmm6,[ecx]
vcvtsd2ss xmm2,xmm6,xmm4
vcvtsd2ss xmm2,xmm6,QWORD PTR [ecx]
vcvtsd2ss xmm2,xmm6,[ecx]
vdivsd xmm2,xmm6,xmm4
vdivsd xmm2,xmm6,QWORD PTR [ecx]
vdivsd xmm2,xmm6,[ecx]
vmaxsd xmm2,xmm6,xmm4
vmaxsd xmm2,xmm6,QWORD PTR [ecx]
vmaxsd xmm2,xmm6,[ecx]
vminsd xmm2,xmm6,xmm4
vminsd xmm2,xmm6,QWORD PTR [ecx]
vminsd xmm2,xmm6,[ecx]
vmulsd xmm2,xmm6,xmm4
vmulsd xmm2,xmm6,QWORD PTR [ecx]
vmulsd xmm2,xmm6,[ecx]
vsqrtsd xmm2,xmm6,xmm4
vsqrtsd xmm2,xmm6,QWORD PTR [ecx]
vsqrtsd xmm2,xmm6,[ecx]
vsubsd xmm2,xmm6,xmm4
vsubsd xmm2,xmm6,QWORD PTR [ecx]
vsubsd xmm2,xmm6,[ecx]
vcmpeqsd xmm2,xmm6,xmm4
vcmpeqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeqsd xmm2,xmm6,[ecx]
vcmpltsd xmm2,xmm6,xmm4
vcmpltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpltsd xmm2,xmm6,[ecx]
vcmplesd xmm2,xmm6,xmm4
vcmplesd xmm2,xmm6,QWORD PTR [ecx]
vcmplesd xmm2,xmm6,[ecx]
vcmpunordsd xmm2,xmm6,xmm4
vcmpunordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpunordsd xmm2,xmm6,[ecx]
vcmpneqsd xmm2,xmm6,xmm4
vcmpneqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneqsd xmm2,xmm6,[ecx]
vcmpnltsd xmm2,xmm6,xmm4
vcmpnltsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnltsd xmm2,xmm6,[ecx]
vcmpnlesd xmm2,xmm6,xmm4
vcmpnlesd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlesd xmm2,xmm6,[ecx]
vcmpordsd xmm2,xmm6,xmm4
vcmpordsd xmm2,xmm6,QWORD PTR [ecx]
vcmpordsd xmm2,xmm6,[ecx]
vcmpeq_uqsd xmm2,xmm6,xmm4
vcmpeq_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_uqsd xmm2,xmm6,[ecx]
vcmpngesd xmm2,xmm6,xmm4
vcmpngesd xmm2,xmm6,QWORD PTR [ecx]
vcmpngesd xmm2,xmm6,[ecx]
vcmpngtsd xmm2,xmm6,xmm4
vcmpngtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngtsd xmm2,xmm6,[ecx]
vcmpfalsesd xmm2,xmm6,xmm4
vcmpfalsesd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalsesd xmm2,xmm6,[ecx]
vcmpneq_oqsd xmm2,xmm6,xmm4
vcmpneq_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_oqsd xmm2,xmm6,[ecx]
vcmpgesd xmm2,xmm6,xmm4
vcmpgesd xmm2,xmm6,QWORD PTR [ecx]
vcmpgesd xmm2,xmm6,[ecx]
vcmpgtsd xmm2,xmm6,xmm4
vcmpgtsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgtsd xmm2,xmm6,[ecx]
vcmptruesd xmm2,xmm6,xmm4
vcmptruesd xmm2,xmm6,QWORD PTR [ecx]
vcmptruesd xmm2,xmm6,[ecx]
vcmpeq_ossd xmm2,xmm6,xmm4
vcmpeq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ossd xmm2,xmm6,[ecx]
vcmplt_oqsd xmm2,xmm6,xmm4
vcmplt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmplt_oqsd xmm2,xmm6,[ecx]
vcmple_oqsd xmm2,xmm6,xmm4
vcmple_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmple_oqsd xmm2,xmm6,[ecx]
vcmpunord_ssd xmm2,xmm6,xmm4
vcmpunord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpunord_ssd xmm2,xmm6,[ecx]
vcmpneq_ussd xmm2,xmm6,xmm4
vcmpneq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ussd xmm2,xmm6,[ecx]
vcmpnlt_uqsd xmm2,xmm6,xmm4
vcmpnlt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnlt_uqsd xmm2,xmm6,[ecx]
vcmpnle_uqsd xmm2,xmm6,xmm4
vcmpnle_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnle_uqsd xmm2,xmm6,[ecx]
vcmpord_ssd xmm2,xmm6,xmm4
vcmpord_ssd xmm2,xmm6,QWORD PTR [ecx]
vcmpord_ssd xmm2,xmm6,[ecx]
vcmpeq_ussd xmm2,xmm6,xmm4
vcmpeq_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmpeq_ussd xmm2,xmm6,[ecx]
vcmpnge_uqsd xmm2,xmm6,xmm4
vcmpnge_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpnge_uqsd xmm2,xmm6,[ecx]
vcmpngt_uqsd xmm2,xmm6,xmm4
vcmpngt_uqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpngt_uqsd xmm2,xmm6,[ecx]
vcmpfalse_ossd xmm2,xmm6,xmm4
vcmpfalse_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpfalse_ossd xmm2,xmm6,[ecx]
vcmpneq_ossd xmm2,xmm6,xmm4
vcmpneq_ossd xmm2,xmm6,QWORD PTR [ecx]
vcmpneq_ossd xmm2,xmm6,[ecx]
vcmpge_oqsd xmm2,xmm6,xmm4
vcmpge_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpge_oqsd xmm2,xmm6,[ecx]
vcmpgt_oqsd xmm2,xmm6,xmm4
vcmpgt_oqsd xmm2,xmm6,QWORD PTR [ecx]
vcmpgt_oqsd xmm2,xmm6,[ecx]
vcmptrue_ussd xmm2,xmm6,xmm4
vcmptrue_ussd xmm2,xmm6,QWORD PTR [ecx]
vcmptrue_ussd xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm, xmm
vaddss xmm2,xmm6,xmm4
vaddss xmm2,xmm6,DWORD PTR [ecx]
vaddss xmm2,xmm6,[ecx]
vcvtss2sd xmm2,xmm6,xmm4
vcvtss2sd xmm2,xmm6,DWORD PTR [ecx]
vcvtss2sd xmm2,xmm6,[ecx]
vdivss xmm2,xmm6,xmm4
vdivss xmm2,xmm6,DWORD PTR [ecx]
vdivss xmm2,xmm6,[ecx]
vmaxss xmm2,xmm6,xmm4
vmaxss xmm2,xmm6,DWORD PTR [ecx]
vmaxss xmm2,xmm6,[ecx]
vminss xmm2,xmm6,xmm4
vminss xmm2,xmm6,DWORD PTR [ecx]
vminss xmm2,xmm6,[ecx]
vmulss xmm2,xmm6,xmm4
vmulss xmm2,xmm6,DWORD PTR [ecx]
vmulss xmm2,xmm6,[ecx]
vrcpss xmm2,xmm6,xmm4
vrcpss xmm2,xmm6,DWORD PTR [ecx]
vrcpss xmm2,xmm6,[ecx]
vrsqrtss xmm2,xmm6,xmm4
vrsqrtss xmm2,xmm6,DWORD PTR [ecx]
vrsqrtss xmm2,xmm6,[ecx]
vsqrtss xmm2,xmm6,xmm4
vsqrtss xmm2,xmm6,DWORD PTR [ecx]
vsqrtss xmm2,xmm6,[ecx]
vsubss xmm2,xmm6,xmm4
vsubss xmm2,xmm6,DWORD PTR [ecx]
vsubss xmm2,xmm6,[ecx]
vcmpeqss xmm2,xmm6,xmm4
vcmpeqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeqss xmm2,xmm6,[ecx]
vcmpltss xmm2,xmm6,xmm4
vcmpltss xmm2,xmm6,DWORD PTR [ecx]
vcmpltss xmm2,xmm6,[ecx]
vcmpless xmm2,xmm6,xmm4
vcmpless xmm2,xmm6,DWORD PTR [ecx]
vcmpless xmm2,xmm6,[ecx]
vcmpunordss xmm2,xmm6,xmm4
vcmpunordss xmm2,xmm6,DWORD PTR [ecx]
vcmpunordss xmm2,xmm6,[ecx]
vcmpneqss xmm2,xmm6,xmm4
vcmpneqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneqss xmm2,xmm6,[ecx]
vcmpnltss xmm2,xmm6,xmm4
vcmpnltss xmm2,xmm6,DWORD PTR [ecx]
vcmpnltss xmm2,xmm6,[ecx]
vcmpnless xmm2,xmm6,xmm4
vcmpnless xmm2,xmm6,DWORD PTR [ecx]
vcmpnless xmm2,xmm6,[ecx]
vcmpordss xmm2,xmm6,xmm4
vcmpordss xmm2,xmm6,DWORD PTR [ecx]
vcmpordss xmm2,xmm6,[ecx]
vcmpeq_uqss xmm2,xmm6,xmm4
vcmpeq_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_uqss xmm2,xmm6,[ecx]
vcmpngess xmm2,xmm6,xmm4
vcmpngess xmm2,xmm6,DWORD PTR [ecx]
vcmpngess xmm2,xmm6,[ecx]
vcmpngtss xmm2,xmm6,xmm4
vcmpngtss xmm2,xmm6,DWORD PTR [ecx]
vcmpngtss xmm2,xmm6,[ecx]
vcmpfalsess xmm2,xmm6,xmm4
vcmpfalsess xmm2,xmm6,DWORD PTR [ecx]
vcmpfalsess xmm2,xmm6,[ecx]
vcmpneq_oqss xmm2,xmm6,xmm4
vcmpneq_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_oqss xmm2,xmm6,[ecx]
vcmpgess xmm2,xmm6,xmm4
vcmpgess xmm2,xmm6,DWORD PTR [ecx]
vcmpgess xmm2,xmm6,[ecx]
vcmpgtss xmm2,xmm6,xmm4
vcmpgtss xmm2,xmm6,DWORD PTR [ecx]
vcmpgtss xmm2,xmm6,[ecx]
vcmptruess xmm2,xmm6,xmm4
vcmptruess xmm2,xmm6,DWORD PTR [ecx]
vcmptruess xmm2,xmm6,[ecx]
vcmpeq_osss xmm2,xmm6,xmm4
vcmpeq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_osss xmm2,xmm6,[ecx]
vcmplt_oqss xmm2,xmm6,xmm4
vcmplt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmplt_oqss xmm2,xmm6,[ecx]
vcmple_oqss xmm2,xmm6,xmm4
vcmple_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmple_oqss xmm2,xmm6,[ecx]
vcmpunord_sss xmm2,xmm6,xmm4
vcmpunord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpunord_sss xmm2,xmm6,[ecx]
vcmpneq_usss xmm2,xmm6,xmm4
vcmpneq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_usss xmm2,xmm6,[ecx]
vcmpnlt_uqss xmm2,xmm6,xmm4
vcmpnlt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnlt_uqss xmm2,xmm6,[ecx]
vcmpnle_uqss xmm2,xmm6,xmm4
vcmpnle_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnle_uqss xmm2,xmm6,[ecx]
vcmpord_sss xmm2,xmm6,xmm4
vcmpord_sss xmm2,xmm6,DWORD PTR [ecx]
vcmpord_sss xmm2,xmm6,[ecx]
vcmpeq_usss xmm2,xmm6,xmm4
vcmpeq_usss xmm2,xmm6,DWORD PTR [ecx]
vcmpeq_usss xmm2,xmm6,[ecx]
vcmpnge_uqss xmm2,xmm6,xmm4
vcmpnge_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpnge_uqss xmm2,xmm6,[ecx]
vcmpngt_uqss xmm2,xmm6,xmm4
vcmpngt_uqss xmm2,xmm6,DWORD PTR [ecx]
vcmpngt_uqss xmm2,xmm6,[ecx]
vcmpfalse_osss xmm2,xmm6,xmm4
vcmpfalse_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpfalse_osss xmm2,xmm6,[ecx]
vcmpneq_osss xmm2,xmm6,xmm4
vcmpneq_osss xmm2,xmm6,DWORD PTR [ecx]
vcmpneq_osss xmm2,xmm6,[ecx]
vcmpge_oqss xmm2,xmm6,xmm4
vcmpge_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpge_oqss xmm2,xmm6,[ecx]
vcmpgt_oqss xmm2,xmm6,xmm4
vcmpgt_oqss xmm2,xmm6,DWORD PTR [ecx]
vcmpgt_oqss xmm2,xmm6,[ecx]
vcmptrue_usss xmm2,xmm6,xmm4
vcmptrue_usss xmm2,xmm6,DWORD PTR [ecx]
vcmptrue_usss xmm2,xmm6,[ecx]
# Tests for op xmm/mem32, xmm
vcomiss xmm6,xmm4
vcomiss xmm4,DWORD PTR [ecx]
vcomiss xmm4,[ecx]
vucomiss xmm6,xmm4
vucomiss xmm4,DWORD PTR [ecx]
vucomiss xmm4,[ecx]
# Tests for op mem32, xmm
vmovss xmm4,DWORD PTR [ecx]
vmovss xmm4,[ecx]
# Tests for op xmm, mem32
vmovss DWORD PTR [ecx],xmm4
vmovss [ecx],xmm4
# Tests for op xmm/mem32, regl
vcvtss2si ecx,xmm4
vcvtss2si ecx,DWORD PTR [ecx]
vcvtss2si ecx,[ecx]
vcvttss2si ecx,xmm4
vcvttss2si ecx,DWORD PTR [ecx]
vcvttss2si ecx,[ecx]
# Tests for op regl/mem32, xmm, xmm
vcvtsi2sd xmm6,xmm4,ecx
vcvtsi2sd xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2sd xmm6,xmm4,[ecx]
vcvtsi2ss xmm6,xmm4,ecx
vcvtsi2ss xmm6,xmm4,DWORD PTR [ecx]
vcvtsi2ss xmm6,xmm4,[ecx]
# Tests for op imm8, xmm/mem32, xmm, xmm
vcmpss xmm2,xmm6,xmm4,7
vcmpss xmm2,xmm6,DWORD PTR [ecx],7
vcmpss xmm2,xmm6,[ecx],7
vroundss xmm2,xmm6,xmm4,7
vroundss xmm2,xmm6,DWORD PTR [ecx],7
vroundss xmm2,xmm6,[ecx],7
# Tests for op xmm, xmm, xmm
vmovsd xmm2,xmm6,xmm4
vmovss xmm2,xmm6,xmm4
#Tests with different memory and register operands.
# eiz is the pseudo "index zero" register (forces a SIB byte).
vcvtsi2sd xmm7,xmm0,DWORD PTR ds:0x1234
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp]
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*1+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eiz*2+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*1+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+eiz*2+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [eax+ebx*4+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [esp+ecx*8+0x99]
vcvtsi2sd xmm7,xmm0,DWORD PTR [ebp+edx*1+0x99]
# ---- extraction-residue boundary (was dataset metadata rows) ----
# next file: gas/testsuite/gas/i386/x86-64-align-branch-1.s
# repo: stsp/binutils-ia16, size: 1,337 bytes
.text
.globl foo
.p2align 4
foo:
movl %eax, %fs:0x1
pushq %rbp
pushq %rbp
pushq %rbp
movq %rsp, %rbp
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
cmp %rax, %rbp
je .L_2
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %edi, -8(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
popq %rbp
popq %rbp
je .L_2
popq %rbp
je .L_2
movl %eax, -4(%rbp)
movl %esi, -12(%rbp)
movl %edi, -8(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
popq %rbp
popq %rbp
jmp .L_3
jmp .L_3
jmp .L_3
movl %eax, -4(%rbp)
movl %esi, -12(%rbp)
movl %edi, -8(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
movl %esi, -12(%rbp)
popq %rbp
popq %rbp
cmp %rax, %rbp
je .L_2
jmp .L_3
.L_2:
movl -12(%rbp), %eax
movl %eax, -4(%rbp)
.L_3:
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
movl %esi, -1200(%rbp)
jmp .L_3
popq %rbp
retq
# ---- extraction-residue boundary (was dataset metadata rows) ----
# next file: gas/testsuite/gas/i386/avx512er.s
# repo: stsp/binutils-ia16, size: 12,047 bytes
# Check 32bit AVX512ER instructions
.allow_index_reg
.text
_start:
vexp2ps %zmm5, %zmm6 # AVX512ER
vexp2ps {sae}, %zmm5, %zmm6 # AVX512ER
vexp2ps (%ecx), %zmm6 # AVX512ER
vexp2ps -123456(%esp,%esi,8), %zmm6 # AVX512ER
vexp2ps (%eax){1to16}, %zmm6 # AVX512ER
vexp2ps 8128(%edx), %zmm6 # AVX512ER Disp8
vexp2ps 8192(%edx), %zmm6 # AVX512ER
vexp2ps -8192(%edx), %zmm6 # AVX512ER Disp8
vexp2ps -8256(%edx), %zmm6 # AVX512ER
vexp2ps 508(%edx){1to16}, %zmm6 # AVX512ER Disp8
vexp2ps 512(%edx){1to16}, %zmm6 # AVX512ER
vexp2ps -512(%edx){1to16}, %zmm6 # AVX512ER Disp8
vexp2ps -516(%edx){1to16}, %zmm6 # AVX512ER
vexp2pd %zmm5, %zmm6 # AVX512ER
vexp2pd {sae}, %zmm5, %zmm6 # AVX512ER
vexp2pd (%ecx), %zmm6 # AVX512ER
vexp2pd -123456(%esp,%esi,8), %zmm6 # AVX512ER
vexp2pd (%eax){1to8}, %zmm6 # AVX512ER
vexp2pd 8128(%edx), %zmm6 # AVX512ER Disp8
vexp2pd 8192(%edx), %zmm6 # AVX512ER
vexp2pd -8192(%edx), %zmm6 # AVX512ER Disp8
vexp2pd -8256(%edx), %zmm6 # AVX512ER
vexp2pd 1016(%edx){1to8}, %zmm6 # AVX512ER Disp8
vexp2pd 1024(%edx){1to8}, %zmm6 # AVX512ER
vexp2pd -1024(%edx){1to8}, %zmm6 # AVX512ER Disp8
vexp2pd -1032(%edx){1to8}, %zmm6 # AVX512ER
vrcp28ps %zmm5, %zmm6 # AVX512ER
vrcp28ps %zmm5, %zmm6{%k7} # AVX512ER
vrcp28ps %zmm5, %zmm6{%k7}{z} # AVX512ER
vrcp28ps {sae}, %zmm5, %zmm6 # AVX512ER
vrcp28ps (%ecx), %zmm6 # AVX512ER
vrcp28ps -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrcp28ps (%eax){1to16}, %zmm6 # AVX512ER
vrcp28ps 8128(%edx), %zmm6 # AVX512ER Disp8
vrcp28ps 8192(%edx), %zmm6 # AVX512ER
vrcp28ps -8192(%edx), %zmm6 # AVX512ER Disp8
vrcp28ps -8256(%edx), %zmm6 # AVX512ER
vrcp28ps 508(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrcp28ps 512(%edx){1to16}, %zmm6 # AVX512ER
vrcp28ps -512(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrcp28ps -516(%edx){1to16}, %zmm6 # AVX512ER
vrcp28pd %zmm5, %zmm6 # AVX512ER
vrcp28pd %zmm5, %zmm6{%k7} # AVX512ER
vrcp28pd %zmm5, %zmm6{%k7}{z} # AVX512ER
vrcp28pd {sae}, %zmm5, %zmm6 # AVX512ER
vrcp28pd (%ecx), %zmm6 # AVX512ER
vrcp28pd -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrcp28pd (%eax){1to8}, %zmm6 # AVX512ER
vrcp28pd 8128(%edx), %zmm6 # AVX512ER Disp8
vrcp28pd 8192(%edx), %zmm6 # AVX512ER
vrcp28pd -8192(%edx), %zmm6 # AVX512ER Disp8
vrcp28pd -8256(%edx), %zmm6 # AVX512ER
vrcp28pd 1016(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrcp28pd 1024(%edx){1to8}, %zmm6 # AVX512ER
vrcp28pd -1024(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrcp28pd -1032(%edx){1to8}, %zmm6 # AVX512ER
vrcp28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrcp28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrcp28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrcp28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrcp28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ps %zmm5, %zmm6 # AVX512ER
vrsqrt28ps %zmm5, %zmm6{%k7} # AVX512ER
vrsqrt28ps %zmm5, %zmm6{%k7}{z} # AVX512ER
vrsqrt28ps {sae}, %zmm5, %zmm6 # AVX512ER
vrsqrt28ps (%ecx), %zmm6 # AVX512ER
vrsqrt28ps -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrsqrt28ps (%eax){1to16}, %zmm6 # AVX512ER
vrsqrt28ps 8128(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28ps 8192(%edx), %zmm6 # AVX512ER
vrsqrt28ps -8192(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28ps -8256(%edx), %zmm6 # AVX512ER
vrsqrt28ps 508(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrsqrt28ps 512(%edx){1to16}, %zmm6 # AVX512ER
vrsqrt28ps -512(%edx){1to16}, %zmm6 # AVX512ER Disp8
vrsqrt28ps -516(%edx){1to16}, %zmm6 # AVX512ER
vrsqrt28pd %zmm5, %zmm6 # AVX512ER
vrsqrt28pd %zmm5, %zmm6{%k7} # AVX512ER
vrsqrt28pd %zmm5, %zmm6{%k7}{z} # AVX512ER
vrsqrt28pd {sae}, %zmm5, %zmm6 # AVX512ER
vrsqrt28pd (%ecx), %zmm6 # AVX512ER
vrsqrt28pd -123456(%esp,%esi,8), %zmm6 # AVX512ER
vrsqrt28pd (%eax){1to8}, %zmm6 # AVX512ER
vrsqrt28pd 8128(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28pd 8192(%edx), %zmm6 # AVX512ER
vrsqrt28pd -8192(%edx), %zmm6 # AVX512ER Disp8
vrsqrt28pd -8256(%edx), %zmm6 # AVX512ER
vrsqrt28pd 1016(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrsqrt28pd 1024(%edx){1to8}, %zmm6 # AVX512ER
vrsqrt28pd -1024(%edx){1to8}, %zmm6 # AVX512ER Disp8
vrsqrt28pd -1032(%edx){1to8}, %zmm6 # AVX512ER
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrsqrt28ss {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss 508(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28ss 512(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28ss -512(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28ss -516(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512ER
vrsqrt28sd {sae}, %xmm4, %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd (%ecx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd 1016(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28sd 1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER
vrsqrt28sd -1024(%edx), %xmm5, %xmm6{%k7} # AVX512ER Disp8
vrsqrt28sd -1032(%edx), %xmm5, %xmm6{%k7} # AVX512ER
.intel_syntax noprefix
vexp2ps zmm6, zmm5 # AVX512ER
vexp2ps zmm6, zmm5{sae} # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [ecx] # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vexp2ps zmm6, [eax]{1to16} # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vexp2ps zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vexp2ps zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vexp2ps zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vexp2ps zmm6, [edx+508]{1to16} # AVX512ER Disp8
vexp2ps zmm6, [edx+512]{1to16} # AVX512ER
vexp2ps zmm6, [edx-512]{1to16} # AVX512ER Disp8
vexp2ps zmm6, [edx-516]{1to16} # AVX512ER
vexp2pd zmm6, zmm5 # AVX512ER
vexp2pd zmm6, zmm5{sae} # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [ecx] # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vexp2pd zmm6, [eax]{1to8} # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vexp2pd zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vexp2pd zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vexp2pd zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vexp2pd zmm6, [edx+1016]{1to8} # AVX512ER Disp8
vexp2pd zmm6, [edx+1024]{1to8} # AVX512ER
vexp2pd zmm6, [edx-1024]{1to8} # AVX512ER Disp8
vexp2pd zmm6, [edx-1032]{1to8} # AVX512ER
vrcp28ps zmm6, zmm5 # AVX512ER
vrcp28ps zmm6{k7}, zmm5 # AVX512ER
vrcp28ps zmm6{k7}{z}, zmm5 # AVX512ER
vrcp28ps zmm6, zmm5{sae} # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28ps zmm6, [eax]{1to16} # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrcp28ps zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrcp28ps zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrcp28ps zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrcp28ps zmm6, [edx+508]{1to16} # AVX512ER Disp8
vrcp28ps zmm6, [edx+512]{1to16} # AVX512ER
vrcp28ps zmm6, [edx-512]{1to16} # AVX512ER Disp8
vrcp28ps zmm6, [edx-516]{1to16} # AVX512ER
vrcp28pd zmm6, zmm5 # AVX512ER
vrcp28pd zmm6{k7}, zmm5 # AVX512ER
vrcp28pd zmm6{k7}{z}, zmm5 # AVX512ER
vrcp28pd zmm6, zmm5{sae} # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28pd zmm6, [eax]{1to8} # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrcp28pd zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrcp28pd zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrcp28pd zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrcp28pd zmm6, [edx+1016]{1to8} # AVX512ER Disp8
vrcp28pd zmm6, [edx+1024]{1to8} # AVX512ER
vrcp28pd zmm6, [edx-1024]{1to8} # AVX512ER Disp8
vrcp28pd zmm6, [edx-1032]{1to8} # AVX512ER
vrcp28ss xmm6{k7}, xmm5, xmm4 # AVX512ER
vrcp28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrcp28ss xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512ER Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512ER
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512ER Disp8
vrcp28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, xmm4 # AVX512ER
vrcp28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrcp28sd xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512ER Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512ER
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512ER Disp8
vrcp28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512ER
vrsqrt28ps zmm6, zmm5 # AVX512ER
vrsqrt28ps zmm6{k7}, zmm5 # AVX512ER
vrsqrt28ps zmm6{k7}{z}, zmm5 # AVX512ER
vrsqrt28ps zmm6, zmm5{sae} # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28ps zmm6, [eax]{1to16} # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrsqrt28ps zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrsqrt28ps zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrsqrt28ps zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrsqrt28ps zmm6, [edx+508]{1to16} # AVX512ER Disp8
vrsqrt28ps zmm6, [edx+512]{1to16} # AVX512ER
vrsqrt28ps zmm6, [edx-512]{1to16} # AVX512ER Disp8
vrsqrt28ps zmm6, [edx-516]{1to16} # AVX512ER
vrsqrt28pd zmm6, zmm5 # AVX512ER
vrsqrt28pd zmm6{k7}, zmm5 # AVX512ER
vrsqrt28pd zmm6{k7}{z}, zmm5 # AVX512ER
vrsqrt28pd zmm6, zmm5{sae} # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [ecx] # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28pd zmm6, [eax]{1to8} # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [edx+8128] # AVX512ER Disp8
vrsqrt28pd zmm6, ZMMWORD PTR [edx+8192] # AVX512ER
vrsqrt28pd zmm6, ZMMWORD PTR [edx-8192] # AVX512ER Disp8
vrsqrt28pd zmm6, ZMMWORD PTR [edx-8256] # AVX512ER
vrsqrt28pd zmm6, [edx+1016]{1to8} # AVX512ER Disp8
vrsqrt28pd zmm6, [edx+1024]{1to8} # AVX512ER
vrsqrt28pd zmm6, [edx-1024]{1to8} # AVX512ER Disp8
vrsqrt28pd zmm6, [edx-1032]{1to8} # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, xmm4 # AVX512ER
vrsqrt28ss xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [ecx] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+508] # AVX512ER Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx+512] # AVX512ER
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-512] # AVX512ER Disp8
vrsqrt28ss xmm6{k7}, xmm5, DWORD PTR [edx-516] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, xmm4 # AVX512ER
vrsqrt28sd xmm6{k7}{z}, xmm5, xmm4 # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, xmm4{sae} # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [ecx] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [esp+esi*8-123456] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1016] # AVX512ER Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx+1024] # AVX512ER
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1024] # AVX512ER Disp8
vrsqrt28sd xmm6{k7}, xmm5, QWORD PTR [edx-1032] # AVX512ER
# ---- extraction-residue boundary (was dataset metadata rows) ----
# next file: gas/testsuite/gas/i386/x86-64-sib.s
# repo: stsp/binutils-ia16, size: 1,762 bytes
#Test the special case of the index bits, 0x4, in SIB.
.text
.allow_index_reg
foo:
mov -30,%ebx
mov -30(,%riz),%ebx
mov -30(,%riz,1),%eax
mov -30(,%riz,2),%eax
mov -30(,%riz,4),%eax
mov -30(,%riz,8),%eax
mov 30,%eax
mov 30(,%riz),%eax
mov 30(,%riz,1),%eax
mov 30(,%riz,2),%eax
mov 30(,%riz,4),%eax
mov 30(,%riz,8),%eax
mov (%rbx),%eax
mov (%rbx,%riz),%eax
mov (%rbx,%riz,1),%eax
mov (%rbx,%riz,2),%eax
mov (%rbx,%riz,4),%eax
mov (%rbx,%riz,8),%eax
mov (%rsp),%eax
mov (%rsp,%riz),%eax
mov (%rsp,%riz,1),%eax
mov (%rsp,%riz,2),%eax
mov (%rsp,%riz,4),%eax
mov (%rsp,%riz,8),%eax
mov (%r12),%eax
mov (%r12,%riz),%eax
mov (%r12,%riz,1),%eax
mov (%r12,%riz,2),%eax
mov (%r12,%riz,4),%eax
mov (%r12,%riz,8),%eax
.intel_syntax noprefix
mov eax,DWORD PTR [riz*1-30]
mov eax,DWORD PTR [riz*2-30]
mov eax,DWORD PTR [riz*4-30]
mov eax,DWORD PTR [riz*8-30]
mov eax,DWORD PTR [riz*1+30]
mov eax,DWORD PTR [riz*2+30]
mov eax,DWORD PTR [riz*4+30]
mov eax,DWORD PTR [riz*8+30]
mov eax,DWORD PTR [rbx+riz]
mov eax,DWORD PTR [rbx+riz*1]
mov eax,DWORD PTR [rbx+riz*2]
mov eax,DWORD PTR [rbx+riz*4]
mov eax,DWORD PTR [rbx+riz*8]
mov eax,DWORD PTR [rsp]
mov eax,DWORD PTR [rsp+riz]
mov eax,DWORD PTR [rsp+riz*1]
mov eax,DWORD PTR [rsp+riz*2]
mov eax,DWORD PTR [rsp+riz*4]
mov eax,DWORD PTR [rsp+riz*8]
mov eax,DWORD PTR [r12]
mov eax,DWORD PTR [r12+riz]
mov eax,DWORD PTR [r12+riz*1]
mov eax,DWORD PTR [r12+riz*2]
mov eax,DWORD PTR [r12+riz*4]
mov eax,DWORD PTR [r12+riz*8]
# ---- extraction-residue boundary (was dataset metadata rows) ----
# next file: gas/testsuite/gas/i386/avx2.s
# repo: stsp/binutils-ia16, size: 6,555 bytes
# Check i386 AVX2 instructions
.allow_index_reg
.text
_start:
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vpmaskmovd (%ecx),%ymm4,%ymm6
vpmaskmovd %ymm4,%ymm6,(%ecx)
vpmaskmovq (%ecx),%ymm4,%ymm6
vpmaskmovq %ymm4,%ymm6,(%ecx)
# Tests for op imm8, ymm/mem256, ymm
vpermpd $7,%ymm6,%ymm2
vpermpd $7,(%ecx),%ymm6
vpermq $7,%ymm6,%ymm2
vpermq $7,(%ecx),%ymm6
# Tests for op ymm/mem256, ymm, ymm
vpermd %ymm4,%ymm6,%ymm2
vpermd (%ecx),%ymm6,%ymm2
vpermps %ymm4,%ymm6,%ymm2
vpermps (%ecx),%ymm6,%ymm2
vpsllvd %ymm4,%ymm6,%ymm2
vpsllvd (%ecx),%ymm6,%ymm2
vpsllvq %ymm4,%ymm6,%ymm2
vpsllvq (%ecx),%ymm6,%ymm2
vpsravd %ymm4,%ymm6,%ymm2
vpsravd (%ecx),%ymm6,%ymm2
vpsrlvd %ymm4,%ymm6,%ymm2
vpsrlvd (%ecx),%ymm6,%ymm2
vpsrlvq %ymm4,%ymm6,%ymm2
vpsrlvq (%ecx),%ymm6,%ymm2
# Tests for op mem256, ymm
vmovntdqa (%ecx),%ymm4
# Tests for op ymm, xmm
vbroadcastsd %xmm4,%ymm6
vbroadcastss %xmm4,%ymm6
# Tests for op imm8, ymm/mem256, ymm, ymm
vpblendd $7,%ymm4,%ymm6,%ymm2
vpblendd $7,(%ecx),%ymm6,%ymm2
vperm2i128 $7,%ymm4,%ymm6,%ymm2
vperm2i128 $7,(%ecx),%ymm6,%ymm2
# Tests for op imm8, xmm/mem128, ymm, ymm
vinserti128 $7,%xmm4,%ymm4,%ymm6
vinserti128 $7,(%ecx),%ymm4,%ymm6
# Tests for op mem128, ymm
vbroadcasti128 (%ecx),%ymm4
# Tests for op xmm/mem128, xmm, xmm
vpsllvd %xmm4,%xmm6,%xmm2
vpsllvd (%ecx),%xmm6,%xmm7
vpsllvq %xmm4,%xmm6,%xmm2
vpsllvq (%ecx),%xmm6,%xmm7
vpsravd %xmm4,%xmm6,%xmm2
vpsravd (%ecx),%xmm6,%xmm7
vpsrlvd %xmm4,%xmm6,%xmm2
vpsrlvd (%ecx),%xmm6,%xmm7
vpsrlvq %xmm4,%xmm6,%xmm2
vpsrlvq (%ecx),%xmm6,%xmm7
# Tests for op mem128, xmm, xmm
vpmaskmovd (%ecx),%xmm4,%xmm6
vpmaskmovq (%ecx),%xmm4,%xmm6
# Tests for op imm8, ymm, xmm128/mem
vextracti128 $7,%ymm4,%xmm6
vextracti128 $7,%ymm4,(%ecx)
# Tests for op xmm, xmm, mem128
vpmaskmovd %xmm4,%xmm6,(%ecx)
vpmaskmovq %xmm4,%xmm6,(%ecx)
# Tests for op imm8, xmm/mem128, xmm, xmm
vpblendd $7,%xmm4,%xmm6,%xmm2
vpblendd $7,(%ecx),%xmm6,%xmm2
# Tests for op xmm/mem64, xmm
vpbroadcastq %xmm4,%xmm6
vpbroadcastq (%ecx),%xmm4
# Tests for op xmm/mem64, ymm
vpbroadcastq %xmm4,%ymm6
vpbroadcastq (%ecx),%ymm4
# Tests for op xmm/mem32, ymm
vpbroadcastd %xmm4,%ymm4
vpbroadcastd (%ecx),%ymm4
# Tests for op xmm/mem32, xmm
vpbroadcastd %xmm4,%xmm6
vpbroadcastd (%ecx),%xmm4
# Tests for op xmm/m16, xmm
vpbroadcastw %xmm4,%xmm6
vpbroadcastw (%ecx),%xmm4
# Tests for op xmm/m16, ymm
vpbroadcastw %xmm4,%ymm6
vpbroadcastw (%ecx),%ymm4
# Tests for op xmm/m8, xmm
vpbroadcastb %xmm4,%xmm6
vpbroadcastb (%ecx),%xmm4
# Tests for op xmm/m8, ymm
vpbroadcastb %xmm4,%ymm6
vpbroadcastb (%ecx),%ymm4
# Tests for op xmm, xmm
vbroadcastss %xmm4,%xmm6
.intel_syntax noprefix
# Tests for op mem256, mask, ymm
# Tests for op ymm, mask, mem256
vpmaskmovd ymm6,ymm4,YMMWORD PTR [ecx]
vpmaskmovd YMMWORD PTR [ecx],ymm6,ymm4
vpmaskmovd ymm6,ymm4,[ecx]
vpmaskmovd [ecx],ymm6,ymm4
vpmaskmovq ymm6,ymm4,YMMWORD PTR [ecx]
vpmaskmovq YMMWORD PTR [ecx],ymm6,ymm4
vpmaskmovq ymm6,ymm4,[ecx]
vpmaskmovq [ecx],ymm6,ymm4
# Tests for op imm8, ymm/mem256, ymm
vpermpd ymm2,ymm6,7
vpermpd ymm6,YMMWORD PTR [ecx],7
vpermpd ymm6,[ecx],7
vpermq ymm2,ymm6,7
vpermq ymm6,YMMWORD PTR [ecx],7
vpermq ymm6,[ecx],7
# Tests for op ymm/mem256, ymm, ymm
vpermd ymm2,ymm6,ymm4
vpermd ymm2,ymm6,YMMWORD PTR [ecx]
vpermd ymm2,ymm6,[ecx]
vpermps ymm2,ymm6,ymm4
vpermps ymm2,ymm6,YMMWORD PTR [ecx]
vpermps ymm2,ymm6,[ecx]
vpsllvd ymm2,ymm6,ymm4
vpsllvd ymm2,ymm6,YMMWORD PTR [ecx]
vpsllvd ymm2,ymm6,[ecx]
vpsllvq ymm2,ymm6,ymm4
vpsllvq ymm2,ymm6,YMMWORD PTR [ecx]
vpsllvq ymm2,ymm6,[ecx]
vpsravd ymm2,ymm6,ymm4
vpsravd ymm2,ymm6,YMMWORD PTR [ecx]
vpsravd ymm2,ymm6,[ecx]
vpsrlvd ymm2,ymm6,ymm4
vpsrlvd ymm2,ymm6,YMMWORD PTR [ecx]
vpsrlvd ymm2,ymm6,[ecx]
vpsrlvq ymm2,ymm6,ymm4
vpsrlvq ymm2,ymm6,YMMWORD PTR [ecx]
vpsrlvq ymm2,ymm6,[ecx]
# Tests for op mem256, ymm
vmovntdqa ymm4,YMMWORD PTR [ecx]
vmovntdqa ymm4,[ecx]
# Tests for op ymm, xmm
vbroadcastsd ymm6,xmm4
vbroadcastss ymm6,xmm4
# Tests for op imm8, ymm/mem256, ymm, ymm
vpblendd ymm2,ymm6,ymm4,7
vpblendd ymm2,ymm6,YMMWORD PTR [ecx],7
vpblendd ymm2,ymm6,[ecx],7
vperm2i128 ymm2,ymm6,ymm4,7
vperm2i128 ymm2,ymm6,YMMWORD PTR [ecx],7
vperm2i128 ymm2,ymm6,[ecx],7
# Tests for op imm8, xmm/mem128, ymm, ymm
vinserti128 ymm6,ymm4,xmm4,7
vinserti128 ymm6,ymm4,XMMWORD PTR [ecx],7
vinserti128 ymm6,ymm4,[ecx],7
# Tests for op mem128, ymm
vbroadcasti128 ymm4,XMMWORD PTR [ecx]
vbroadcasti128 ymm4,[ecx]
# Tests for op xmm/mem128, xmm, xmm
vpsllvd xmm2,xmm6,xmm4
vpsllvd xmm7,xmm6,XMMWORD PTR [ecx]
vpsllvd xmm7,xmm6,[ecx]
vpsllvq xmm2,xmm6,xmm4
vpsllvq xmm7,xmm6,XMMWORD PTR [ecx]
vpsllvq xmm7,xmm6,[ecx]
vpsravd xmm2,xmm6,xmm4
vpsravd xmm7,xmm6,XMMWORD PTR [ecx]
vpsravd xmm7,xmm6,[ecx]
vpsrlvd xmm2,xmm6,xmm4
vpsrlvd xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlvd xmm7,xmm6,[ecx]
vpsrlvq xmm2,xmm6,xmm4
vpsrlvq xmm7,xmm6,XMMWORD PTR [ecx]
vpsrlvq xmm7,xmm6,[ecx]
# Tests for op mem128, xmm, xmm
vpmaskmovd xmm6,xmm4,XMMWORD PTR [ecx]
vpmaskmovd xmm6,xmm4,[ecx]
vpmaskmovq xmm6,xmm4,XMMWORD PTR [ecx]
vpmaskmovq xmm6,xmm4,[ecx]
# Tests for op imm8, ymm, xmm128/mem
vextracti128 xmm6,ymm4,7
vextracti128 XMMWORD PTR [ecx],ymm4,7
vextracti128 [ecx],ymm4,7
# Tests for op xmm, xmm, mem128
vpmaskmovd XMMWORD PTR [ecx],xmm6,xmm4
vpmaskmovd [ecx],xmm6,xmm4
vpmaskmovq XMMWORD PTR [ecx],xmm6,xmm4
vpmaskmovq [ecx],xmm6,xmm4
# Tests for op imm8, xmm/mem128, xmm, xmm
vpblendd xmm2,xmm6,xmm4,7
vpblendd xmm2,xmm6,XMMWORD PTR [ecx],7
vpblendd xmm2,xmm6,[ecx],7
# Tests for op xmm/mem64, xmm
vpbroadcastq xmm6,xmm4
vpbroadcastq xmm4,QWORD PTR [ecx]
vpbroadcastq xmm4,[ecx]
# Tests for op xmm/mem64, ymm
vpbroadcastq ymm6,xmm4
vpbroadcastq ymm4,QWORD PTR [ecx]
vpbroadcastq ymm4,[ecx]
# Tests for op xmm/mem32, ymm
vpbroadcastd ymm4,xmm4
vpbroadcastd ymm4,DWORD PTR [ecx]
vpbroadcastd ymm4,[ecx]
# Tests for op xmm/mem32, xmm
vpbroadcastd xmm6,xmm4
vpbroadcastd xmm4,DWORD PTR [ecx]
vpbroadcastd xmm4,[ecx]
# Tests for op xmm/m16, xmm
vpbroadcastw xmm6,xmm4
vpbroadcastw xmm4,WORD PTR [ecx]
vpbroadcastw xmm4,[ecx]
# Tests for op xmm/m16, ymm
vpbroadcastw ymm6,xmm4
vpbroadcastw ymm4,WORD PTR [ecx]
vpbroadcastw ymm4,[ecx]
# Tests for op xmm/m8, xmm
vpbroadcastb xmm6,xmm4
vpbroadcastb xmm4,BYTE PTR [ecx]
vpbroadcastb xmm4,[ecx]
# Tests for op xmm/m8, ymm
vpbroadcastb ymm6,xmm4
vpbroadcastb ymm4,BYTE PTR [ecx]
vpbroadcastb ymm4,[ecx]
# Tests for op xmm, xmm
vbroadcastss xmm6,xmm4
# ---- extraction-residue boundary (was dataset metadata rows) ----
# next file: gas/testsuite/gas/i386/x86-64-avx512ifma_vl.s
# repo: stsp/binutils-ia16, size: 7,314 bytes
# Check 64bit AVX512{IFMA,VL} instructions
.allow_index_reg
.text
_start:
vpmadd52luq %xmm28, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq %xmm28, %xmm29, %xmm30{%k7} # AVX512{IFMA,VL}
vpmadd52luq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{IFMA,VL}
vpmadd52luq (%rcx), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq 2032(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq 2048(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq -2048(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq -2064(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52luq %ymm28, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq %ymm28, %ymm29, %ymm30{%k7} # AVX512{IFMA,VL}
vpmadd52luq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{IFMA,VL}
vpmadd52luq (%rcx), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq 4064(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq 4096(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq -4096(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq -4128(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52luq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52luq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq %xmm28, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq %xmm28, %xmm29, %xmm30{%k7} # AVX512{IFMA,VL}
vpmadd52huq %xmm28, %xmm29, %xmm30{%k7}{z} # AVX512{IFMA,VL}
vpmadd52huq (%rcx), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq 0x123(%rax,%r14,8), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq (%rcx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq 2032(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq 2048(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq -2048(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq -2064(%rdx), %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq 1016(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq 1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq -1024(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq -1032(%rdx){1to2}, %xmm29, %xmm30 # AVX512{IFMA,VL}
vpmadd52huq %ymm28, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq %ymm28, %ymm29, %ymm30{%k7} # AVX512{IFMA,VL}
vpmadd52huq %ymm28, %ymm29, %ymm30{%k7}{z} # AVX512{IFMA,VL}
vpmadd52huq (%rcx), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq 0x123(%rax,%r14,8), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq (%rcx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq 4064(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq 4096(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq -4096(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq -4128(%rdx), %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq 1016(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq 1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL}
vpmadd52huq -1024(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL} Disp8
vpmadd52huq -1032(%rdx){1to4}, %ymm29, %ymm30 # AVX512{IFMA,VL}
.intel_syntax noprefix
vpmadd52luq xmm30, xmm29, xmm28 # AVX512{IFMA,VL}
vpmadd52luq xmm30{k7}, xmm29, xmm28 # AVX512{IFMA,VL}
vpmadd52luq xmm30{k7}{z}, xmm29, xmm28 # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, [rcx]{1to2} # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{IFMA,VL}
vpmadd52luq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52luq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, ymm28 # AVX512{IFMA,VL}
vpmadd52luq ymm30{k7}, ymm29, ymm28 # AVX512{IFMA,VL}
vpmadd52luq ymm30{k7}{z}, ymm29, ymm28 # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, [rcx]{1to4} # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{IFMA,VL}
vpmadd52luq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52luq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, xmm28 # AVX512{IFMA,VL}
vpmadd52huq xmm30{k7}, xmm29, xmm28 # AVX512{IFMA,VL}
vpmadd52huq xmm30{k7}{z}, xmm29, xmm28 # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, XMMWORD PTR [rcx] # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, XMMWORD PTR [rax+r14*8+0x1234] # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, [rcx]{1to2} # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, XMMWORD PTR [rdx+2032] # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm30, xmm29, XMMWORD PTR [rdx+2048] # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, XMMWORD PTR [rdx-2048] # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm30, xmm29, XMMWORD PTR [rdx-2064] # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, [rdx+1016]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm30, xmm29, [rdx+1024]{1to2} # AVX512{IFMA,VL}
vpmadd52huq xmm30, xmm29, [rdx-1024]{1to2} # AVX512{IFMA,VL} Disp8
vpmadd52huq xmm30, xmm29, [rdx-1032]{1to2} # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, ymm28 # AVX512{IFMA,VL}
vpmadd52huq ymm30{k7}, ymm29, ymm28 # AVX512{IFMA,VL}
vpmadd52huq ymm30{k7}{z}, ymm29, ymm28 # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, YMMWORD PTR [rcx] # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, YMMWORD PTR [rax+r14*8+0x1234] # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, [rcx]{1to4} # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, YMMWORD PTR [rdx+4064] # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm30, ymm29, YMMWORD PTR [rdx+4096] # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, YMMWORD PTR [rdx-4096] # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm30, ymm29, YMMWORD PTR [rdx-4128] # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, [rdx+1016]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm30, ymm29, [rdx+1024]{1to4} # AVX512{IFMA,VL}
vpmadd52huq ymm30, ymm29, [rdx-1024]{1to4} # AVX512{IFMA,VL} Disp8
vpmadd52huq ymm30, ymm29, [rdx-1032]{1to4} # AVX512{IFMA,VL}
# ---- extraction-residue boundary (was dataset metadata rows) ----
# next file: gas/testsuite/gas/i386/movx64.s
# repo: stsp/binutils-ia16, size: 6,955 bytes
.text
.psize 0
movsx:
movsx %al, %cl
movsx %ax, %cl
movsx %eax, %cl
movsx %rax, %cl
movsx %al, %cx
movsx %ax, %cx
movsx %eax, %cx
movsx %rax, %cx
movsx %al, %ecx
movsx %ax, %ecx
movsx %eax, %ecx
movsx %rax, %ecx
movsx %al, %rcx
movsx %ax, %rcx
movsx %eax, %rcx
movsx %rax, %rcx
movsxb %al, %cl
movsxb %ax, %cl
movsxb %eax, %cl
movsxb %rax, %cl
movsxb %al, %cx
movsxb %ax, %cx
movsxb %eax, %cx
movsxb %rax, %cx
movsxb %al, %ecx
movsxb %ax, %ecx
movsxb %eax, %ecx
movsxb %rax, %ecx
movsxb %al, %rcx
movsxb %ax, %rcx
movsxb %eax, %rcx
movsxb %rax, %rcx
movsxw %al, %cl
movsxw %ax, %cl
movsxw %eax, %cl
movsxw %rax, %cl
movsxw %al, %cx
movsxw %ax, %cx
movsxw %eax, %cx
movsxw %rax, %cx
movsxw %al, %ecx
movsxw %ax, %ecx
movsxw %eax, %ecx
movsxw %rax, %ecx
movsxw %al, %rcx
movsxw %ax, %rcx
movsxw %eax, %rcx
movsxw %rax, %rcx
movsxl %al, %cl
movsxl %ax, %cl
movsxl %eax, %cl
movsxl %rax, %cl
movsxl %al, %cx
movsxl %ax, %cx
movsxl %eax, %cx
movsxl %rax, %cx
movsxl %al, %ecx
movsxl %ax, %ecx
movsxl %eax, %ecx
movsxl %rax, %ecx
movsxl %al, %rcx
movsxl %ax, %rcx
movsxl %eax, %rcx
movsxl %rax, %rcx
movsxd %al, %cl
movsxd %ax, %cl
movsxd %eax, %cl
movsxd %rax, %cl
movsxd %al, %cx
movsxd %ax, %cx
movsxd %eax, %cx
movsxd %rax, %cx
movsxd %al, %ecx
movsxd %ax, %ecx
movsxd %eax, %ecx
movsxd %rax, %ecx
movsxd %al, %rcx
movsxd %ax, %rcx
movsxd %eax, %rcx
movsxd %rax, %rcx
movsb %al, %cl
movsb %ax, %cl
movsb %eax, %cl
movsb %rax, %cl
movsb %al, %cx
movsb %ax, %cx
movsb %eax, %cx
movsb %rax, %cx
movsb %al, %ecx
movsb %ax, %ecx
movsb %eax, %ecx
movsb %rax, %ecx
movsb %al, %rcx
movsb %ax, %rcx
movsb %eax, %rcx
movsb %rax, %rcx
movsbw %al, %cl
movsbw %ax, %cl
movsbw %eax, %cl
movsbw %rax, %cl
movsbw %al, %cx
movsbw %ax, %cx
movsbw %eax, %cx
movsbw %rax, %cx
movsbw %al, %ecx
movsbw %ax, %ecx
movsbw %eax, %ecx
movsbw %rax, %ecx
movsbw %al, %rcx
movsbw %ax, %rcx
movsbw %eax, %rcx
movsbw %rax, %rcx
movsbl %al, %cl
movsbl %ax, %cl
movsbl %eax, %cl
movsbl %rax, %cl
movsbl %al, %cx
movsbl %ax, %cx
movsbl %eax, %cx
movsbl %rax, %cx
movsbl %al, %ecx
movsbl %ax, %ecx
movsbl %eax, %ecx
movsbl %rax, %ecx
movsbl %al, %rcx
movsbl %ax, %rcx
movsbl %eax, %rcx
movsbl %rax, %rcx
movsbq %al, %cl
movsbq %ax, %cl
movsbq %eax, %cl
movsbq %rax, %cl
movsbq %al, %cx
movsbq %ax, %cx
movsbq %eax, %cx
movsbq %rax, %cx
movsbq %al, %ecx
movsbq %ax, %ecx
movsbq %eax, %ecx
movsbq %rax, %ecx
movsbq %al, %rcx
movsbq %ax, %rcx
movsbq %eax, %rcx
movsbq %rax, %rcx
movsw %al, %cl
movsw %ax, %cl
movsw %eax, %cl
movsw %rax, %cl
movsw %al, %cx
movsw %ax, %cx
movsw %eax, %cx
movsw %rax, %cx
movsw %al, %ecx
movsw %ax, %ecx
movsw %eax, %ecx
movsw %rax, %ecx
movsw %al, %rcx
movsw %ax, %rcx
movsw %eax, %rcx
movsw %rax, %rcx
movswl %al, %cl
movswl %ax, %cl
movswl %eax, %cl
movswl %rax, %cl
movswl %al, %cx
movswl %ax, %cx
movswl %eax, %cx
movswl %rax, %cx
movswl %al, %ecx
movswl %ax, %ecx
movswl %eax, %ecx
movswl %rax, %ecx
movswl %al, %rcx
movswl %ax, %rcx
movswl %eax, %rcx
movswl %rax, %rcx
movswq %al, %cl
movswq %ax, %cl
movswq %eax, %cl
movswq %rax, %cl
movswq %al, %cx
movswq %ax, %cx
movswq %eax, %cx
movswq %rax, %cx
movswq %al, %ecx
movswq %ax, %ecx
movswq %eax, %ecx
movswq %rax, %ecx
movswq %al, %rcx
movswq %ax, %rcx
movswq %eax, %rcx
movswq %rax, %rcx
# Zero-extension mnemonics: the same exhaustive source/destination
# register-size matrix as the sign-extend groups above, under the
# "movzx" label.  Sources cycle %al/%ax/%eax/%rax for each destination
# %cl/%cx/%ecx/%rcx; most pairings are deliberately ill-sized,
# presumably to exercise the assembler's operand-size diagnostics —
# confirm against the companion .l/.d file in the testsuite.
movzx:
# Plain movzx: source size inferred from the source register.
movzx %al, %cl
movzx %ax, %cl
movzx %eax, %cl
movzx %rax, %cl
movzx %al, %cx
movzx %ax, %cx
movzx %eax, %cx
movzx %rax, %cx
movzx %al, %ecx
movzx %ax, %ecx
movzx %eax, %ecx
movzx %rax, %ecx
movzx %al, %rcx
movzx %ax, %rcx
movzx %eax, %rcx
movzx %rax, %rcx
# movzxb: explicit byte source (non-standard suffix spelling of movzb).
movzxb %al, %cl
movzxb %ax, %cl
movzxb %eax, %cl
movzxb %rax, %cl
movzxb %al, %cx
movzxb %ax, %cx
movzxb %eax, %cx
movzxb %rax, %cx
movzxb %al, %ecx
movzxb %ax, %ecx
movzxb %eax, %ecx
movzxb %rax, %ecx
movzxb %al, %rcx
movzxb %ax, %rcx
movzxb %eax, %rcx
movzxb %rax, %rcx
# movzxw: explicit word source.
movzxw %al, %cl
movzxw %ax, %cl
movzxw %eax, %cl
movzxw %rax, %cl
movzxw %al, %cx
movzxw %ax, %cx
movzxw %eax, %cx
movzxw %rax, %cx
movzxw %al, %ecx
movzxw %ax, %ecx
movzxw %eax, %ecx
movzxw %rax, %ecx
movzxw %al, %rcx
movzxw %ax, %rcx
movzxw %eax, %rcx
movzxw %rax, %rcx
# movzxl: explicit doubleword source (no MOVZX from 32-bit exists in
# the ISA, so every pairing here should be rejected).
movzxl %al, %cl
movzxl %ax, %cl
movzxl %eax, %cl
movzxl %rax, %cl
movzxl %al, %cx
movzxl %ax, %cx
movzxl %eax, %cx
movzxl %rax, %cx
movzxl %al, %ecx
movzxl %ax, %ecx
movzxl %eax, %ecx
movzxl %rax, %ecx
movzxl %al, %rcx
movzxl %ax, %rcx
movzxl %eax, %rcx
movzxl %rax, %rcx
# movzxd: explicit doubleword-source spelling.
movzxd %al, %cl
movzxd %ax, %cl
movzxd %eax, %cl
movzxd %rax, %cl
movzxd %al, %cx
movzxd %ax, %cx
movzxd %eax, %cx
movzxd %rax, %cx
movzxd %al, %ecx
movzxd %ax, %ecx
movzxd %eax, %ecx
movzxd %rax, %ecx
movzxd %al, %rcx
movzxd %ax, %rcx
movzxd %eax, %rcx
movzxd %rax, %rcx
# movzb: byte source, destination size inferred from destination register.
movzb %al, %cl
movzb %ax, %cl
movzb %eax, %cl
movzb %rax, %cl
movzb %al, %cx
movzb %ax, %cx
movzb %eax, %cx
movzb %rax, %cx
movzb %al, %ecx
movzb %ax, %ecx
movzb %eax, %ecx
movzb %rax, %ecx
movzb %al, %rcx
movzb %ax, %rcx
movzb %eax, %rcx
movzb %rax, %rcx
# movzbw: zero-extend byte -> word.  Only %al -> %cx is well-sized.
movzbw %al, %cl
movzbw %ax, %cl
movzbw %eax, %cl
movzbw %rax, %cl
movzbw %al, %cx
movzbw %ax, %cx
movzbw %eax, %cx
movzbw %rax, %cx
movzbw %al, %ecx
movzbw %ax, %ecx
movzbw %eax, %ecx
movzbw %rax, %ecx
movzbw %al, %rcx
movzbw %ax, %rcx
movzbw %eax, %rcx
movzbw %rax, %rcx
# movzbl: zero-extend byte -> doubleword.  Only %al -> %ecx is well-sized.
movzbl %al, %cl
movzbl %ax, %cl
movzbl %eax, %cl
movzbl %rax, %cl
movzbl %al, %cx
movzbl %ax, %cx
movzbl %eax, %cx
movzbl %rax, %cx
movzbl %al, %ecx
movzbl %ax, %ecx
movzbl %eax, %ecx
movzbl %rax, %ecx
movzbl %al, %rcx
movzbl %ax, %rcx
movzbl %eax, %rcx
movzbl %rax, %rcx
# movzbq: zero-extend byte -> quadword.  Only %al -> %rcx is well-sized.
movzbq %al, %cl
movzbq %ax, %cl
movzbq %eax, %cl
movzbq %rax, %cl
movzbq %al, %cx
movzbq %ax, %cx
movzbq %eax, %cx
movzbq %rax, %cx
movzbq %al, %ecx
movzbq %ax, %ecx
movzbq %eax, %ecx
movzbq %rax, %ecx
movzbq %al, %rcx
movzbq %ax, %rcx
movzbq %eax, %rcx
movzbq %rax, %rcx
# movzw: word source, destination size inferred from destination register.
movzw %al, %cl
movzw %ax, %cl
movzw %eax, %cl
movzw %rax, %cl
movzw %al, %cx
movzw %ax, %cx
movzw %eax, %cx
movzw %rax, %cx
movzw %al, %ecx
movzw %ax, %ecx
movzw %eax, %ecx
movzw %rax, %ecx
movzw %al, %rcx
movzw %ax, %rcx
movzw %eax, %rcx
movzw %rax, %rcx
# movzwl: zero-extend word -> doubleword.  Only %ax -> %ecx is well-sized.
movzwl %al, %cl
movzwl %ax, %cl
movzwl %eax, %cl
movzwl %rax, %cl
movzwl %al, %cx
movzwl %ax, %cx
movzwl %eax, %cx
movzwl %rax, %cx
movzwl %al, %ecx
movzwl %ax, %ecx
movzwl %eax, %ecx
movzwl %rax, %ecx
movzwl %al, %rcx
movzwl %ax, %rcx
movzwl %eax, %rcx
movzwl %rax, %rcx
# movzwq: zero-extend word -> quadword.  Only %ax -> %rcx is well-sized.
movzwq %al, %cl
movzwq %ax, %cl
movzwq %eax, %cl
movzwq %rax, %cl
movzwq %al, %cx
movzwq %ax, %cx
movzwq %eax, %cx
movzwq %rax, %cx
movzwq %al, %ecx
movzwq %ax, %ecx
movzwq %eax, %ecx
movzwq %rax, %ecx
movzwq %al, %rcx
movzwq %ax, %rcx
movzwq %eax, %rcx
movzwq %rax, %rcx
# Pad to a 16-byte boundary so the test's expected disassembly offsets
# stay stable.
.p2align 4